diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index fc590f029433f64a0f655dc28c3898c7847e4ab5..af3ca77a897a2ddea9ee8d7c6e4e30fa7f369e94 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -297,6 +297,34 @@ deploy-uc4-flink:
     JAVA_PROJECT_NAME: "uc4-flink"
     JAVA_PROJECT_DEPS: "flink-commons"
 
+deploy-uc1-hazelcastjet:
+  extends: .deploy-benchmarks
+  variables:
+    IMAGE_NAME: "theodolite-uc1-hazelcastjet"
+    JAVA_PROJECT_NAME: "uc1-hazelcastjet"
+    JAVA_PROJECT_DEPS: "hazelcastjet-commons"
+
+deploy-uc2-hazelcastjet:
+  extends: .deploy-benchmarks
+  variables:
+    IMAGE_NAME: "theodolite-uc2-hazelcastjet"
+    JAVA_PROJECT_NAME: "uc2-hazelcastjet"
+    JAVA_PROJECT_DEPS: "hazelcastjet-commons"
+
+deploy-uc3-hazelcastjet:
+  extends: .deploy-benchmarks
+  variables:
+    IMAGE_NAME: "theodolite-uc3-hazelcastjet"
+    JAVA_PROJECT_NAME: "uc3-hazelcastjet"
+    JAVA_PROJECT_DEPS: "hazelcastjet-commons"
+
+deploy-uc4-hazelcastjet:
+  extends: .deploy-benchmarks
+  variables:
+    IMAGE_NAME: "theodolite-uc4-hazelcastjet"
+    JAVA_PROJECT_NAME: "uc4-hazelcastjet"
+    JAVA_PROJECT_DEPS: "hazelcastjet-commons"
+
 deploy-uc1-beam-flink:
   extends: .deploy-benchmarks
   variables:
@@ -449,6 +477,15 @@ smoketest-uc1-beam-samza:
     DOCKER_COMPOSE_DIR: "uc1-beam-samza"
     JAVA_PROJECT_DEPS: "uc1-beam-samza,uc1-beam,beam-commons,uc1-load-generator,load-generator-commons"
 
+smoketest-uc1-hazelcastjet:
+  extends: .smoketest-benchmarks
+  needs:
+    - deploy-uc1-hazelcastjet
+    - deploy-uc1-load-generator
+  variables:
+    DOCKER_COMPOSE_DIR: "uc1-hazelcastjet"
+    JAVA_PROJECT_DEPS: "uc1-hazelcastjet,hazelcastjet-commons,uc1-load-generator,load-generator-commons"
+
 smoketest-uc2-kstreams:
   extends: .smoketest-benchmarks
   needs:
@@ -485,6 +522,15 @@ smoketest-uc2-beam-samza:
     DOCKER_COMPOSE_DIR: "uc2-beam-samza"
     JAVA_PROJECT_DEPS: "uc2-beam-samza,uc2-beam,beam-commons,uc2-load-generator,load-generator-commons"
 
+smoketest-uc2-hazelcastjet:
+  extends: .smoketest-benchmarks
+  needs:
+    - deploy-uc2-hazelcastjet
+    - deploy-uc2-load-generator
+  variables:
+    DOCKER_COMPOSE_DIR: "uc2-hazelcastjet"
+    JAVA_PROJECT_DEPS: "uc2-hazelcastjet,hazelcastjet-commons,uc2-load-generator,load-generator-commons"
+
 smoketest-uc3-kstreams:
   extends: .smoketest-benchmarks
   needs:
@@ -512,6 +558,15 @@ smoketest-uc3-beam-samza:
     DOCKER_COMPOSE_DIR: "uc3-beam-samza"
     JAVA_PROJECT_DEPS: "uc3-beam-samza,uc3-beam,beam-commons,uc3-load-generator,load-generator-commons"
 
+smoketest-uc3-hazelcastjet:
+  extends: .smoketest-benchmarks
+  needs:
+    - deploy-uc3-hazelcastjet
+    - deploy-uc3-load-generator
+  variables:
+    DOCKER_COMPOSE_DIR: "uc3-hazelcastjet"
+    JAVA_PROJECT_DEPS: "uc3-hazelcastjet,hazelcastjet-commons,uc3-load-generator,load-generator-commons"
+
 smoketest-uc4-kstreams:
   extends: .smoketest-benchmarks
   needs:
@@ -548,6 +603,14 @@ smoketest-uc4-beam-samza:
     DOCKER_COMPOSE_DIR: "uc4-beam-samza"
     JAVA_PROJECT_DEPS: "uc4-beam-samza,uc4-beam,beam-commons,uc4-load-generator,load-generator-commons"
 
+smoketest-uc4-hazelcastjet:
+  extends: .smoketest-benchmarks
+  needs:
+    - deploy-uc4-hazelcastjet
+    - deploy-uc4-load-generator
+  variables:
+    DOCKER_COMPOSE_DIR: "uc4-hazelcastjet"
+    JAVA_PROJECT_DEPS: "uc4-hazelcastjet,hazelcastjet-commons,uc4-load-generator,load-generator-commons"
 
 # Theodolite Framework
 
diff --git a/CITATION.cff b/CITATION.cff
index b6ca4542010b83e12206fbc0d9841683b43e1d57..160146c844b1d128299617ae8d93ac4af77e4ca0 100644
--- a/CITATION.cff
+++ b/CITATION.cff
@@ -1,6 +1,6 @@
 cff-version: "1.1.0"
 message: "If you use Theodolite, please cite it using these metadata."
-authors: 
+authors:
   - family-names: Henning
     given-names: "Sören"
     orcid: "https://orcid.org/0000-0001-6912-2549"
@@ -8,13 +8,13 @@ authors:
     given-names: Wilhelm
     orcid: "https://orcid.org/0000-0001-6625-4335"
 title: Theodolite
-version: "0.6.4"
+version: "0.7.0"
 repository-code: "https://github.com/cau-se/theodolite"
 license: "Apache-2.0"
 doi: "10.1016/j.bdr.2021.100209"
 preferred-citation:
   type: article
-  authors: 
+  authors:
     - family-names: Henning
       given-names: "Sören"
       orcid: "https://orcid.org/0000-0001-6912-2549"
diff --git a/codemeta.json b/codemeta.json
index 948a34628ec919e2492b61e3ac9997392dc5e030..fa2eb23a956d9ad2525beee1b5ca343845c38d2d 100644
--- a/codemeta.json
+++ b/codemeta.json
@@ -5,10 +5,10 @@
     "codeRepository": "https://github.com/cau-se/theodolite",
     "dateCreated": "2020-03-13",
     "datePublished": "2020-07-27",
-    "dateModified": "2022-01-24",
+    "dateModified": "2022-05-11",
     "downloadUrl": "https://github.com/cau-se/theodolite/releases",
     "name": "Theodolite",
-    "version": "0.6.4",
+    "version": "0.7.0",
     "description": "Theodolite is a framework for benchmarking the horizontal and vertical scalability of cloud-native applications.",
     "developmentStatus": "active",
     "relatedLink": [
diff --git a/docs/Gemfile b/docs/Gemfile
index c9ce8dff9c02691c4a6a3be9e94341745378fb0b..b2bab9c1cd6c4591d474c8545e459276692ab487 100644
--- a/docs/Gemfile
+++ b/docs/Gemfile
@@ -9,11 +9,11 @@ source "https://rubygems.org"
 # Happy Jekylling!
 #gem "jekyll", "~> 4.2.0"
 # This is the default theme for new Jekyll sites. You may change this to anything you like.
-gem "minima", "~> 2.5"
+#gem "minima", "~> 2.5"
 # If you want to use GitHub Pages, remove the "gem "jekyll"" above and
 # uncomment the line below. To upgrade, run `bundle update github-pages`.
-# gem "github-pages", group: :jekyll_plugins
-gem "github-pages", "~> 215", group: :jekyll_plugins
+gem "github-pages", group: :jekyll_plugins
+#gem "github-pages", "~> 226", group: :jekyll_plugins
 # If you have any plugins, put them here!
 #group :jekyll_plugins do
   #gem "jekyll-feed", "~> 0.12"
diff --git a/docs/Gemfile.lock b/docs/Gemfile.lock
index 2159f9d34e79005f1ae5292b5f828074b38f096e..fc60fc6f6fddbb9b18a85ed1f472a444e0ed4cea 100644
--- a/docs/Gemfile.lock
+++ b/docs/Gemfile.lock
@@ -1,7 +1,7 @@
 GEM
   remote: https://rubygems.org/
   specs:
-    activesupport (6.0.4)
+    activesupport (6.0.4.8)
       concurrent-ruby (~> 1.0, >= 1.0.2)
       i18n (>= 0.7, < 2)
       minitest (~> 5.1)
@@ -14,44 +14,54 @@ GEM
       execjs
     coffee-script-source (1.11.1)
     colorator (1.1.0)
-    commonmarker (0.17.13)
-      ruby-enum (~> 0.5)
-    concurrent-ruby (1.1.9)
-    dnsruby (1.61.7)
+    commonmarker (0.23.4)
+    concurrent-ruby (1.1.10)
+    dnsruby (1.61.9)
       simpleidn (~> 0.1)
-    em-websocket (0.5.2)
+    em-websocket (0.5.3)
       eventmachine (>= 0.12.9)
-      http_parser.rb (~> 0.6.0)
-    ethon (0.14.0)
+      http_parser.rb (~> 0)
+    ethon (0.15.0)
       ffi (>= 1.15.0)
     eventmachine (1.2.7)
     execjs (2.8.1)
-    faraday (1.4.3)
+    faraday (1.10.0)
       faraday-em_http (~> 1.0)
       faraday-em_synchrony (~> 1.0)
       faraday-excon (~> 1.1)
+      faraday-httpclient (~> 1.0)
+      faraday-multipart (~> 1.0)
       faraday-net_http (~> 1.0)
-      faraday-net_http_persistent (~> 1.1)
-      multipart-post (>= 1.2, < 3)
+      faraday-net_http_persistent (~> 1.0)
+      faraday-patron (~> 1.0)
+      faraday-rack (~> 1.0)
+      faraday-retry (~> 1.0)
       ruby2_keywords (>= 0.0.4)
     faraday-em_http (1.0.0)
     faraday-em_synchrony (1.0.0)
     faraday-excon (1.1.0)
+    faraday-httpclient (1.0.1)
+    faraday-multipart (1.0.3)
+      multipart-post (>= 1.2, < 3)
     faraday-net_http (1.0.1)
-    faraday-net_http_persistent (1.1.0)
-    ffi (1.15.3)
+    faraday-net_http_persistent (1.2.0)
+    faraday-patron (1.0.0)
+    faraday-rack (1.0.0)
+    faraday-retry (1.0.3)
+    ffi (1.15.5)
     forwardable-extended (2.6.0)
     gemoji (3.0.1)
-    github-pages (215)
-      github-pages-health-check (= 1.17.2)
-      jekyll (= 3.9.0)
+    github-pages (226)
+      github-pages-health-check (= 1.17.9)
+      jekyll (= 3.9.2)
       jekyll-avatar (= 0.7.0)
       jekyll-coffeescript (= 1.1.1)
-      jekyll-commonmark-ghpages (= 0.1.6)
+      jekyll-commonmark-ghpages (= 0.2.0)
       jekyll-default-layout (= 0.1.4)
       jekyll-feed (= 0.15.1)
       jekyll-gist (= 1.5.0)
       jekyll-github-metadata (= 2.13.0)
+      jekyll-include-cache (= 0.2.1)
       jekyll-mentions (= 1.6.0)
       jekyll-optional-front-matter (= 0.3.2)
       jekyll-paginate (= 1.1.0)
@@ -60,53 +70,53 @@ GEM
       jekyll-relative-links (= 0.6.1)
       jekyll-remote-theme (= 0.4.3)
       jekyll-sass-converter (= 1.5.2)
-      jekyll-seo-tag (= 2.7.1)
+      jekyll-seo-tag (= 2.8.0)
       jekyll-sitemap (= 1.4.0)
       jekyll-swiss (= 1.0.0)
-      jekyll-theme-architect (= 0.1.1)
-      jekyll-theme-cayman (= 0.1.1)
-      jekyll-theme-dinky (= 0.1.1)
-      jekyll-theme-hacker (= 0.1.2)
-      jekyll-theme-leap-day (= 0.1.1)
-      jekyll-theme-merlot (= 0.1.1)
-      jekyll-theme-midnight (= 0.1.1)
-      jekyll-theme-minimal (= 0.1.1)
-      jekyll-theme-modernist (= 0.1.1)
-      jekyll-theme-primer (= 0.5.4)
-      jekyll-theme-slate (= 0.1.1)
-      jekyll-theme-tactile (= 0.1.1)
-      jekyll-theme-time-machine (= 0.1.1)
+      jekyll-theme-architect (= 0.2.0)
+      jekyll-theme-cayman (= 0.2.0)
+      jekyll-theme-dinky (= 0.2.0)
+      jekyll-theme-hacker (= 0.2.0)
+      jekyll-theme-leap-day (= 0.2.0)
+      jekyll-theme-merlot (= 0.2.0)
+      jekyll-theme-midnight (= 0.2.0)
+      jekyll-theme-minimal (= 0.2.0)
+      jekyll-theme-modernist (= 0.2.0)
+      jekyll-theme-primer (= 0.6.0)
+      jekyll-theme-slate (= 0.2.0)
+      jekyll-theme-tactile (= 0.2.0)
+      jekyll-theme-time-machine (= 0.2.0)
       jekyll-titles-from-headings (= 0.5.3)
       jemoji (= 0.12.0)
-      kramdown (= 2.3.1)
+      kramdown (= 2.3.2)
       kramdown-parser-gfm (= 1.1.0)
       liquid (= 4.0.3)
       mercenary (~> 0.3)
       minima (= 2.5.1)
-      nokogiri (>= 1.10.4, < 2.0)
+      nokogiri (>= 1.13.4, < 2.0)
       rouge (= 3.26.0)
       terminal-table (~> 1.4)
-    github-pages-health-check (1.17.2)
+    github-pages-health-check (1.17.9)
       addressable (~> 2.3)
       dnsruby (~> 1.60)
       octokit (~> 4.0)
-      public_suffix (>= 2.0.2, < 5.0)
+      public_suffix (>= 3.0, < 5.0)
       typhoeus (~> 1.3)
-    html-pipeline (2.14.0)
+    html-pipeline (2.14.1)
       activesupport (>= 2)
       nokogiri (>= 1.4)
-    html-proofer (3.19.2)
+    html-proofer (3.19.3)
       addressable (~> 2.3)
       mercenary (~> 0.3)
-      nokogumbo (~> 2.0)
+      nokogiri (~> 1.12)
       parallel (~> 1.3)
       rainbow (~> 3.0)
       typhoeus (~> 1.3)
       yell (~> 2.0)
-    http_parser.rb (0.6.0)
+    http_parser.rb (0.8.0)
     i18n (0.9.5)
       concurrent-ruby (~> 1.0)
-    jekyll (3.9.0)
+    jekyll (3.9.2)
       addressable (~> 2.4)
       colorator (~> 1.0)
       em-websocket (~> 0.5)
@@ -124,12 +134,12 @@ GEM
     jekyll-coffeescript (1.1.1)
       coffee-script (~> 2.2)
       coffee-script-source (~> 1.11.1)
-    jekyll-commonmark (1.3.1)
-      commonmarker (~> 0.14)
-      jekyll (>= 3.7, < 5.0)
-    jekyll-commonmark-ghpages (0.1.6)
-      commonmarker (~> 0.17.6)
-      jekyll-commonmark (~> 1.2)
+    jekyll-commonmark (1.4.0)
+      commonmarker (~> 0.22)
+    jekyll-commonmark-ghpages (0.2.0)
+      commonmarker (~> 0.23.4)
+      jekyll (~> 3.9.0)
+      jekyll-commonmark (~> 1.4.0)
       rouge (>= 2.0, < 4.0)
     jekyll-default-layout (0.1.4)
       jekyll (~> 3.0)
@@ -140,6 +150,8 @@ GEM
     jekyll-github-metadata (2.13.0)
       jekyll (>= 3.4, < 5.0)
       octokit (~> 4.0, != 4.4.0)
+    jekyll-include-cache (0.2.1)
+      jekyll (>= 3.7, < 5.0)
     jekyll-mentions (1.6.0)
       html-pipeline (~> 2.3)
       jekyll (>= 3.7, < 5.0)
@@ -159,50 +171,50 @@ GEM
       rubyzip (>= 1.3.0, < 3.0)
     jekyll-sass-converter (1.5.2)
       sass (~> 3.4)
-    jekyll-seo-tag (2.7.1)
+    jekyll-seo-tag (2.8.0)
       jekyll (>= 3.8, < 5.0)
     jekyll-sitemap (1.4.0)
       jekyll (>= 3.7, < 5.0)
     jekyll-swiss (1.0.0)
-    jekyll-theme-architect (0.1.1)
-      jekyll (~> 3.5)
+    jekyll-theme-architect (0.2.0)
+      jekyll (> 3.5, < 5.0)
       jekyll-seo-tag (~> 2.0)
-    jekyll-theme-cayman (0.1.1)
-      jekyll (~> 3.5)
+    jekyll-theme-cayman (0.2.0)
+      jekyll (> 3.5, < 5.0)
       jekyll-seo-tag (~> 2.0)
-    jekyll-theme-dinky (0.1.1)
-      jekyll (~> 3.5)
+    jekyll-theme-dinky (0.2.0)
+      jekyll (> 3.5, < 5.0)
       jekyll-seo-tag (~> 2.0)
-    jekyll-theme-hacker (0.1.2)
+    jekyll-theme-hacker (0.2.0)
       jekyll (> 3.5, < 5.0)
       jekyll-seo-tag (~> 2.0)
-    jekyll-theme-leap-day (0.1.1)
-      jekyll (~> 3.5)
+    jekyll-theme-leap-day (0.2.0)
+      jekyll (> 3.5, < 5.0)
       jekyll-seo-tag (~> 2.0)
-    jekyll-theme-merlot (0.1.1)
-      jekyll (~> 3.5)
+    jekyll-theme-merlot (0.2.0)
+      jekyll (> 3.5, < 5.0)
       jekyll-seo-tag (~> 2.0)
-    jekyll-theme-midnight (0.1.1)
-      jekyll (~> 3.5)
+    jekyll-theme-midnight (0.2.0)
+      jekyll (> 3.5, < 5.0)
       jekyll-seo-tag (~> 2.0)
-    jekyll-theme-minimal (0.1.1)
-      jekyll (~> 3.5)
+    jekyll-theme-minimal (0.2.0)
+      jekyll (> 3.5, < 5.0)
       jekyll-seo-tag (~> 2.0)
-    jekyll-theme-modernist (0.1.1)
-      jekyll (~> 3.5)
+    jekyll-theme-modernist (0.2.0)
+      jekyll (> 3.5, < 5.0)
       jekyll-seo-tag (~> 2.0)
-    jekyll-theme-primer (0.5.4)
+    jekyll-theme-primer (0.6.0)
       jekyll (> 3.5, < 5.0)
       jekyll-github-metadata (~> 2.9)
       jekyll-seo-tag (~> 2.0)
-    jekyll-theme-slate (0.1.1)
-      jekyll (~> 3.5)
+    jekyll-theme-slate (0.2.0)
+      jekyll (> 3.5, < 5.0)
       jekyll-seo-tag (~> 2.0)
-    jekyll-theme-tactile (0.1.1)
-      jekyll (~> 3.5)
+    jekyll-theme-tactile (0.2.0)
+      jekyll (> 3.5, < 5.0)
       jekyll-seo-tag (~> 2.0)
-    jekyll-theme-time-machine (0.1.1)
-      jekyll (~> 3.5)
+    jekyll-theme-time-machine (0.2.0)
+      jekyll (> 3.5, < 5.0)
       jekyll-seo-tag (~> 2.0)
     jekyll-titles-from-headings (0.5.3)
       jekyll (>= 3.3, < 5.0)
@@ -212,12 +224,12 @@ GEM
       gemoji (~> 3.0)
       html-pipeline (~> 2.2)
       jekyll (>= 3.0, < 5.0)
-    kramdown (2.3.1)
+    kramdown (2.3.2)
       rexml
     kramdown-parser-gfm (1.1.0)
       kramdown (~> 2.0)
     liquid (4.0.3)
-    listen (3.5.1)
+    listen (3.7.1)
       rb-fsevent (~> 0.10, >= 0.10.3)
       rb-inotify (~> 0.9, >= 0.9.10)
     mercenary (0.3.6)
@@ -225,30 +237,26 @@ GEM
       jekyll (>= 3.5, < 5.0)
       jekyll-feed (~> 0.9)
       jekyll-seo-tag (~> 2.1)
-    minitest (5.14.4)
+    minitest (5.15.0)
     multipart-post (2.1.1)
-    nokogiri (1.13.3-x86_64-linux)
+    nokogiri (1.13.4-x86_64-linux)
       racc (~> 1.4)
-    nokogumbo (2.0.5)
-      nokogiri (~> 1.8, >= 1.8.4)
-    octokit (4.21.0)
+    octokit (4.22.0)
       faraday (>= 0.9)
       sawyer (~> 0.8.0, >= 0.5.3)
-    parallel (1.21.0)
+    parallel (1.22.1)
     pathutil (0.16.2)
       forwardable-extended (~> 2.6)
-    public_suffix (4.0.6)
+    public_suffix (4.0.7)
     racc (1.6.0)
-    rainbow (3.0.0)
-    rb-fsevent (0.11.0)
+    rainbow (3.1.1)
+    rb-fsevent (0.11.1)
     rb-inotify (0.10.1)
       ffi (~> 1.0)
     rexml (3.2.5)
     rouge (3.26.0)
-    ruby-enum (0.9.0)
-      i18n
-    ruby2_keywords (0.0.4)
-    rubyzip (2.3.0)
+    ruby2_keywords (0.0.5)
+    rubyzip (2.3.2)
     safe_yaml (1.0.5)
     sass (3.7.4)
       sass-listen (~> 4.0.0)
@@ -269,19 +277,17 @@ GEM
       thread_safe (~> 0.1)
     unf (0.1.4)
       unf_ext
-    unf_ext (0.0.7.7)
-    unicode-display_width (1.7.0)
+    unf_ext (0.0.8.1)
+    unicode-display_width (1.8.0)
     yell (2.2.2)
-    zeitwerk (2.4.2)
+    zeitwerk (2.5.4)
 
 PLATFORMS
   x86_64-linux
-  x86_64-linux-musl
 
 DEPENDENCIES
-  github-pages (~> 215)
+  github-pages
   html-proofer
-  minima (~> 2.5)
   tzinfo (~> 1.2)
   tzinfo-data
   wdm (~> 0.1.1)
diff --git a/docs/creating-a-benchmark.md b/docs/creating-a-benchmark.md
index b09c989e59d910fc352af9d1c5690b224e3346e6..b4602d6f7fd209b5851a9a9db128dfce6369b2a4 100644
--- a/docs/creating-a-benchmark.md
+++ b/docs/creating-a-benchmark.md
@@ -91,10 +91,37 @@ filesystem:
   - example-service.yaml
 ```
 
-<!-- ### Before and after actions -->
+### Actions
 
+Sometimes it is not sufficient to just define resources that are created and deleted when running a benchmark. Instead, it might be necessary to define certain actions that will be executed before running or after stopping the benchmark.
 
+Theodolite allows executing commands on running pods. This is similar to `kubectl exec` or Kubernetes' [container lifecycle handlers](https://kubernetes.io/docs/tasks/configure-pod-container/attach-handler-lifecycle-event/). Theodolite actions can run before (`beforeActions`) or after (`afterActions`) all `sut`, `loadGenerator` or `infrastructure` resources are deployed.
+For example, the following actions will create a file in a pod with label `app: logger` before the SUT is started and delete it after the SUT is stopped:
 
+```yaml
+  sut:
+    resources: # ...
+    beforeActions:
+      - selector:
+          pod:
+            matchLabels:
+              app: logger
+        exec:
+          command: ["touch", "file-used-by-logger.txt"]
+          timeoutSeconds: 90
+    afterActions:
+      - selector:
+          pod:
+            matchLabels:
+              app: logger
+        exec:
+          command: [ "rm", "file-used-by-logger.txt" ]
+          timeoutSeconds: 90
+```
+
+Theodolite checks if all referenced pods are available for the specified actions. That means these pods must either be defined in `infrastructure` or already deployed in the cluster. If not all referenced pods are available, the benchmark will not be set as `Ready`. Consequently, an action cannot be executed on a pod that is defined as an SUT or load generator resource.
+
+*Note: Actions should be used sparingly. While it is possible to define entire benchmarks imperatively as actions, it is considered better practice to define as much as possible using declarative, native Kubernetes resource files.*
 
 <!--
 A Benchmark refers to other Kubernetes resources (e.g., Deployments, Services, ConfigMaps), which describe the system under test, the load generator and infrastructure components such as a middleware used in the benchmark. To manage those resources, Theodolite needs to have access to them. This is done by bundling resources in ConfigMaps.
@@ -116,7 +143,7 @@ If a benchmark is [executed by an Execution](running-benchmarks), these patchers
 ## Kafka Configuration
 
 Theodolite allows to automatically create and remove Kafka topics for each SLO experiment by setting a `kafkaConfig`.
-It `bootstrapServer` needs to point your Kafka cluster and `topics` configures the list of Kafka topics to be created/removed.
+`bootstrapServer` needs to point to your Kafka cluster and `topics` configures the list of Kafka topics to be created/removed.
 For each topic, you configure its name, the number of partitions and the replication factor.
 
 With the `removeOnly: True` property, you can also instruct Theodolite to only remove topics and not create them.
diff --git a/docs/drafts/actions.md b/docs/drafts/actions.md
deleted file mode 100644
index 8092fddb088b3fe8fc64f51bff03bb0c6504b74f..0000000000000000000000000000000000000000
--- a/docs/drafts/actions.md
+++ /dev/null
@@ -1,62 +0,0 @@
-## Infrastructure
-The necessary infrastructure for an execution can be defined in the benchmark manifests. The related resources are create *before* an execution is started, and removed *after* an execution is finished.
-
-### Example
-
-```yaml
-  infrastructure:
-    resources:
-      - configMap:
-          name: "example-configmap"
-          files:
-            - "uc1-kstreams-deployment.yaml"
-```
-
-## Action Commands
-Theodolite allows to execute commands on running pods (similar to the `kubectl exec -it <pod-name> -- <command>` command). This commands can be run either before (via so called `beforeActions`) or after (via so called `afterActions`) an experiment is executed.
-
-Theodolite checks if all required pods are available for the specified actions (i.e. the pods must either be defined as infrastructure or already deployed in the cluster). If not all pods/resources are available, the benchmark will not be set as `Ready`. Consequently, an action cannot be executed on a pod that is defined as an SUT or loadGen resource.
-
-### Example
-
-```yaml
-# For the system under test
-  sut:
-    resources: ...
-    beforeActions:
-      - selector:
-          pod:
-            matchLabels:
-              app: busybox1
-        exec:
-          command: ["touch", "test-file-sut"]
-          timeoutSeconds: 90
-    afterActions:
-      - selector:
-          pod:
-            matchLabels:
-              app: busybox1
-        exec:
-          command: [ "touch", "test-file-sut-after" ]
-          timeoutSeconds: 90
-
-# analog, for the load generator
-  loadGenerator:
-    resources: ... 
-    beforeActions:
-      - selector:
-          pod:
-            matchLabels:
-              app: busybox1
-        exec:
-          command: ["touch", "test-file-loadGen"]
-          timeoutSeconds: 90
-    afterActions:
-      - selector:
-          pod:
-            matchLabels:
-              app: busybox1
-        exec:
-          command: [ "touch", "test-file-loadGen-after" ]
-          timeoutSeconds: 90
-```
\ No newline at end of file
diff --git a/docs/index.yaml b/docs/index.yaml
index 3e0de103a78f3529d314727ed59be3dcdc333fc9..c6cf4f6dcbeaf4b4ede37bca925025db950a2a77 100644
--- a/docs/index.yaml
+++ b/docs/index.yaml
@@ -1,6 +1,76 @@
 apiVersion: v1
 entries:
   theodolite:
+  - apiVersion: v2
+    appVersion: 0.7.0
+    created: "2022-05-11T13:49:02.457130925+02:00"
+    dependencies:
+    - condition: grafana.enabled
+      name: grafana
+      repository: https://grafana.github.io/helm-charts
+      version: 6.17.5
+    - condition: kube-prometheus-stack.enabled
+      name: kube-prometheus-stack
+      repository: https://prometheus-community.github.io/helm-charts
+      version: 20.0.1
+    - condition: cp-helm-charts.enabled
+      name: cp-helm-charts
+      repository: https://soerenhenning.github.io/cp-helm-charts
+      version: 0.6.0
+    - condition: strimzi.enabled
+      name: strimzi-kafka-operator
+      repository: https://strimzi.io/charts/
+      version: 0.28.0
+    description: Theodolite is a framework for benchmarking the horizontal and vertical
+      scalability of cloud-native applications.
+    digest: af10134baa30bb07423f78240fe1c609381e1c616585883cf5d3aded2d86a2b1
+    home: https://www.theodolite.rocks
+    maintainers:
+    - email: soeren.henning@email.uni-kiel.de
+      name: Sören Henning
+      url: https://www.se.informatik.uni-kiel.de/en/team/soeren-henning-m-sc
+    name: theodolite
+    sources:
+    - https://github.com/cau-se/theodolite
+    type: application
+    urls:
+    - https://github.com/cau-se/theodolite/releases/download/v0.7.0/theodolite-0.7.0%20copy.tgz
+    version: 0.7.0
+  - apiVersion: v2
+    appVersion: 0.7.0
+    created: "2022-05-11T13:49:02.491041789+02:00"
+    dependencies:
+    - condition: grafana.enabled
+      name: grafana
+      repository: https://grafana.github.io/helm-charts
+      version: 6.17.5
+    - condition: kube-prometheus-stack.enabled
+      name: kube-prometheus-stack
+      repository: https://prometheus-community.github.io/helm-charts
+      version: 20.0.1
+    - condition: cp-helm-charts.enabled
+      name: cp-helm-charts
+      repository: https://soerenhenning.github.io/cp-helm-charts
+      version: 0.6.0
+    - condition: strimzi.enabled
+      name: strimzi-kafka-operator
+      repository: https://strimzi.io/charts/
+      version: 0.28.0
+    description: Theodolite is a framework for benchmarking the horizontal and vertical
+      scalability of cloud-native applications.
+    digest: af10134baa30bb07423f78240fe1c609381e1c616585883cf5d3aded2d86a2b1
+    home: https://www.theodolite.rocks
+    maintainers:
+    - email: soeren.henning@email.uni-kiel.de
+      name: Sören Henning
+      url: https://www.se.informatik.uni-kiel.de/en/team/soeren-henning-m-sc
+    name: theodolite
+    sources:
+    - https://github.com/cau-se/theodolite
+    type: application
+    urls:
+    - https://github.com/cau-se/theodolite/releases/download/v0.7.0/theodolite-0.7.0.tgz
+    version: 0.7.0
   - apiVersion: v2
     appVersion: 0.6.4
     created: "2022-02-16T16:09:11.967649304+01:00"
@@ -316,4 +386,4 @@ entries:
     urls:
     - https://github.com/cau-se/theodolite/releases/download/v0.4.0/theodolite-0.4.0.tgz
     version: 0.4.0
-generated: "2022-02-16T16:09:11.93111234+01:00"
+generated: "2022-05-11T13:49:02.423487026+02:00"
diff --git a/helm/Chart.yaml b/helm/Chart.yaml
index 973c985b5bdaa4d53390954017ed9176bb396f55..5d707a70599b8a26ce828c94223ac20903f71848 100644
--- a/helm/Chart.yaml
+++ b/helm/Chart.yaml
@@ -30,6 +30,6 @@ dependencies:
     condition: strimzi.enabled
 
 
-version: 0.7.0-SNAPSHOT
+version: 0.8.0-SNAPSHOT
 
-appVersion: 0.7.0-SNAPSHOT
+appVersion: 0.8.0-SNAPSHOT
diff --git a/helm/templates/prometheus/operator-role-binding.yaml b/helm/templates/prometheus/operator-role-binding.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..0fadc048940ca5254e7ce2867a657361f32ef3b7
--- /dev/null
+++ b/helm/templates/prometheus/operator-role-binding.yaml
@@ -0,0 +1,16 @@
+{{- if not (index .Values "kube-prometheus-stack" "global" "rbac" "create") -}}
+apiVersion: rbac.authorization.k8s.io/v1
+kind: RoleBinding
+metadata:
+  name: {{ template "theodolite.fullname" . }}-kube-prometheus-operator
+  labels:
+    app: {{ template "theodolite.fullname" . }}-kube-prometheus-operator
+roleRef:
+  apiGroup: rbac.authorization.k8s.io
+  kind: Role
+  name: {{ template "theodolite.fullname" . }}-kube-prometheus-operator
+subjects:
+- kind: ServiceAccount
+  name: {{ template "theodolite.fullname" . }}-kube-prometheus-operator
+  namespace: {{ .Release.Namespace }}
+{{- end }}
diff --git a/helm/templates/prometheus/operator-role.yaml b/helm/templates/prometheus/operator-role.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..16481a2ec6a4387e589079433490ac5d437dc04b
--- /dev/null
+++ b/helm/templates/prometheus/operator-role.yaml
@@ -0,0 +1,79 @@
+{{- if not (index .Values "kube-prometheus-stack" "global" "rbac" "create") -}}
+apiVersion: rbac.authorization.k8s.io/v1
+kind: Role
+metadata:
+  name: {{ template "theodolite.fullname" . }}-kube-prometheus-operator
+  labels:
+    app: {{ template "theodolite.name" . }}-kube-prometheus-operator
+rules:
+- apiGroups:
+  - monitoring.coreos.com
+  resources:
+  - alertmanagers
+  - alertmanagers/finalizers
+  - alertmanagerconfigs
+  - prometheuses
+  - prometheuses/finalizers
+  - thanosrulers
+  - thanosrulers/finalizers
+  - servicemonitors
+  - podmonitors
+  - probes
+  - prometheusrules
+  verbs:
+  - '*'
+- apiGroups:
+  - apps
+  resources:
+  - statefulsets
+  verbs:
+  - '*'
+- apiGroups:
+  - ""
+  resources:
+  - configmaps
+  - secrets
+  verbs:
+  - '*'
+- apiGroups:
+  - ""
+  resources:
+  - pods
+  verbs:
+  - list
+  - delete
+- apiGroups:
+  - ""
+  resources:
+  - services
+  - services/finalizers
+  - endpoints
+  verbs:
+  - get
+  - create
+  - update
+  - delete
+- apiGroups:
+  - ""
+  resources:
+  - nodes
+  verbs:
+  - list
+  - watch
+- apiGroups:
+  - ""
+  resources:
+  - namespaces
+  verbs:
+  - get
+  - list
+  - watch
+- apiGroups:
+  - networking.k8s.io
+  resources:
+  - ingresses
+  verbs:
+  - get
+  - list
+  - watch
+{{- end }}
diff --git a/helm/templates/prometheus/cluster-role-binding.yaml b/helm/templates/prometheus/role-binding.yaml
similarity index 75%
rename from helm/templates/prometheus/cluster-role-binding.yaml
rename to helm/templates/prometheus/role-binding.yaml
index f2f167b94b79ad4db130565777cb8af486762c8c..722f806e0621a5775083f74f064e0c9eae18f1d8 100644
--- a/helm/templates/prometheus/cluster-role-binding.yaml
+++ b/helm/templates/prometheus/role-binding.yaml
@@ -1,14 +1,14 @@
-{{- if .Values.prometheus.clusterRoleBinding.enabled -}}
+{{- if .Values.prometheus.roleBinding.enabled -}}
 apiVersion: rbac.authorization.k8s.io/v1
-kind: ClusterRoleBinding
+kind: RoleBinding
 metadata:
   name: {{ template "theodolite.fullname" . }}-prometheus
 roleRef:
   apiGroup: rbac.authorization.k8s.io
-  kind: ClusterRole
+  kind: Role
   name: {{ template "theodolite.fullname" . }}-prometheus
 subjects:
 - kind: ServiceAccount
   name: {{ template "theodolite.fullname" . }}-prometheus
   namespace: {{ .Release.Namespace }}
-{{- end}}
\ No newline at end of file
+{{- end}}
diff --git a/helm/templates/prometheus/cluster-role.yaml b/helm/templates/prometheus/role.yaml
similarity index 67%
rename from helm/templates/prometheus/cluster-role.yaml
rename to helm/templates/prometheus/role.yaml
index c2fea2205451e01474d1ab7ef1ca342a9d975dc9..321d2825b5b98d31fc34619e88994058bac74cab 100644
--- a/helm/templates/prometheus/cluster-role.yaml
+++ b/helm/templates/prometheus/role.yaml
@@ -1,12 +1,11 @@
-{{- if .Values.prometheus.clusterRole.enabled -}}
+{{- if .Values.prometheus.role.enabled -}}
 apiVersion: rbac.authorization.k8s.io/v1
-kind: ClusterRole
+kind: Role
 metadata:
   name: {{ template "theodolite.fullname" . }}-prometheus
 rules:
 - apiGroups: [""]
   resources:
-  - nodes
   - services
   - endpoints
   - pods
@@ -15,6 +14,4 @@ rules:
   resources:
   - configmaps
   verbs: ["get"]
-- nonResourceURLs: ["/metrics"]
-  verbs: ["get"]
-{{- end }}
\ No newline at end of file
+{{- end}}
diff --git a/helm/values.yaml b/helm/values.yaml
index e3e2143a2694a58be7fb4f48efe0723fc05fb0f7..34a32ce60927a751f645b7f8ff4af46793865797 100644
--- a/helm/values.yaml
+++ b/helm/values.yaml
@@ -5,7 +5,7 @@
 kafkaClient:
   enabled: false
   nodeSelector: {}
-  
+
 
 ####
 ## configuration of sub charts
@@ -52,6 +52,9 @@ grafana:
   service:
     nodePort: 31199
     type: NodePort
+  rbac:
+    pspEnabled: false
+    namespaced: true
 
 
 ###
@@ -155,6 +158,9 @@ cp-helm-charts:
   ## The interval between refreshing metrics
   pollIntervalSeconds: 15
 
+strimzi-kafka-operator:
+  createGlobalResources: true
+
 strimzi:
   enabled: true
   kafka:
@@ -193,6 +199,10 @@ strimzi:
 # Prometheus Monitoring Stack (Prometheus Operator)
 ###
 kube-prometheus-stack:
+  global:
+    rbac:
+      create: false
+
   commonLabels:
     appScope: titan-ccp
   
@@ -238,7 +248,14 @@ kube-prometheus-stack:
       releaseNamespace: true
       additional: []
     nodeSelector: {}
+    admissionWebhooks:
+      enabled: false
+    tls:
+      enabled: false
+    serviceAccount:
+      create: true
   
+  # We use our own Prometheus
   prometheus:
     enabled: false
 
@@ -250,12 +267,11 @@ prometheus:
   enabled: true
   nodeSelector: {}
   
-  # depends on your cluster security and permission settings, you may need to create the following resources
   serviceAccount:
     enabled: true
-  clusterRole:
+  role:
     enabled: true
-  clusterRoleBinding:
+  roleBinding:
     enabled: true
 
 ###
@@ -349,7 +365,7 @@ rbac:
   additionalRules: []
 
 randomScheduler:
-  enabled: true
+  enabled: false
   image: ghcr.io/cau-se/theodolite-random-scheduler
   imageTag: latest
   imagePullPolicy: Always
diff --git a/theodolite-benchmarks/buildSrc/src/main/groovy/theodolite.hazelcastjet.gradle b/theodolite-benchmarks/buildSrc/src/main/groovy/theodolite.hazelcastjet.gradle
new file mode 100644
index 0000000000000000000000000000000000000000..b092c97cf0e79895d4d6aafc594979b8f48dd167
--- /dev/null
+++ b/theodolite-benchmarks/buildSrc/src/main/groovy/theodolite.hazelcastjet.gradle
@@ -0,0 +1,45 @@
+plugins {
+  // common java conventions
+  id 'theodolite.java-conventions'
+
+  // make executable
+  id 'application'
+}
+
+repositories {
+  mavenCentral()
+  maven {
+    url "https://oss.sonatype.org/content/repositories/snapshots/"
+  }
+  maven {
+    url 'https://packages.confluent.io/maven/'
+  }
+}
+
+dependencies {
+  implementation('org.industrial-devops:titan-ccp-common:0.1.0-SNAPSHOT') { changing = true }
+  implementation('org.industrial-devops:titan-ccp-common-kafka:0.1.0-SNAPSHOT') { changing = true }
+  implementation 'com.google.guava:guava:24.1-jre'
+  implementation 'org.slf4j:slf4j-api:1.7.30'
+  implementation 'org.slf4j:slf4j-simple:1.7.30'
+
+
+  implementation 'io.confluent:kafka-avro-serializer:5.3.0'
+
+  implementation 'com.hazelcast.jet:hazelcast-jet:4.5'
+  implementation 'com.hazelcast.jet:hazelcast-jet-kafka:4.5'
+  implementation 'com.hazelcast:hazelcast-kubernetes:2.2.2'
+
+  implementation project(':hazelcastjet-commons')
+
+  testImplementation("junit:junit:4.13.2")
+  testRuntimeOnly("org.junit.vintage:junit-vintage-engine:5.8.2")
+
+  testImplementation 'com.hazelcast:hazelcast:4.2:tests'
+  testImplementation 'com.hazelcast.jet:hazelcast-jet-core:4.5:tests'
+
+}
+
+test {
+  useJUnitPlatform()
+}
diff --git a/theodolite-benchmarks/definitions/install-configmaps.sh b/theodolite-benchmarks/definitions/install-configmaps.sh
index 03d87bf971e98693be7936368421c91a83fd16bb..e6630ca08154631a395c151fac376859fc885495 100755
--- a/theodolite-benchmarks/definitions/install-configmaps.sh
+++ b/theodolite-benchmarks/definitions/install-configmaps.sh
@@ -4,6 +4,12 @@ kubectl create configmap benchmark-resources-uc2-flink --from-file uc2-flink/res
 kubectl create configmap benchmark-resources-uc3-flink --from-file uc3-flink/resources
 kubectl create configmap benchmark-resources-uc4-flink --from-file uc4-flink/resources
 
+kubectl create configmap benchmark-resources-uc1-hazelcastjet --from-file uc1-hazelcastjet/resources
+kubectl create configmap benchmark-resources-uc2-hazelcastjet --from-file uc2-hazelcastjet/resources
+kubectl create configmap benchmark-resources-uc3-hazelcastjet --from-file uc3-hazelcastjet/resources
+kubectl create configmap benchmark-resources-uc4-hazelcastjet --from-file uc4-hazelcastjet/resources
+
+
 # Kafka Streams
 kubectl create configmap benchmark-resources-uc1-kstreams --from-file uc1-kstreams/resources
 kubectl create configmap benchmark-resources-uc2-kstreams --from-file uc2-kstreams/resources
diff --git a/theodolite-benchmarks/definitions/uc1-hazelcastjet/resources/uc1-hazelcastjet-deployment.yaml b/theodolite-benchmarks/definitions/uc1-hazelcastjet/resources/uc1-hazelcastjet-deployment.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..cab5a625c323628ec6d6a7152e53d3ff8393a8ba
--- /dev/null
+++ b/theodolite-benchmarks/definitions/uc1-hazelcastjet/resources/uc1-hazelcastjet-deployment.yaml
@@ -0,0 +1,36 @@
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+  name: titan-ccp-aggregation
+spec:
+  selector:
+    matchLabels:
+      app: titan-ccp-aggregation
+  replicas: 1
+  template:
+    metadata:
+      labels:
+        app: titan-ccp-aggregation
+    spec:
+      terminationGracePeriodSeconds: 0
+      containers:
+        - name: uc-application
+          image: uc1-hazelcastjet
+          imagePullPolicy: "Never"
+          env:
+            - name: KAFKA_BOOTSTRAP_SERVERS
+              value: "theodolite-kafka-kafka-bootstrap:9092"
+            - name: SCHEMA_REGISTRY_URL
+              value: "http://theodolite-cp-schema-registry:8081"
+            - name: COMMIT_INTERVAL_MS # Set as default for the applications
+              value: "100"
+            - name: KUBERNETES_NAMESPACE
+              valueFrom:
+                fieldRef:
+                  fieldPath: metadata.namespace
+            - name: KUBERNETES_DNS_NAME
+              value: "titan-ccp-aggregation.$(KUBERNETES_NAMESPACE).svc.cluster.local"
+          resources:
+            limits:
+              memory: 4Gi
+              cpu: 1000m
\ No newline at end of file
diff --git a/theodolite-benchmarks/definitions/uc1-hazelcastjet/resources/uc1-hazelcastjet-service.yaml b/theodolite-benchmarks/definitions/uc1-hazelcastjet/resources/uc1-hazelcastjet-service.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..845ce7dd55c6e5d45724ec1eeabf8789e704fe77
--- /dev/null
+++ b/theodolite-benchmarks/definitions/uc1-hazelcastjet/resources/uc1-hazelcastjet-service.yaml
@@ -0,0 +1,16 @@
+apiVersion: v1
+kind: Service
+metadata:  
+  name: titan-ccp-aggregation
+  labels:
+    app: titan-ccp-aggregation
+spec:
+  type: ClusterIP
+  clusterIP: None
+  selector:    
+    app: titan-ccp-aggregation
+  ports:  
+    - name: coordination
+      port: 5701
+      targetPort: 5701
+      protocol: TCP
diff --git a/theodolite-benchmarks/definitions/uc1-hazelcastjet/uc1-benchmark-operator.yaml b/theodolite-benchmarks/definitions/uc1-hazelcastjet/uc1-benchmark-operator.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..ca8994c40350ed55e2a6b927c370b3d11a6d4bfa
--- /dev/null
+++ b/theodolite-benchmarks/definitions/uc1-hazelcastjet/uc1-benchmark-operator.yaml
@@ -0,0 +1,42 @@
+apiVersion: theodolite.com/v1
+kind: benchmark
+metadata:
+  name: uc1-hazelcastjet
+spec:
+  sut:
+    resources:
+      - configMap:
+          name: "benchmark-resources-uc1-hazelcastjet"
+          files:
+          - "uc1-hazelcastjet-deployment.yaml"
+          - "uc1-hazelcastjet-service.yaml"
+  loadGenerator:
+    resources:
+      - configMap:
+          name: "benchmark-resources-uc1-load-generator"
+          files:
+          - "uc1-load-generator-deployment.yaml"
+          - "uc1-load-generator-service.yaml"
+  resourceTypes:
+    - typeName: "Instances"
+      patchers:
+        - type: "ReplicaPatcher"
+          resource: "uc1-hazelcastjet-deployment.yaml"
+  loadTypes:
+    - typeName: "NumSensors"
+      patchers:
+        - type: "EnvVarPatcher"
+          resource: "uc1-load-generator-deployment.yaml"
+          properties:
+            container: "workload-generator"
+            variableName: "NUM_SENSORS"
+        - type: NumSensorsLoadGeneratorReplicaPatcher
+          resource: "uc1-load-generator-deployment.yaml"
+          properties:
+            loadGenMaxRecords: "150000"
+  kafkaConfig:
+    bootstrapServer: "theodolite-kafka-kafka-bootstrap:9092"
+    topics:
+      - name: "input"
+        numPartitions: 40
+        replicationFactor: 1
diff --git a/theodolite-benchmarks/definitions/uc2-hazelcastjet/resources/uc2-hazelcastjet-deployment.yaml b/theodolite-benchmarks/definitions/uc2-hazelcastjet/resources/uc2-hazelcastjet-deployment.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..0f5000902ddf9c12f67643cc35ffc3c882970a72
--- /dev/null
+++ b/theodolite-benchmarks/definitions/uc2-hazelcastjet/resources/uc2-hazelcastjet-deployment.yaml
@@ -0,0 +1,40 @@
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+  name: titan-ccp-aggregation
+spec:
+  selector:
+    matchLabels:
+      app: titan-ccp-aggregation
+  replicas: 1
+  template:
+    metadata:
+      labels:
+        app: titan-ccp-aggregation
+    spec:
+      terminationGracePeriodSeconds: 0
+      containers:
+        - name: uc-application
+          image: uc2-hazelcastjet
+          imagePullPolicy: "Never"
+          env:
+            - name: KAFKA_BOOTSTRAP_SERVERS
+              value: "theodolite-kafka-kafka-bootstrap:9092"
+            - name: SCHEMA_REGISTRY_URL
+              value: "http://theodolite-cp-schema-registry:8081"
+            - name: COMMIT_INTERVAL_MS # Set as default for the applications
+              value: "100"
+            - name: DOWNSAMPLE_INTERVAL
+              value: "5000"
+            #- name: KUBERNETES_DNS_NAME
+            #  value: "titan-ccp-aggregation"
+            - name: KUBERNETES_NAMESPACE
+              valueFrom:
+                fieldRef:
+                  fieldPath: metadata.namespace
+            - name: KUBERNETES_DNS_NAME
+              value: "titan-ccp-aggregation.$(KUBERNETES_NAMESPACE).svc.cluster.local"
+          resources:
+            limits:
+              memory: 4Gi
+              cpu: 1000m
\ No newline at end of file
diff --git a/theodolite-benchmarks/definitions/uc2-hazelcastjet/resources/uc2-hazelcastjet-service.yaml b/theodolite-benchmarks/definitions/uc2-hazelcastjet/resources/uc2-hazelcastjet-service.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..845ce7dd55c6e5d45724ec1eeabf8789e704fe77
--- /dev/null
+++ b/theodolite-benchmarks/definitions/uc2-hazelcastjet/resources/uc2-hazelcastjet-service.yaml
@@ -0,0 +1,16 @@
+apiVersion: v1
+kind: Service
+metadata:  
+  name: titan-ccp-aggregation
+  labels:
+    app: titan-ccp-aggregation
+spec:
+  type: ClusterIP
+  clusterIP: None
+  selector:    
+    app: titan-ccp-aggregation
+  ports:  
+    - name: coordination
+      port: 5701
+      targetPort: 5701
+      protocol: TCP
diff --git a/theodolite-benchmarks/definitions/uc2-hazelcastjet/uc2-benchmark-operator.yaml b/theodolite-benchmarks/definitions/uc2-hazelcastjet/uc2-benchmark-operator.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..436bcc790c50c86e96b3b1853b198a0f6da1aec9
--- /dev/null
+++ b/theodolite-benchmarks/definitions/uc2-hazelcastjet/uc2-benchmark-operator.yaml
@@ -0,0 +1,47 @@
+apiVersion: theodolite.com/v1
+kind: benchmark
+metadata:
+  name: uc2-hazelcastjet
+spec:
+  sut:
+    resources:
+      - configMap:
+          name: "benchmark-resources-uc2-hazelcastjet"
+          files:
+            - "uc2-hazelcastjet-deployment.yaml"
+            - "uc2-hazelcastjet-service.yaml"
+  loadGenerator:
+    resources:
+      - configMap:
+          name: "benchmark-resources-uc2-load-generator"
+          files:
+            - "uc2-load-generator-deployment.yaml"
+            - "uc2-load-generator-service.yaml"
+  resourceTypes:
+    - typeName: "Instances"
+      patchers:
+        - type: "ReplicaPatcher"
+          resource: "uc2-hazelcastjet-deployment.yaml"
+  loadTypes:
+    - typeName: "NumSensors"
+      patchers:
+        - type: "EnvVarPatcher"
+          resource: "uc2-load-generator-deployment.yaml"
+          properties:
+            container: "workload-generator"
+            variableName: "NUM_SENSORS"
+        - type: NumSensorsLoadGeneratorReplicaPatcher
+          resource: "uc2-load-generator-deployment.yaml"
+          properties:
+            loadGenMaxRecords: "150000"
+  kafkaConfig:
+    bootstrapServer: "theodolite-kafka-kafka-bootstrap:9092"
+    topics:
+      - name: "input"
+        numPartitions: 40
+        replicationFactor: 1
+      - name: "output"
+        numPartitions: 40
+        replicationFactor: 1
+      - name: "theodolite-.*"
+        removeOnly: True
diff --git a/theodolite-benchmarks/definitions/uc3-hazelcastjet/resources/uc3-hazelcastjet-deployment.yaml b/theodolite-benchmarks/definitions/uc3-hazelcastjet/resources/uc3-hazelcastjet-deployment.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..317d30328ec9ba3c0c30bf87733b3801e73d2477
--- /dev/null
+++ b/theodolite-benchmarks/definitions/uc3-hazelcastjet/resources/uc3-hazelcastjet-deployment.yaml
@@ -0,0 +1,42 @@
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+  name: titan-ccp-aggregation
+spec:
+  selector:
+    matchLabels:
+      app: titan-ccp-aggregation
+  replicas: 1
+  template:
+    metadata:
+      labels:
+        app: titan-ccp-aggregation
+    spec:
+      terminationGracePeriodSeconds: 0
+      containers:
+        - name: uc-application
+          image: uc3-hazelcastjet
+          imagePullPolicy: "Never"
+          env:
+            - name: KAFKA_BOOTSTRAP_SERVERS
+              value: "theodolite-kafka-kafka-bootstrap:9092"
+            - name: SCHEMA_REGISTRY_URL
+              value: "http://theodolite-cp-schema-registry:8081"
+            - name: COMMIT_INTERVAL_MS # Set as default for the applications
+              value: "100"
+            - name: WINDOW_SIZE_IN_SECONDS
+              value: "50"
+            - name: HOPPING_SIZE_IN_SECONDS
+              value: "1"
+            #- name: KUBERNETES_DNS_NAME
+            #  value: "titan-ccp-aggregation"
+            - name: KUBERNETES_NAMESPACE
+              valueFrom:
+                fieldRef:
+                  fieldPath: metadata.namespace
+            - name: KUBERNETES_DNS_NAME
+              value: "titan-ccp-aggregation.$(KUBERNETES_NAMESPACE).svc.cluster.local"
+          resources:
+            limits:
+              memory: 4Gi
+              cpu: 1000m
\ No newline at end of file
diff --git a/theodolite-benchmarks/definitions/uc3-hazelcastjet/resources/uc3-hazelcastjet-service.yaml b/theodolite-benchmarks/definitions/uc3-hazelcastjet/resources/uc3-hazelcastjet-service.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..845ce7dd55c6e5d45724ec1eeabf8789e704fe77
--- /dev/null
+++ b/theodolite-benchmarks/definitions/uc3-hazelcastjet/resources/uc3-hazelcastjet-service.yaml
@@ -0,0 +1,16 @@
+apiVersion: v1
+kind: Service
+metadata:  
+  name: titan-ccp-aggregation
+  labels:
+    app: titan-ccp-aggregation
+spec:
+  type: ClusterIP
+  clusterIP: None
+  selector:    
+    app: titan-ccp-aggregation
+  ports:  
+    - name: coordination
+      port: 5701
+      targetPort: 5701
+      protocol: TCP
diff --git a/theodolite-benchmarks/definitions/uc3-hazelcastjet/uc3-benchmark-operator.yaml b/theodolite-benchmarks/definitions/uc3-hazelcastjet/uc3-benchmark-operator.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..3d9f755dc741458b0c8a27fe5ef450b09478b8cb
--- /dev/null
+++ b/theodolite-benchmarks/definitions/uc3-hazelcastjet/uc3-benchmark-operator.yaml
@@ -0,0 +1,47 @@
+apiVersion: theodolite.com/v1
+kind: benchmark
+metadata:
+  name: uc3-hazelcastjet
+spec:
+  sut:
+    resources:
+      - configMap:
+          name: "benchmark-resources-uc3-hazelcastjet"
+          files:
+            - "uc3-hazelcastjet-deployment.yaml"
+            - "uc3-hazelcastjet-service.yaml"
+  loadGenerator:
+    resources:
+      - configMap:
+          name: "benchmark-resources-uc3-load-generator"
+          files:
+            - "uc3-load-generator-deployment.yaml"
+            - "uc3-load-generator-service.yaml"
+  resourceTypes:
+    - typeName: "Instances"
+      patchers:
+        - type: "ReplicaPatcher"
+          resource: "uc3-hazelcastjet-deployment.yaml"
+  loadTypes:
+    - typeName: "NumSensors"
+      patchers:
+        - type: "EnvVarPatcher"
+          resource: "uc3-load-generator-deployment.yaml"
+          properties:
+            container: "workload-generator"
+            variableName: "NUM_SENSORS"
+        - type: NumSensorsLoadGeneratorReplicaPatcher
+          resource: "uc3-load-generator-deployment.yaml"
+          properties:
+            loadGenMaxRecords: "150000"
+  kafkaConfig:
+    bootstrapServer: "theodolite-kafka-kafka-bootstrap:9092"
+    topics:
+      - name: "input"
+        numPartitions: 40
+        replicationFactor: 1
+      - name: "output"
+        numPartitions: 40
+        replicationFactor: 1
+      - name: "theodolite-.*"
+        removeOnly: True
diff --git a/theodolite-benchmarks/definitions/uc4-flink/resources/taskmanager-deployment.yaml b/theodolite-benchmarks/definitions/uc4-flink/resources/taskmanager-deployment.yaml
index 7db560db53bac827dd92386e0de5621a2b911e35..cc1efa23c32220c7c664d8aaa4669f3af6492d15 100644
--- a/theodolite-benchmarks/definitions/uc4-flink/resources/taskmanager-deployment.yaml
+++ b/theodolite-benchmarks/definitions/uc4-flink/resources/taskmanager-deployment.yaml
@@ -17,7 +17,7 @@ spec:
       terminationGracePeriodSeconds: 0
       containers:
         - name: taskmanager
-          image: ghcr.io/cau-se/theodolite-uc4-flink:latest
+          image: ghcr.io/cau-se/theodolite-uc4-flink:latest
           env:
             - name: KAFKA_BOOTSTRAP_SERVERS
               value: "theodolite-kafka-kafka-bootstrap:9092"
diff --git a/theodolite-benchmarks/definitions/uc4-hazelcastjet/resources/uc4-hazelcastjet-deployment.yaml b/theodolite-benchmarks/definitions/uc4-hazelcastjet/resources/uc4-hazelcastjet-deployment.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..f0736e585400d26481272d5b3d75cd216d55527d
--- /dev/null
+++ b/theodolite-benchmarks/definitions/uc4-hazelcastjet/resources/uc4-hazelcastjet-deployment.yaml
@@ -0,0 +1,40 @@
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+  name: titan-ccp-aggregation
+spec:
+  selector:
+    matchLabels:
+      app: titan-ccp-aggregation
+  replicas: 1
+  template:
+    metadata:
+      labels:
+        app: titan-ccp-aggregation
+    spec:
+      terminationGracePeriodSeconds: 0
+      containers:
+        - name: uc-application
+          image: uc4-hazelcastjet
+          imagePullPolicy: "Never"
+          env:
+            - name: KAFKA_BOOTSTRAP_SERVERS
+              value: "theodolite-kafka-kafka-bootstrap:9092"
+            - name: SCHEMA_REGISTRY_URL
+              value: "http://theodolite-cp-schema-registry:8081"
+            - name: COMMIT_INTERVAL_MS # Set as default for the applications
+              value: "100"
+            - name: WINDOW_SIZE
+              value: "5000"
+            #- name: KUBERNETES_DNS_NAME
+            #  value: "titan-ccp-aggregation"
+            - name: KUBERNETES_NAMESPACE
+              valueFrom:
+                fieldRef:
+                  fieldPath: metadata.namespace
+            - name: KUBERNETES_DNS_NAME
+              value: "titan-ccp-aggregation.$(KUBERNETES_NAMESPACE).svc.cluster.local"
+          resources:
+            limits:
+              memory: 4Gi
+              cpu: 1000m
\ No newline at end of file
diff --git a/theodolite-benchmarks/definitions/uc4-hazelcastjet/resources/uc4-hazelcastjet-service.yaml b/theodolite-benchmarks/definitions/uc4-hazelcastjet/resources/uc4-hazelcastjet-service.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..845ce7dd55c6e5d45724ec1eeabf8789e704fe77
--- /dev/null
+++ b/theodolite-benchmarks/definitions/uc4-hazelcastjet/resources/uc4-hazelcastjet-service.yaml
@@ -0,0 +1,16 @@
+apiVersion: v1
+kind: Service
+metadata:  
+  name: titan-ccp-aggregation
+  labels:
+    app: titan-ccp-aggregation
+spec:
+  type: ClusterIP
+  clusterIP: None
+  selector:    
+    app: titan-ccp-aggregation
+  ports:  
+    - name: coordination
+      port: 5701
+      targetPort: 5701
+      protocol: TCP
diff --git a/theodolite-benchmarks/definitions/uc4-hazelcastjet/uc4-benchmark-operator.yaml b/theodolite-benchmarks/definitions/uc4-hazelcastjet/uc4-benchmark-operator.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..f0151969ccb2ff34558c4a56c78f17db9fd3678e
--- /dev/null
+++ b/theodolite-benchmarks/definitions/uc4-hazelcastjet/uc4-benchmark-operator.yaml
@@ -0,0 +1,54 @@
+apiVersion: theodolite.com/v1
+kind: benchmark
+metadata:
+  name: uc4-hazelcastjet
+spec:
+  sut:
+    resources:
+      - configMap:
+          name: "benchmark-resources-uc4-hazelcastjet"
+          files:
+            - "uc4-hazelcastjet-deployment.yaml"
+            - "uc4-hazelcastjet-service.yaml"
+  loadGenerator:
+    resources:
+      - configMap:
+          name: "benchmark-resources-uc4-load-generator"
+          files:
+            - "uc4-load-generator-deployment.yaml"
+            - "uc4-load-generator-service.yaml"
+  resourceTypes:
+    - typeName: "Instances"
+      patchers:
+        - type: "ReplicaPatcher"
+          resource: "uc4-hazelcastjet-deployment.yaml"
+  loadTypes:
+    - typeName: "NumNestedGroups"
+      patchers:
+        - type: "EnvVarPatcher"
+          resource: "uc4-load-generator-deployment.yaml"
+          properties:
+            container: "workload-generator"
+            variableName: "NUM_SENSORS"
+        - type: NumNestedGroupsLoadGeneratorReplicaPatcher
+          resource: "uc4-load-generator-deployment.yaml"
+          properties:
+            loadGenMaxRecords: "150000"
+            numSensors: "4.0"
+  kafkaConfig:
+    bootstrapServer: "theodolite-kafka-kafka-bootstrap:9092"
+    topics:
+      - name: "input"
+        numPartitions: 40
+        replicationFactor: 1
+      - name: "output"
+        numPartitions: 40
+        replicationFactor: 1
+      - name: "configuration"
+        numPartitions: 40
+        replicationFactor: 1
+      - name: "aggregation-feedback"
+        numPartitions: 40
+        replicationFactor: 1
+      - name: "theodolite-.*"
+        removeOnly: True
diff --git a/theodolite-benchmarks/docker-test/uc1-hazelcastjet/docker-compose.yml b/theodolite-benchmarks/docker-test/uc1-hazelcastjet/docker-compose.yml
new file mode 100644
index 0000000000000000000000000000000000000000..fd91f14e241f952dc0d0ba3b4230db2f5deffa30
--- /dev/null
+++ b/theodolite-benchmarks/docker-test/uc1-hazelcastjet/docker-compose.yml
@@ -0,0 +1,63 @@
+version: '2.2'
+services:
+  zookeeper:
+    image: confluentinc/cp-zookeeper
+    expose:
+      - "2181"
+    environment:
+      ZOOKEEPER_CLIENT_PORT: 2181
+  kafka:
+    image: wurstmeister/kafka
+    expose:
+      - "9092"
+    #ports:
+    #  - 19092:19092
+    environment:
+      KAFKA_LISTENERS: PLAINTEXT://:9092,CONNECTIONS_FROM_HOST://:19092
+      KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:9092,CONNECTIONS_FROM_HOST://localhost:19092
+      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,CONNECTIONS_FROM_HOST:PLAINTEXT
+      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
+      KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 30000
+      KAFKA_AUTO_CREATE_TOPICS_ENABLE: "false"
+      KAFKA_CREATE_TOPICS: "input:3:1"
+  kcat:
+    image: edenhill/kcat:1.7.1
+    entrypoint: "sh"
+    tty: true
+  schema-registry:
+    image: confluentinc/cp-schema-registry:7.0.1
+    depends_on:
+      - zookeeper
+      - kafka
+    restart: "on-failure"
+    expose:
+      - "8081"
+    #ports:
+    #  - 8081:8081
+    environment:
+      SCHEMA_REGISTRY_HOST_NAME: schema-registry
+      SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: kafka:9092
+  benchmark:
+    image: ghcr.io/cau-se/theodolite-uc1-hazelcastjet:${THEODOLITE_TAG:-latest}
+    depends_on:
+      - schema-registry
+      - kafka
+    expose:
+      - 5701
+    #ports:
+    #  - 5701:5701
+    environment:
+      BOOTSTRAP_SERVER: benchmark:5701
+      KAFKA_BOOTSTRAP_SERVERS: kafka:9092
+      SCHEMA_REGISTRY_URL: http://schema-registry:8081
+  load-generator: 
+    image: ghcr.io/cau-se/theodolite-uc1-workload-generator:${THEODOLITE_TAG:-latest}
+    depends_on:
+      - schema-registry
+      - kafka
+    environment:
+      BOOTSTRAP_SERVER: load-generator:5701
+      PORT: 5701
+      KAFKA_BOOTSTRAP_SERVERS: kafka:9092
+      SCHEMA_REGISTRY_URL: http://schema-registry:8081
+      NUM_SENSORS: 10
diff --git a/theodolite-benchmarks/docker-test/uc1-hazelcastjet/test.sh b/theodolite-benchmarks/docker-test/uc1-hazelcastjet/test.sh
new file mode 100755
index 0000000000000000000000000000000000000000..3dbf43a74a9ad17784ca9e4e476dc70ed11c731f
--- /dev/null
+++ b/theodolite-benchmarks/docker-test/uc1-hazelcastjet/test.sh
@@ -0,0 +1,12 @@
+#!/bin/sh
+
+sleep 55s # to let the benchmark run and produce some output
+
+docker-compose logs --tail 100 benchmark |
+    sed -n "s/^.*Record:\s\(\S*\)$/\1/p" |
+    tee /dev/stderr |
+    jq .identifier |
+    sort |
+    uniq |
+    wc -l |
+    grep "\b10\b"
diff --git a/theodolite-benchmarks/docker-test/uc1-kstreams/docker-compose.yml b/theodolite-benchmarks/docker-test/uc1-kstreams/docker-compose.yml
index c85ce305c2f1383a77d4c405d52089ec1d2b02a6..a673db5ced5b834632a1bca6f3fb4a2da2b68296 100755
--- a/theodolite-benchmarks/docker-test/uc1-kstreams/docker-compose.yml
+++ b/theodolite-benchmarks/docker-test/uc1-kstreams/docker-compose.yml
@@ -38,7 +38,7 @@ services:
       SCHEMA_REGISTRY_HOST_NAME: schema-registry
       SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: kafka:9092
   benchmark:
-    image: ghcr.io/cau-se/theodolite-uc1-kstreams-app:latest
+    image: ghcr.io/cau-se/theodolite-uc1-kstreams-app:${THEODOLITE_TAG:-latest}
     depends_on:
       - schema-registry
       - kafka
diff --git a/theodolite-benchmarks/docker-test/uc2-hazelcastjet/docker-compose.yml b/theodolite-benchmarks/docker-test/uc2-hazelcastjet/docker-compose.yml
new file mode 100644
index 0000000000000000000000000000000000000000..92b90823e31f79e68b301ff039618c9520c92019
--- /dev/null
+++ b/theodolite-benchmarks/docker-test/uc2-hazelcastjet/docker-compose.yml
@@ -0,0 +1,64 @@
+version: '2.2'
+services:
+  zookeeper:
+    image: confluentinc/cp-zookeeper
+    expose:
+      - "2181"
+    environment:
+      ZOOKEEPER_CLIENT_PORT: 2181
+  kafka:
+    image: wurstmeister/kafka
+    expose:
+      - "9092"
+    #ports:
+    #  - 19092:19092
+    environment:
+      KAFKA_LISTENERS: PLAINTEXT://:9092,CONNECTIONS_FROM_HOST://:19092
+      KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:9092,CONNECTIONS_FROM_HOST://localhost:19092
+      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,CONNECTIONS_FROM_HOST:PLAINTEXT
+      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
+      KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 30000
+      KAFKA_AUTO_CREATE_TOPICS_ENABLE: "false"
+      KAFKA_CREATE_TOPICS: "input:3:1,output:3:1"
+  kcat:
+    image: edenhill/kcat:1.7.1
+    entrypoint: "sh"
+    tty: true
+  schema-registry:
+    image: confluentinc/cp-schema-registry:7.0.1
+    depends_on:
+      - zookeeper
+      - kafka
+    restart: "on-failure"
+    expose:
+      - "8081"
+    #ports:
+    #  - 8081:8081
+    environment:
+      SCHEMA_REGISTRY_HOST_NAME: schema-registry
+      SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: kafka:9092
+  benchmark:
+    image: ghcr.io/cau-se/theodolite-uc2-hazelcastjet:${THEODOLITE_TAG:-latest}
+    depends_on:
+      - schema-registry
+      - kafka
+    expose:
+      - 5701
+    #ports:
+    #  - 5701:5701
+    environment:
+      BOOTSTRAP_SERVER: benchmark:5701
+      KAFKA_BOOTSTRAP_SERVERS: kafka:9092
+      SCHEMA_REGISTRY_URL: http://schema-registry:8081
+      DOWNSAMPLE_INTERVAL: 5000
+  load-generator: 
+    image: ghcr.io/cau-se/theodolite-uc2-workload-generator:${THEODOLITE_TAG:-latest}
+    depends_on:
+      - schema-registry
+      - kafka
+    environment:
+      BOOTSTRAP_SERVER: load-generator:5701
+      PORT: 5701
+      KAFKA_BOOTSTRAP_SERVERS: kafka:9092
+      SCHEMA_REGISTRY_URL: http://schema-registry:8081
+      NUM_SENSORS: 10
diff --git a/theodolite-benchmarks/docker-test/uc2-hazelcastjet/test.sh b/theodolite-benchmarks/docker-test/uc2-hazelcastjet/test.sh
new file mode 100755
index 0000000000000000000000000000000000000000..be411755b74249d90756e445f7e67dc07bf5ebab
--- /dev/null
+++ b/theodolite-benchmarks/docker-test/uc2-hazelcastjet/test.sh
@@ -0,0 +1,11 @@
+#!/bin/sh
+
+until docker-compose exec -T kcat kcat -L -b kafka:9092 -t output -J | jq -r '.topics[0].partitions | length' | grep "\b3\b"; do sleep 5s; done
+
+docker-compose exec -T kcat kcat -C -b kafka:9092 -t output -s key=s -s value=s -r http://schema-registry:8081 -f '%k:%s\n' -c 20 |
+    tee /dev/stderr |
+    awk -F ':' '!/^%/ {print $1}' |
+    sort |
+    uniq |
+    wc -l |
+    grep "\b10\b"
\ No newline at end of file
diff --git a/theodolite-benchmarks/docker-test/uc3-hazelcastjet/docker-compose.yml b/theodolite-benchmarks/docker-test/uc3-hazelcastjet/docker-compose.yml
new file mode 100644
index 0000000000000000000000000000000000000000..d7c3fe7a017c24e0b212661f0b0b34c2a1fee32c
--- /dev/null
+++ b/theodolite-benchmarks/docker-test/uc3-hazelcastjet/docker-compose.yml
@@ -0,0 +1,65 @@
+version: '2.2'
+services:
+  zookeeper:
+    image: confluentinc/cp-zookeeper
+    expose:
+      - "2181"
+    environment:
+      ZOOKEEPER_CLIENT_PORT: 2181
+  kafka:
+    image: wurstmeister/kafka
+    expose:
+      - "9092"
+    #ports:
+    #  - 19092:19092
+    environment:
+      KAFKA_LISTENERS: PLAINTEXT://:9092,CONNECTIONS_FROM_HOST://:19092
+      KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:9092,CONNECTIONS_FROM_HOST://localhost:19092
+      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,CONNECTIONS_FROM_HOST:PLAINTEXT
+      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
+      KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 30000
+      KAFKA_AUTO_CREATE_TOPICS_ENABLE: "false"
+      KAFKA_CREATE_TOPICS: "input:3:1,output:3:1"
+  kcat:
+    image: edenhill/kcat:1.7.1
+    entrypoint: "sh"
+    tty: true
+  schema-registry:
+    image: confluentinc/cp-schema-registry:7.0.1
+    depends_on:
+      - zookeeper
+      - kafka
+    restart: "on-failure"
+    expose:
+      - "8081"
+    #ports:
+    #  - 8081:8081
+    environment:
+      SCHEMA_REGISTRY_HOST_NAME: schema-registry
+      SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: kafka:9092
+  benchmark:
+    image: ghcr.io/cau-se/theodolite-uc3-hazelcastjet:${THEODOLITE_TAG:-latest}
+    depends_on:
+      - schema-registry
+      - kafka
+    expose:
+      - 5701
+    #ports:
+    #  - 5701:5701
+    environment:
+      BOOTSTRAP_SERVER: benchmark:5701
+      KAFKA_BOOTSTRAP_SERVERS: kafka:9092
+      SCHEMA_REGISTRY_URL: http://schema-registry:8081
+      WINDOW_SIZE_IN_SECONDS: 50
+      HOPPING_SIZE_IN_SECONDS: 5
+  load-generator:
+    image: ghcr.io/cau-se/theodolite-uc3-workload-generator:${THEODOLITE_TAG:-latest}
+    depends_on:
+      - schema-registry
+      - kafka
+    environment:
+      BOOTSTRAP_SERVER: load-generator:5701
+      PORT: 5701
+      KAFKA_BOOTSTRAP_SERVERS: kafka:9092
+      SCHEMA_REGISTRY_URL: http://schema-registry:8081
+      NUM_SENSORS: 10
diff --git a/theodolite-benchmarks/docker-test/uc3-hazelcastjet/test.sh b/theodolite-benchmarks/docker-test/uc3-hazelcastjet/test.sh
new file mode 100755
index 0000000000000000000000000000000000000000..217a730f73fa1fee3f875da34edd9047ed9221db
--- /dev/null
+++ b/theodolite-benchmarks/docker-test/uc3-hazelcastjet/test.sh
@@ -0,0 +1,11 @@
+#!/bin/sh
+
+until docker-compose exec -T kcat kcat -L -b kafka:9092 -t output -J | jq -r '.topics[0].partitions | length' | grep "\b3\b"; do sleep 5s; done
+
+docker-compose exec -T kcat kcat -C -b kafka:9092 -t output -s key=s -s value=s -r http://schema-registry:8081 -f '%k:%s\n' -c 600 |
+    tee /dev/stderr |
+    awk -F ':' '!/^%/ {print $1}' |
+    sort |
+    uniq |
+    wc -l |
+    grep "\b10\b"
\ No newline at end of file
diff --git a/theodolite-benchmarks/docker-test/uc4-hazelcastjet/docker-compose.yml b/theodolite-benchmarks/docker-test/uc4-hazelcastjet/docker-compose.yml
new file mode 100644
index 0000000000000000000000000000000000000000..9a254f5228677322ed98afcd73349cf7a50d80bc
--- /dev/null
+++ b/theodolite-benchmarks/docker-test/uc4-hazelcastjet/docker-compose.yml
@@ -0,0 +1,65 @@
+version: '2.2'
+services:
+  zookeeper:
+    image: confluentinc/cp-zookeeper
+    expose:
+      - "2181"
+    environment:
+      ZOOKEEPER_CLIENT_PORT: 2181
+  kafka:
+    image: wurstmeister/kafka
+    expose:
+      - "9092"
+    #ports:
+    #  - 19092:19092
+    environment:
+      KAFKA_LISTENERS: PLAINTEXT://:9092,CONNECTIONS_FROM_HOST://:19092
+      KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:9092,CONNECTIONS_FROM_HOST://localhost:19092
+      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,CONNECTIONS_FROM_HOST:PLAINTEXT
+      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
+      KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 30000
+      KAFKA_AUTO_CREATE_TOPICS_ENABLE: "false"
+      KAFKA_CREATE_TOPICS: "input:3:1,output:3:1,configuration:3:1,aggregation-feedback:3:1"
+  kcat:
+    image: edenhill/kcat:1.7.1
+    entrypoint: "sh"
+    tty: true
+  schema-registry:
+    image: confluentinc/cp-schema-registry:7.0.1
+    depends_on:
+      - zookeeper
+      - kafka
+    restart: "on-failure"
+    expose:
+      - "8081"
+    #ports:
+    #  - 8081:8081
+    environment:
+      SCHEMA_REGISTRY_HOST_NAME: schema-registry
+      SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: kafka:9092
+  benchmark:
+    image: ghcr.io/cau-se/theodolite-uc4-hazelcastjet:${THEODOLITE_TAG:-latest}
+    depends_on:
+      - schema-registry
+      - kafka
+    expose:
+      - 5701
+    #ports:
+    #  - 5701:5701
+    environment:
+      BOOTSTRAP_SERVER: benchmark:5701
+      KAFKA_BOOTSTRAP_SERVERS: kafka:9092
+      SCHEMA_REGISTRY_URL: http://schema-registry:8081
+      WINDOW_SIZE_UC4: 5000
+  load-generator: 
+    image: ghcr.io/cau-se/theodolite-uc4-workload-generator:${THEODOLITE_TAG:-latest}
+    depends_on:
+      - schema-registry
+      - kafka
+    environment:
+      BOOTSTRAP_SERVER: load-generator:5701
+      PORT: 5701
+      KAFKA_BOOTSTRAP_SERVERS: kafka:9092
+      SCHEMA_REGISTRY_URL: http://schema-registry:8081
+      NUM_SENSORS: 4
+      NUM_NESTED_GROUPS: 3
diff --git a/theodolite-benchmarks/docker-test/uc4-hazelcastjet/test.sh b/theodolite-benchmarks/docker-test/uc4-hazelcastjet/test.sh
new file mode 100755
index 0000000000000000000000000000000000000000..d9bb6ccf241935c39df63ea5e2f0fce02476e976
--- /dev/null
+++ b/theodolite-benchmarks/docker-test/uc4-hazelcastjet/test.sh
@@ -0,0 +1,11 @@
#!/bin/sh

# Block until the "output" topic exists with its expected 3 partitions
# (created via KAFKA_CREATE_TOPICS in the compose file).
until docker-compose exec -T kcat kcat -L -b kafka:9092 -t output -J | jq -r '.topics[0].partitions | length' | grep "\b3\b"; do sleep 5s; done

# Consume 600 Avro-encoded records from the output topic and verify that
# exactly 21 distinct keys are observed.
docker-compose exec -T kcat kcat -C -b kafka:9092 -t output -s key=s -s value=avro -r http://schema-registry:8081 -f '%k:%s\n' -c 600 |
    tee /dev/stderr |
    awk -F ':' '!/^%/ {print $1}' |
    sort |
    uniq |
    wc -l |
    grep "\b21\b"
\ No newline at end of file
diff --git a/theodolite-benchmarks/hazelcastjet-commons/.settings/org.eclipse.jdt.ui.prefs b/theodolite-benchmarks/hazelcastjet-commons/.settings/org.eclipse.jdt.ui.prefs
new file mode 100644
index 0000000000000000000000000000000000000000..b2a15f439cf1844efe56f1ac0d82a2884e66cb9d
--- /dev/null
+++ b/theodolite-benchmarks/hazelcastjet-commons/.settings/org.eclipse.jdt.ui.prefs
@@ -0,0 +1,286 @@
+cleanup.add_all=false
+cleanup.add_default_serial_version_id=true
+cleanup.add_generated_serial_version_id=false
+cleanup.add_missing_annotations=true
+cleanup.add_missing_deprecated_annotations=true
+cleanup.add_missing_methods=false
+cleanup.add_missing_nls_tags=false
+cleanup.add_missing_override_annotations=true
+cleanup.add_missing_override_annotations_interface_methods=true
+cleanup.add_serial_version_id=false
+cleanup.always_use_blocks=true
+cleanup.always_use_parentheses_in_expressions=false
+cleanup.always_use_this_for_non_static_field_access=true
+cleanup.always_use_this_for_non_static_method_access=true
+cleanup.array_with_curly=false
+cleanup.arrays_fill=false
+cleanup.bitwise_conditional_expression=false
+cleanup.boolean_literal=false
+cleanup.boolean_value_rather_than_comparison=true
+cleanup.break_loop=false
+cleanup.collection_cloning=false
+cleanup.comparing_on_criteria=false
+cleanup.comparison_statement=false
+cleanup.controlflow_merge=false
+cleanup.convert_functional_interfaces=false
+cleanup.convert_to_enhanced_for_loop=true
+cleanup.convert_to_enhanced_for_loop_if_loop_var_used=true
+cleanup.convert_to_switch_expressions=false
+cleanup.correct_indentation=true
+cleanup.do_while_rather_than_while=true
+cleanup.double_negation=false
+cleanup.else_if=false
+cleanup.embedded_if=false
+cleanup.evaluate_nullable=false
+cleanup.extract_increment=false
+cleanup.format_source_code=true
+cleanup.format_source_code_changes_only=false
+cleanup.hash=false
+cleanup.if_condition=false
+cleanup.insert_inferred_type_arguments=false
+cleanup.instanceof=false
+cleanup.instanceof_keyword=false
+cleanup.invert_equals=false
+cleanup.join=false
+cleanup.lazy_logical_operator=false
+cleanup.make_local_variable_final=true
+cleanup.make_parameters_final=true
+cleanup.make_private_fields_final=true
+cleanup.make_type_abstract_if_missing_method=false
+cleanup.make_variable_declarations_final=true
+cleanup.map_cloning=false
+cleanup.merge_conditional_blocks=false
+cleanup.multi_catch=false
+cleanup.never_use_blocks=false
+cleanup.never_use_parentheses_in_expressions=true
+cleanup.no_string_creation=false
+cleanup.no_super=false
+cleanup.number_suffix=false
+cleanup.objects_equals=false
+cleanup.one_if_rather_than_duplicate_blocks_that_fall_through=true
+cleanup.operand_factorization=false
+cleanup.organize_imports=true
+cleanup.overridden_assignment=false
+cleanup.plain_replacement=false
+cleanup.precompile_regex=false
+cleanup.primitive_comparison=false
+cleanup.primitive_parsing=false
+cleanup.primitive_rather_than_wrapper=true
+cleanup.primitive_serialization=false
+cleanup.pull_out_if_from_if_else=false
+cleanup.pull_up_assignment=false
+cleanup.push_down_negation=false
+cleanup.qualify_static_field_accesses_with_declaring_class=false
+cleanup.qualify_static_member_accesses_through_instances_with_declaring_class=true
+cleanup.qualify_static_member_accesses_through_subtypes_with_declaring_class=true
+cleanup.qualify_static_member_accesses_with_declaring_class=true
+cleanup.qualify_static_method_accesses_with_declaring_class=false
+cleanup.reduce_indentation=false
+cleanup.redundant_comparator=false
+cleanup.redundant_falling_through_block_end=false
+cleanup.remove_private_constructors=true
+cleanup.remove_redundant_modifiers=false
+cleanup.remove_redundant_semicolons=true
+cleanup.remove_redundant_type_arguments=true
+cleanup.remove_trailing_whitespaces=true
+cleanup.remove_trailing_whitespaces_all=true
+cleanup.remove_trailing_whitespaces_ignore_empty=false
+cleanup.remove_unnecessary_array_creation=false
+cleanup.remove_unnecessary_casts=true
+cleanup.remove_unnecessary_nls_tags=true
+cleanup.remove_unused_imports=true
+cleanup.remove_unused_local_variables=false
+cleanup.remove_unused_private_fields=true
+cleanup.remove_unused_private_members=false
+cleanup.remove_unused_private_methods=true
+cleanup.remove_unused_private_types=true
+cleanup.return_expression=false
+cleanup.simplify_lambda_expression_and_method_ref=false
+cleanup.single_used_field=false
+cleanup.sort_members=false
+cleanup.sort_members_all=false
+cleanup.standard_comparison=false
+cleanup.static_inner_class=false
+cleanup.strictly_equal_or_different=false
+cleanup.stringbuffer_to_stringbuilder=false
+cleanup.stringbuilder=false
+cleanup.stringbuilder_for_local_vars=true
+cleanup.stringconcat_to_textblock=false
+cleanup.substring=false
+cleanup.switch=false
+cleanup.system_property=false
+cleanup.system_property_boolean=false
+cleanup.system_property_file_encoding=false
+cleanup.system_property_file_separator=false
+cleanup.system_property_line_separator=false
+cleanup.system_property_path_separator=false
+cleanup.ternary_operator=false
+cleanup.try_with_resource=false
+cleanup.unlooped_while=false
+cleanup.unreachable_block=false
+cleanup.use_anonymous_class_creation=false
+cleanup.use_autoboxing=false
+cleanup.use_blocks=true
+cleanup.use_blocks_only_for_return_and_throw=false
+cleanup.use_directly_map_method=false
+cleanup.use_lambda=true
+cleanup.use_parentheses_in_expressions=true
+cleanup.use_string_is_blank=false
+cleanup.use_this_for_non_static_field_access=true
+cleanup.use_this_for_non_static_field_access_only_if_necessary=false
+cleanup.use_this_for_non_static_method_access=true
+cleanup.use_this_for_non_static_method_access_only_if_necessary=false
+cleanup.use_unboxing=false
+cleanup.use_var=false
+cleanup.useless_continue=false
+cleanup.useless_return=false
+cleanup.valueof_rather_than_instantiation=false
+cleanup_profile=_CAU-SE-Style
+cleanup_settings_version=2
+eclipse.preferences.version=1
+editor_save_participant_org.eclipse.jdt.ui.postsavelistener.cleanup=true
+formatter_profile=_CAU-SE-Style
+formatter_settings_version=21
+org.eclipse.jdt.ui.ignorelowercasenames=true
+org.eclipse.jdt.ui.importorder=
+org.eclipse.jdt.ui.ondemandthreshold=99
+org.eclipse.jdt.ui.staticondemandthreshold=99
+org.eclipse.jdt.ui.text.custom_code_templates=
+sp_cleanup.add_all=false
+sp_cleanup.add_default_serial_version_id=true
+sp_cleanup.add_generated_serial_version_id=false
+sp_cleanup.add_missing_annotations=true
+sp_cleanup.add_missing_deprecated_annotations=true
+sp_cleanup.add_missing_methods=false
+sp_cleanup.add_missing_nls_tags=false
+sp_cleanup.add_missing_override_annotations=true
+sp_cleanup.add_missing_override_annotations_interface_methods=true
+sp_cleanup.add_serial_version_id=false
+sp_cleanup.always_use_blocks=true
+sp_cleanup.always_use_parentheses_in_expressions=false
+sp_cleanup.always_use_this_for_non_static_field_access=true
+sp_cleanup.always_use_this_for_non_static_method_access=true
+sp_cleanup.array_with_curly=false
+sp_cleanup.arrays_fill=false
+sp_cleanup.bitwise_conditional_expression=false
+sp_cleanup.boolean_literal=false
+sp_cleanup.boolean_value_rather_than_comparison=false
+sp_cleanup.break_loop=false
+sp_cleanup.collection_cloning=false
+sp_cleanup.comparing_on_criteria=true
+sp_cleanup.comparison_statement=false
+sp_cleanup.controlflow_merge=false
+sp_cleanup.convert_functional_interfaces=false
+sp_cleanup.convert_to_enhanced_for_loop=true
+sp_cleanup.convert_to_enhanced_for_loop_if_loop_var_used=false
+sp_cleanup.convert_to_switch_expressions=false
+sp_cleanup.correct_indentation=true
+sp_cleanup.do_while_rather_than_while=false
+sp_cleanup.double_negation=false
+sp_cleanup.else_if=false
+sp_cleanup.embedded_if=false
+sp_cleanup.evaluate_nullable=false
+sp_cleanup.extract_increment=false
+sp_cleanup.format_source_code=true
+sp_cleanup.format_source_code_changes_only=false
+sp_cleanup.hash=false
+sp_cleanup.if_condition=false
+sp_cleanup.insert_inferred_type_arguments=false
+sp_cleanup.instanceof=false
+sp_cleanup.instanceof_keyword=false
+sp_cleanup.invert_equals=false
+sp_cleanup.join=false
+sp_cleanup.lazy_logical_operator=false
+sp_cleanup.make_local_variable_final=true
+sp_cleanup.make_parameters_final=true
+sp_cleanup.make_private_fields_final=true
+sp_cleanup.make_type_abstract_if_missing_method=false
+sp_cleanup.make_variable_declarations_final=true
+sp_cleanup.map_cloning=false
+sp_cleanup.merge_conditional_blocks=false
+sp_cleanup.multi_catch=false
+sp_cleanup.never_use_blocks=false
+sp_cleanup.never_use_parentheses_in_expressions=true
+sp_cleanup.no_string_creation=false
+sp_cleanup.no_super=false
+sp_cleanup.number_suffix=false
+sp_cleanup.objects_equals=false
+sp_cleanup.on_save_use_additional_actions=true
+sp_cleanup.one_if_rather_than_duplicate_blocks_that_fall_through=false
+sp_cleanup.operand_factorization=false
+sp_cleanup.organize_imports=true
+sp_cleanup.overridden_assignment=false
+sp_cleanup.plain_replacement=false
+sp_cleanup.precompile_regex=false
+sp_cleanup.primitive_comparison=false
+sp_cleanup.primitive_parsing=false
+sp_cleanup.primitive_rather_than_wrapper=false
+sp_cleanup.primitive_serialization=false
+sp_cleanup.pull_out_if_from_if_else=false
+sp_cleanup.pull_up_assignment=false
+sp_cleanup.push_down_negation=false
+sp_cleanup.qualify_static_field_accesses_with_declaring_class=false
+sp_cleanup.qualify_static_member_accesses_through_instances_with_declaring_class=true
+sp_cleanup.qualify_static_member_accesses_through_subtypes_with_declaring_class=true
+sp_cleanup.qualify_static_member_accesses_with_declaring_class=true
+sp_cleanup.qualify_static_method_accesses_with_declaring_class=false
+sp_cleanup.reduce_indentation=false
+sp_cleanup.redundant_comparator=false
+sp_cleanup.redundant_falling_through_block_end=false
+sp_cleanup.remove_private_constructors=true
+sp_cleanup.remove_redundant_modifiers=false
+sp_cleanup.remove_redundant_semicolons=false
+sp_cleanup.remove_redundant_type_arguments=false
+sp_cleanup.remove_trailing_whitespaces=true
+sp_cleanup.remove_trailing_whitespaces_all=true
+sp_cleanup.remove_trailing_whitespaces_ignore_empty=false
+sp_cleanup.remove_unnecessary_array_creation=false
+sp_cleanup.remove_unnecessary_casts=true
+sp_cleanup.remove_unnecessary_nls_tags=true
+sp_cleanup.remove_unused_imports=true
+sp_cleanup.remove_unused_local_variables=false
+sp_cleanup.remove_unused_private_fields=true
+sp_cleanup.remove_unused_private_members=false
+sp_cleanup.remove_unused_private_methods=true
+sp_cleanup.remove_unused_private_types=true
+sp_cleanup.return_expression=false
+sp_cleanup.simplify_lambda_expression_and_method_ref=false
+sp_cleanup.single_used_field=false
+sp_cleanup.sort_members=false
+sp_cleanup.sort_members_all=false
+sp_cleanup.standard_comparison=false
+sp_cleanup.static_inner_class=false
+sp_cleanup.strictly_equal_or_different=false
+sp_cleanup.stringbuffer_to_stringbuilder=false
+sp_cleanup.stringbuilder=false
+sp_cleanup.stringbuilder_for_local_vars=false
+sp_cleanup.stringconcat_to_textblock=false
+sp_cleanup.substring=false
+sp_cleanup.switch=false
+sp_cleanup.system_property=false
+sp_cleanup.system_property_boolean=false
+sp_cleanup.system_property_file_encoding=false
+sp_cleanup.system_property_file_separator=false
+sp_cleanup.system_property_line_separator=false
+sp_cleanup.system_property_path_separator=false
+sp_cleanup.ternary_operator=false
+sp_cleanup.try_with_resource=false
+sp_cleanup.unlooped_while=false
+sp_cleanup.unreachable_block=false
+sp_cleanup.use_anonymous_class_creation=false
+sp_cleanup.use_autoboxing=false
+sp_cleanup.use_blocks=true
+sp_cleanup.use_blocks_only_for_return_and_throw=false
+sp_cleanup.use_directly_map_method=false
+sp_cleanup.use_lambda=true
+sp_cleanup.use_parentheses_in_expressions=true
+sp_cleanup.use_string_is_blank=false
+sp_cleanup.use_this_for_non_static_field_access=true
+sp_cleanup.use_this_for_non_static_field_access_only_if_necessary=false
+sp_cleanup.use_this_for_non_static_method_access=true
+sp_cleanup.use_this_for_non_static_method_access_only_if_necessary=false
+sp_cleanup.use_unboxing=false
+sp_cleanup.use_var=false
+sp_cleanup.useless_continue=true
+sp_cleanup.useless_return=true
+sp_cleanup.valueof_rather_than_instantiation=false
diff --git a/theodolite-benchmarks/hazelcastjet-commons/.settings/qa.eclipse.plugin.checkstyle.prefs b/theodolite-benchmarks/hazelcastjet-commons/.settings/qa.eclipse.plugin.checkstyle.prefs
new file mode 100644
index 0000000000000000000000000000000000000000..4fa4266c755f4ff8da465ab7341cd70ffb24ecf7
--- /dev/null
+++ b/theodolite-benchmarks/hazelcastjet-commons/.settings/qa.eclipse.plugin.checkstyle.prefs
@@ -0,0 +1,4 @@
+configFilePath=../config/checkstyle.xml
+customModulesJarPaths=
+eclipse.preferences.version=1
+enabled=false
diff --git a/theodolite-benchmarks/hazelcastjet-commons/.settings/qa.eclipse.plugin.pmd.prefs b/theodolite-benchmarks/hazelcastjet-commons/.settings/qa.eclipse.plugin.pmd.prefs
new file mode 100644
index 0000000000000000000000000000000000000000..40bfd0ecdbbe324bb54e4b9f9f32ba95cf5b0c2a
--- /dev/null
+++ b/theodolite-benchmarks/hazelcastjet-commons/.settings/qa.eclipse.plugin.pmd.prefs
@@ -0,0 +1,4 @@
+customRulesJars=
+eclipse.preferences.version=1
+enabled=false
+ruleSetFilePath=../config/pmd.xml
diff --git a/theodolite-benchmarks/hazelcastjet-commons/build.gradle b/theodolite-benchmarks/hazelcastjet-commons/build.gradle
new file mode 100644
index 0000000000000000000000000000000000000000..934ebc5fae39eea90a1c0ab47f989ee3cc59d6f9
--- /dev/null
+++ b/theodolite-benchmarks/hazelcastjet-commons/build.gradle
@@ -0,0 +1,23 @@
plugins {
    id 'theodolite.java-commons'
}

repositories {
    mavenCentral()
    // Required for the titan-ccp SNAPSHOT artifacts below.
    maven {
        url 'https://oss.sonatype.org/content/repositories/snapshots/'
    }
    // Required for the Confluent Kafka Avro serializer.
    maven {
        url 'https://packages.confluent.io/maven/'
    }
}

dependencies {
    // changing = true: re-check the repository for updated SNAPSHOT builds.
    implementation('org.industrial-devops:titan-ccp-common:0.1.0-SNAPSHOT') { changing = true }
    implementation('org.industrial-devops:titan-ccp-common-kafka:0.1.0-SNAPSHOT') { changing = true }
    implementation 'com.hazelcast.jet:hazelcast-jet:4.5'
    implementation 'com.hazelcast.jet:hazelcast-jet-kafka:4.5'
    implementation 'com.hazelcast:hazelcast-kubernetes:2.2.2'
    implementation 'io.confluent:kafka-avro-serializer:5.3.0'
    implementation 'org.slf4j:slf4j-api:1.7.25'
}
\ No newline at end of file
diff --git a/theodolite-benchmarks/hazelcastjet-commons/src/main/java/rocks/theodolite/benchmarks/commons/hazelcastjet/BenchmarkConfigBuilder.java b/theodolite-benchmarks/hazelcastjet-commons/src/main/java/rocks/theodolite/benchmarks/commons/hazelcastjet/BenchmarkConfigBuilder.java
new file mode 100644
index 0000000000000000000000000000000000000000..46dd56a8669cce4d29e8dace1bd6c2649a71e1f0
--- /dev/null
+++ b/theodolite-benchmarks/hazelcastjet-commons/src/main/java/rocks/theodolite/benchmarks/commons/hazelcastjet/BenchmarkConfigBuilder.java
@@ -0,0 +1,71 @@
+package rocks.theodolite.benchmarks.commons.hazelcastjet;
+
+import com.hazelcast.config.Config;
+import com.hazelcast.config.JoinConfig;
+import org.slf4j.Logger;
+
+/**
+ * Build a Config Object for Benchmarks implemented in Hazelcast Jet.
+ *
+ */
+public class BenchmarkConfigBuilder {
+
+  /**
+   * Builds a Config Object for Benchmarks implemented in Hazelcast Jet using data from the
+   * environment.
+   */
+  public Config buildFromEnv(final Logger logger, final String bootstrapServerDefault,
+      final String hzKubernetesServiceDnsKey) {
+
+    final String bootstrapServer = System.getenv(ConfigurationKeys.BOOTSTRAP_SERVER);
+    final String kubernetesDnsName = System.getenv(ConfigurationKeys.KUBERNETES_DNS_NAME);
+
+    ClusterConfig clusterConfig;
+    if (bootstrapServer != null) { // NOPMD
+      clusterConfig = ClusterConfig.fromBootstrapServer(bootstrapServer);
+      logger.info("Use bootstrap server '{}'.", bootstrapServer);
+    } else if (kubernetesDnsName != null) { // NOPMD
+      clusterConfig = ClusterConfig.fromKubernetesDnsName(kubernetesDnsName);
+      logger.info("Use Kubernetes DNS name '{}'.", kubernetesDnsName);
+    } else {
+      clusterConfig = ClusterConfig.fromBootstrapServer(bootstrapServerDefault);
+      logger.info(
+          "Neither a bootstrap server nor a Kubernetes DNS name was provided. Use default bootstrap server '{}'.", // NOCS
+          bootstrapServerDefault);
+    }
+
+    final String port = System.getenv(ConfigurationKeys.PORT);
+    if (port != null) {
+      clusterConfig.setPort(Integer.parseInt(port));
+    }
+
+    final String portAutoIncrement = System.getenv(ConfigurationKeys.PORT_AUTO_INCREMENT);
+    if (portAutoIncrement != null) {
+      clusterConfig.setPortAutoIncrement(Boolean.parseBoolean(portAutoIncrement));
+    }
+
+    final String clusterNamePrefix = System.getenv(ConfigurationKeys.CLUSTER_NAME_PREFIX);
+    if (clusterNamePrefix != null) {
+      clusterConfig.setClusterNamePrefix(clusterNamePrefix);
+    }
+
+    // Set network config for this hazelcast jet instance
+    final Config config = new Config()
+        .setClusterName(clusterConfig.getClusterNamePrefix());
+    final JoinConfig joinConfig = config.getNetworkConfig()
+        .setPort(clusterConfig.getPort())
+        .setPortAutoIncrement(clusterConfig.isPortAutoIncrement())
+        .getJoin();
+    joinConfig.getMulticastConfig().setEnabled(false);
+    if (clusterConfig.hasBootstrapServer()) {
+      joinConfig.getTcpIpConfig().addMember(clusterConfig.getBootstrapServer());
+    } else if (clusterConfig.hasKubernetesDnsName()) {
+      joinConfig.getKubernetesConfig()
+          .setEnabled(true)
+          .setProperty(hzKubernetesServiceDnsKey, clusterConfig.getKubernetesDnsName());
+    }
+
+    return config;
+  }
+
+}
diff --git a/theodolite-benchmarks/hazelcastjet-commons/src/main/java/rocks/theodolite/benchmarks/commons/hazelcastjet/ClusterConfig.java b/theodolite-benchmarks/hazelcastjet-commons/src/main/java/rocks/theodolite/benchmarks/commons/hazelcastjet/ClusterConfig.java
new file mode 100644
index 0000000000000000000000000000000000000000..c5a2141799eb97dbedc5fc82fa456b2efee3a813
--- /dev/null
+++ b/theodolite-benchmarks/hazelcastjet-commons/src/main/java/rocks/theodolite/benchmarks/commons/hazelcastjet/ClusterConfig.java
@@ -0,0 +1,76 @@
+package rocks.theodolite.benchmarks.commons.hazelcastjet;
+
/**
 * Configuration of a load generator cluster.
 */
public final class ClusterConfig {

  private static final int PORT_DEFAULT = 5701;
  private static final String CLUSTER_NAME_PREFIX_DEFAULT = "theodolite-hazelcastjet";

  private final String bootstrapServer;
  private final String kubernetesDnsName;
  private int port = PORT_DEFAULT;
  private boolean portAutoIncrement = true;
  private String clusterNamePrefix = CLUSTER_NAME_PREFIX_DEFAULT;

  // Exactly one of the two join addresses is non-null; use the static factories below.
  private ClusterConfig(final String bootstrapServerAddress, final String dnsName) {
    this.bootstrapServer = bootstrapServerAddress;
    this.kubernetesDnsName = dnsName;
  }

  /** Creates a {@link ClusterConfig} whose members join via a TCP/IP bootstrap server. */
  public static ClusterConfig fromBootstrapServer(final String bootstrapServer) {
    return new ClusterConfig(bootstrapServer, null);
  }

  /** Creates a {@link ClusterConfig} whose members join via Kubernetes DNS discovery. */
  public static ClusterConfig fromKubernetesDnsName(final String kubernetesDnsName) {
    return new ClusterConfig(null, kubernetesDnsName);
  }

  /** Returns {@code true} if a bootstrap server address is configured. */
  public boolean hasBootstrapServer() {
    return this.bootstrapServer != null;
  }

  /** Returns the bootstrap server address or {@code null} if none is configured. */
  public String getBootstrapServer() {
    return this.bootstrapServer;
  }

  /** Returns {@code true} if a Kubernetes DNS name is configured. */
  public boolean hasKubernetesDnsName() {
    return this.kubernetesDnsName != null;
  }

  /** Returns the Kubernetes DNS name or {@code null} if none is configured. */
  public String getKubernetesDnsName() {
    return this.kubernetesDnsName;
  }

  /** Returns the cluster port (default {@value #PORT_DEFAULT}). */
  public int getPort() {
    return this.port;
  }

  /** Returns whether the port is automatically incremented if already taken. */
  public boolean isPortAutoIncrement() {
    return this.portAutoIncrement;
  }

  /** Sets port auto increment and returns this config for chaining. */
  public ClusterConfig setPortAutoIncrement(final boolean portAutoIncrement) { // NOPMD
    this.portAutoIncrement = portAutoIncrement;
    return this;
  }

  /** Sets the cluster port and returns this config for chaining. */
  public ClusterConfig setPort(final int port) { // NOPMD
    this.port = port;
    return this;
  }

  /** Returns the cluster name prefix (default {@value #CLUSTER_NAME_PREFIX_DEFAULT}). */
  public String getClusterNamePrefix() {
    return this.clusterNamePrefix;
  }

  /** Sets the cluster name prefix and returns this config for chaining. */
  public ClusterConfig setClusterNamePrefix(final String clusterNamePrefix) { // NOPMD
    this.clusterNamePrefix = clusterNamePrefix;
    return this;
  }

}
diff --git a/theodolite-benchmarks/hazelcastjet-commons/src/main/java/rocks/theodolite/benchmarks/commons/hazelcastjet/ConfigurationKeys.java b/theodolite-benchmarks/hazelcastjet-commons/src/main/java/rocks/theodolite/benchmarks/commons/hazelcastjet/ConfigurationKeys.java
new file mode 100644
index 0000000000000000000000000000000000000000..d1705888430c92ee0cec50ea06871746bbe06cb5
--- /dev/null
+++ b/theodolite-benchmarks/hazelcastjet-commons/src/main/java/rocks/theodolite/benchmarks/commons/hazelcastjet/ConfigurationKeys.java
@@ -0,0 +1,33 @@
+package rocks.theodolite.benchmarks.commons.hazelcastjet;
+
/**
 * Names of the environment variables used to configure Hazelcast Jet benchmark implementations.
 *
 * <p>
 * Utility class holding constants only; it cannot be instantiated.
 */
public final class ConfigurationKeys {

  // Common keys
  public static final String BOOTSTRAP_SERVER = "BOOTSTRAP_SERVER";
  public static final String KUBERNETES_DNS_NAME = "KUBERNETES_DNS_NAME";
  public static final String PORT = "PORT";
  public static final String PORT_AUTO_INCREMENT = "PORT_AUTO_INCREMENT";
  public static final String CLUSTER_NAME_PREFIX = "CLUSTER_NAME_PREFIX";
  public static final String KAFKA_BOOTSTRAP_SERVERS = "KAFKA_BOOTSTRAP_SERVERS";
  public static final String SCHEMA_REGISTRY_URL = "SCHEMA_REGISTRY_URL";
  public static final String KAFKA_INPUT_TOPIC = "KAFKA_INPUT_TOPIC";

  // Additional topics
  public static final String KAFKA_OUTPUT_TOPIC = "KAFKA_OUTPUT_TOPIC";

  // UC2
  public static final String DOWNSAMPLE_INTERVAL = "DOWNSAMPLE_INTERVAL";

  // UC3
  public static final String WINDOW_SIZE_IN_SECONDS = "WINDOW_SIZE_IN_SECONDS";
  public static final String HOPPING_SIZE_IN_SECONDS = "HOPPING_SIZE_IN_SECONDS";

  // UC4
  public static final String KAFKA_CONFIGURATION_TOPIC = "KAFKA_CONFIGURATION_TOPIC";
  public static final String KAFKA_FEEDBACK_TOPIC = "KAFKA_FEEDBACK_TOPIC";
  // NOTE: the constant name differs from the actual variable name "WINDOW_SIZE".
  public static final String WINDOW_SIZE_UC4 = "WINDOW_SIZE";

  private ConfigurationKeys() {
    // Prevent instantiation of this utility class.
  }

}
diff --git a/theodolite-benchmarks/hazelcastjet-commons/src/main/java/rocks/theodolite/benchmarks/commons/hazelcastjet/JetInstanceBuilder.java b/theodolite-benchmarks/hazelcastjet-commons/src/main/java/rocks/theodolite/benchmarks/commons/hazelcastjet/JetInstanceBuilder.java
new file mode 100644
index 0000000000000000000000000000000000000000..cc2ee052d5e2ed7e7b372baf7b59f24ef3e26e8f
--- /dev/null
+++ b/theodolite-benchmarks/hazelcastjet-commons/src/main/java/rocks/theodolite/benchmarks/commons/hazelcastjet/JetInstanceBuilder.java
@@ -0,0 +1,61 @@
+package rocks.theodolite.benchmarks.commons.hazelcastjet;
+
+import com.hazelcast.config.Config;
+import com.hazelcast.jet.Jet;
+import com.hazelcast.jet.JetInstance;
+import org.slf4j.Logger;
+
+/**
+ * Builds JetInstances for Benchmark Implementations in Hazelcast Jet.
+ */
+public class JetInstanceBuilder {
+
+  private Config config;
+
+  /**
+   * Set Hazelcast Config for the JetInstance to be built.
+   *
+   * @param hazelcastConfig Config for this JetInstance to be built.
+   * @return A Uc1JetInstanceBuilder with a set Config.
+   */
+  public JetInstanceBuilder setCustomConfig(final Config hazelcastConfig) { // NOPMD
+    this.config = hazelcastConfig;
+    return this;
+  }
+
+  /**
+   * Sets the ClusterConfig for this builder using the clusterConfigBuilder and environment
+   * variables.
+   *
+   * @param logger A specified logger to log procedures
+   * @param bootstrapServerDefault The default bootstrap server used in case no definition by the
+   *        environment is provided.
+   * @return The Uc1HazelcastJetBuilder factory with a set ClusterConfig.
+   */
+  public JetInstanceBuilder setConfigFromEnv(final Logger logger, // NOPMD
+      final String bootstrapServerDefault, final String hzKubernetesServiceDnsKey) {
+    // Use ClusterConfigBuilder to build a cluster config for this microservice
+    final BenchmarkConfigBuilder configBuilder = new BenchmarkConfigBuilder();
+    this.config =
+        configBuilder.buildFromEnv(logger, bootstrapServerDefault, hzKubernetesServiceDnsKey);
+    return this;
+  }
+
+  /**
+   * Builds and returns a JetInstance. If a config is set, the JetInstance will contain the set
+   * config.
+   *
+   * @return JetInstance
+   */
+  public JetInstance build() {
+    final JetInstance jet = Jet.newJetInstance();
+    if (this.config == null) {
+      return jet;
+    } else {
+      jet.getConfig().setHazelcastConfig(this.config);
+      return jet;
+    }
+
+  }
+
+}
diff --git a/theodolite-benchmarks/hazelcastjet-commons/src/main/java/rocks/theodolite/benchmarks/commons/hazelcastjet/KafkaPropertiesBuilder.java b/theodolite-benchmarks/hazelcastjet-commons/src/main/java/rocks/theodolite/benchmarks/commons/hazelcastjet/KafkaPropertiesBuilder.java
new file mode 100644
index 0000000000000000000000000000000000000000..9bce60f57a6ecb9da4578e08d8f49bbb34af934a
--- /dev/null
+++ b/theodolite-benchmarks/hazelcastjet-commons/src/main/java/rocks/theodolite/benchmarks/commons/hazelcastjet/KafkaPropertiesBuilder.java
@@ -0,0 +1,99 @@
+package rocks.theodolite.benchmarks.commons.hazelcastjet;
+
+import io.confluent.kafka.serializers.AbstractKafkaAvroSerDeConfig;
+import io.confluent.kafka.serializers.KafkaAvroDeserializerConfig;
+import java.util.Objects;
+import java.util.Properties;
+import org.apache.kafka.clients.consumer.ConsumerConfig;
+import org.apache.kafka.clients.producer.ProducerConfig;
+
+
+/**
+ * Generalized builder for Kafka properties.
+ * Will always set AUTO_OFFSET_RESET_CONFIG to earliest
+ *
+ */
+public class KafkaPropertiesBuilder {
+
+  private static final String TRUE = "true";
+  private static final String AUTO_OFFSET_RESET_CONFIG = "earliest";
+
+
+  /**
+   * Builds Kafka Properties used for the UC4 Benchmark pipeline.
+   *
+   * @param kafkaBootstrapServerDefault Default bootstrap server if not set by environment.
+   * @param schemaRegistryUrlDefault Default schema registry URL if not set by environment.
+   * @param applicationName Used to set the group id to commit the offsets
+   * @param keyDeserializer Classname for the key deserializer.
+   * @param valueDeserializer Classname for the value deserializer.
+   * @return A Kafka Properties Object containing the values needed for a Pipeline.
+   */
+  public Properties buildKafkaInputReadPropsFromEnv(final String kafkaBootstrapServerDefault,//NOPMD
+                                                    final String schemaRegistryUrlDefault,
+                                                    final String applicationName,
+                                                    final String keyDeserializer,
+                                                    final String valueDeserializer) {
+
+    final String kafkaBootstrapServers = Objects.requireNonNullElse(
+        System.getenv(ConfigurationKeys.KAFKA_BOOTSTRAP_SERVERS),
+        kafkaBootstrapServerDefault);
+    final String schemaRegistryUrl = Objects.requireNonNullElse(
+        System.getenv(ConfigurationKeys.SCHEMA_REGISTRY_URL),
+        schemaRegistryUrlDefault);
+
+    final Properties props = new Properties();
+    props.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaBootstrapServers);
+    props.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG,
+        keyDeserializer);
+    props.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
+        valueDeserializer);
+    props.setProperty(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, schemaRegistryUrl);
+    props.setProperty(KafkaAvroDeserializerConfig.SPECIFIC_AVRO_READER_CONFIG, TRUE);
+    props.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, AUTO_OFFSET_RESET_CONFIG);
+
+    props.setProperty(ConsumerConfig.GROUP_ID_CONFIG,applicationName);
+    props.setProperty(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG,TRUE);
+
+    return props;
+  }
+
+  /**
+   * Builds Kafka Properties used for the UC4 Benchmark pipeline.
+   *
+   * @param kafkaBootstrapServerDefault Default bootstrap server if not set by environment.
+   * @param schemaRegistryUrlDefault Default schema registry URL if not set by environment.
+   * @param keySerializer Classname for the key serializer.
+   * @param valueSerializer Classname for the value serializer.
+   * @return A Kafka Properties Object containing the values needed for a Hazelcast Jet UC4
+   *         Pipeline.
+   */
+  public Properties buildKafkaWritePropsFromEnv(final String kafkaBootstrapServerDefault,//NOPMD
+                                                final String schemaRegistryUrlDefault,
+                                                final String keySerializer,
+                                                final String valueSerializer) {
+
+    final String kafkaBootstrapServers = Objects.requireNonNullElse(
+        System.getenv(ConfigurationKeys.KAFKA_BOOTSTRAP_SERVERS),
+        kafkaBootstrapServerDefault);
+    final String schemaRegistryUrl = Objects.requireNonNullElse(
+        System.getenv(ConfigurationKeys.SCHEMA_REGISTRY_URL),
+        schemaRegistryUrlDefault);
+
+    final Properties props = new Properties();
+    props.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaBootstrapServers);
+    props.setProperty(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,
+        keySerializer);
+    props.setProperty(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
+        valueSerializer);
+    props.setProperty(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, schemaRegistryUrl);
+    props.setProperty("specific.avro.writer", TRUE);
+
+    return props;
+  }
+
+
+
+
+
+}
diff --git a/theodolite-benchmarks/settings.gradle b/theodolite-benchmarks/settings.gradle
index 776e7d8e4fe132839b6e27c70c368720415721ea..0040989a8b3b02487c2d7328726b7caadb90f32f 100644
--- a/theodolite-benchmarks/settings.gradle
+++ b/theodolite-benchmarks/settings.gradle
@@ -3,12 +3,14 @@ rootProject.name = 'theodolite-benchmarks'
 include 'load-generator-commons'
 include 'kstreams-commons'
 include 'flink-commons'
+include 'hazelcastjet-commons'
 include 'beam-commons'
 
 include 'uc1-load-generator'
 include 'uc1-commons'
 include 'uc1-kstreams'
 include 'uc1-flink'
+include 'uc1-hazelcastjet'
 include 'uc1-beam'
 include 'uc1-beam-flink'
 include 'uc1-beam-samza'
@@ -16,6 +18,7 @@ include 'uc1-beam-samza'
 include 'uc2-load-generator'
 include 'uc2-kstreams'
 include 'uc2-flink'
+include 'uc2-hazelcastjet'
 include 'uc2-beam'
 include 'uc2-beam-flink'
 include 'uc2-beam-samza'
@@ -23,6 +26,7 @@ include 'uc2-beam-samza'
 include 'uc3-load-generator'
 include 'uc3-kstreams'
 include 'uc3-flink'
+include 'uc3-hazelcastjet'
 include 'uc3-beam'
 include 'uc3-beam-flink'
 include 'uc3-beam-samza'
@@ -30,6 +34,7 @@ include 'uc3-beam-samza'
 include 'uc4-load-generator'
 include 'uc4-kstreams'
 include 'uc4-flink'
+include 'uc4-hazelcastjet'
 include 'uc4-beam'
 include 'uc4-beam-flink'
 include 'uc4-beam-samza'
diff --git a/theodolite-benchmarks/uc1-hazelcastjet/.settings/org.eclipse.jdt.ui.prefs b/theodolite-benchmarks/uc1-hazelcastjet/.settings/org.eclipse.jdt.ui.prefs
new file mode 100644
index 0000000000000000000000000000000000000000..b2a15f439cf1844efe56f1ac0d82a2884e66cb9d
--- /dev/null
+++ b/theodolite-benchmarks/uc1-hazelcastjet/.settings/org.eclipse.jdt.ui.prefs
@@ -0,0 +1,286 @@
+cleanup.add_all=false
+cleanup.add_default_serial_version_id=true
+cleanup.add_generated_serial_version_id=false
+cleanup.add_missing_annotations=true
+cleanup.add_missing_deprecated_annotations=true
+cleanup.add_missing_methods=false
+cleanup.add_missing_nls_tags=false
+cleanup.add_missing_override_annotations=true
+cleanup.add_missing_override_annotations_interface_methods=true
+cleanup.add_serial_version_id=false
+cleanup.always_use_blocks=true
+cleanup.always_use_parentheses_in_expressions=false
+cleanup.always_use_this_for_non_static_field_access=true
+cleanup.always_use_this_for_non_static_method_access=true
+cleanup.array_with_curly=false
+cleanup.arrays_fill=false
+cleanup.bitwise_conditional_expression=false
+cleanup.boolean_literal=false
+cleanup.boolean_value_rather_than_comparison=true
+cleanup.break_loop=false
+cleanup.collection_cloning=false
+cleanup.comparing_on_criteria=false
+cleanup.comparison_statement=false
+cleanup.controlflow_merge=false
+cleanup.convert_functional_interfaces=false
+cleanup.convert_to_enhanced_for_loop=true
+cleanup.convert_to_enhanced_for_loop_if_loop_var_used=true
+cleanup.convert_to_switch_expressions=false
+cleanup.correct_indentation=true
+cleanup.do_while_rather_than_while=true
+cleanup.double_negation=false
+cleanup.else_if=false
+cleanup.embedded_if=false
+cleanup.evaluate_nullable=false
+cleanup.extract_increment=false
+cleanup.format_source_code=true
+cleanup.format_source_code_changes_only=false
+cleanup.hash=false
+cleanup.if_condition=false
+cleanup.insert_inferred_type_arguments=false
+cleanup.instanceof=false
+cleanup.instanceof_keyword=false
+cleanup.invert_equals=false
+cleanup.join=false
+cleanup.lazy_logical_operator=false
+cleanup.make_local_variable_final=true
+cleanup.make_parameters_final=true
+cleanup.make_private_fields_final=true
+cleanup.make_type_abstract_if_missing_method=false
+cleanup.make_variable_declarations_final=true
+cleanup.map_cloning=false
+cleanup.merge_conditional_blocks=false
+cleanup.multi_catch=false
+cleanup.never_use_blocks=false
+cleanup.never_use_parentheses_in_expressions=true
+cleanup.no_string_creation=false
+cleanup.no_super=false
+cleanup.number_suffix=false
+cleanup.objects_equals=false
+cleanup.one_if_rather_than_duplicate_blocks_that_fall_through=true
+cleanup.operand_factorization=false
+cleanup.organize_imports=true
+cleanup.overridden_assignment=false
+cleanup.plain_replacement=false
+cleanup.precompile_regex=false
+cleanup.primitive_comparison=false
+cleanup.primitive_parsing=false
+cleanup.primitive_rather_than_wrapper=true
+cleanup.primitive_serialization=false
+cleanup.pull_out_if_from_if_else=false
+cleanup.pull_up_assignment=false
+cleanup.push_down_negation=false
+cleanup.qualify_static_field_accesses_with_declaring_class=false
+cleanup.qualify_static_member_accesses_through_instances_with_declaring_class=true
+cleanup.qualify_static_member_accesses_through_subtypes_with_declaring_class=true
+cleanup.qualify_static_member_accesses_with_declaring_class=true
+cleanup.qualify_static_method_accesses_with_declaring_class=false
+cleanup.reduce_indentation=false
+cleanup.redundant_comparator=false
+cleanup.redundant_falling_through_block_end=false
+cleanup.remove_private_constructors=true
+cleanup.remove_redundant_modifiers=false
+cleanup.remove_redundant_semicolons=true
+cleanup.remove_redundant_type_arguments=true
+cleanup.remove_trailing_whitespaces=true
+cleanup.remove_trailing_whitespaces_all=true
+cleanup.remove_trailing_whitespaces_ignore_empty=false
+cleanup.remove_unnecessary_array_creation=false
+cleanup.remove_unnecessary_casts=true
+cleanup.remove_unnecessary_nls_tags=true
+cleanup.remove_unused_imports=true
+cleanup.remove_unused_local_variables=false
+cleanup.remove_unused_private_fields=true
+cleanup.remove_unused_private_members=false
+cleanup.remove_unused_private_methods=true
+cleanup.remove_unused_private_types=true
+cleanup.return_expression=false
+cleanup.simplify_lambda_expression_and_method_ref=false
+cleanup.single_used_field=false
+cleanup.sort_members=false
+cleanup.sort_members_all=false
+cleanup.standard_comparison=false
+cleanup.static_inner_class=false
+cleanup.strictly_equal_or_different=false
+cleanup.stringbuffer_to_stringbuilder=false
+cleanup.stringbuilder=false
+cleanup.stringbuilder_for_local_vars=true
+cleanup.stringconcat_to_textblock=false
+cleanup.substring=false
+cleanup.switch=false
+cleanup.system_property=false
+cleanup.system_property_boolean=false
+cleanup.system_property_file_encoding=false
+cleanup.system_property_file_separator=false
+cleanup.system_property_line_separator=false
+cleanup.system_property_path_separator=false
+cleanup.ternary_operator=false
+cleanup.try_with_resource=false
+cleanup.unlooped_while=false
+cleanup.unreachable_block=false
+cleanup.use_anonymous_class_creation=false
+cleanup.use_autoboxing=false
+cleanup.use_blocks=true
+cleanup.use_blocks_only_for_return_and_throw=false
+cleanup.use_directly_map_method=false
+cleanup.use_lambda=true
+cleanup.use_parentheses_in_expressions=true
+cleanup.use_string_is_blank=false
+cleanup.use_this_for_non_static_field_access=true
+cleanup.use_this_for_non_static_field_access_only_if_necessary=false
+cleanup.use_this_for_non_static_method_access=true
+cleanup.use_this_for_non_static_method_access_only_if_necessary=false
+cleanup.use_unboxing=false
+cleanup.use_var=false
+cleanup.useless_continue=false
+cleanup.useless_return=false
+cleanup.valueof_rather_than_instantiation=false
+cleanup_profile=_CAU-SE-Style
+cleanup_settings_version=2
+eclipse.preferences.version=1
+editor_save_participant_org.eclipse.jdt.ui.postsavelistener.cleanup=true
+formatter_profile=_CAU-SE-Style
+formatter_settings_version=21
+org.eclipse.jdt.ui.ignorelowercasenames=true
+org.eclipse.jdt.ui.importorder=
+org.eclipse.jdt.ui.ondemandthreshold=99
+org.eclipse.jdt.ui.staticondemandthreshold=99
+org.eclipse.jdt.ui.text.custom_code_templates=
+sp_cleanup.add_all=false
+sp_cleanup.add_default_serial_version_id=true
+sp_cleanup.add_generated_serial_version_id=false
+sp_cleanup.add_missing_annotations=true
+sp_cleanup.add_missing_deprecated_annotations=true
+sp_cleanup.add_missing_methods=false
+sp_cleanup.add_missing_nls_tags=false
+sp_cleanup.add_missing_override_annotations=true
+sp_cleanup.add_missing_override_annotations_interface_methods=true
+sp_cleanup.add_serial_version_id=false
+sp_cleanup.always_use_blocks=true
+sp_cleanup.always_use_parentheses_in_expressions=false
+sp_cleanup.always_use_this_for_non_static_field_access=true
+sp_cleanup.always_use_this_for_non_static_method_access=true
+sp_cleanup.array_with_curly=false
+sp_cleanup.arrays_fill=false
+sp_cleanup.bitwise_conditional_expression=false
+sp_cleanup.boolean_literal=false
+sp_cleanup.boolean_value_rather_than_comparison=false
+sp_cleanup.break_loop=false
+sp_cleanup.collection_cloning=false
+sp_cleanup.comparing_on_criteria=true
+sp_cleanup.comparison_statement=false
+sp_cleanup.controlflow_merge=false
+sp_cleanup.convert_functional_interfaces=false
+sp_cleanup.convert_to_enhanced_for_loop=true
+sp_cleanup.convert_to_enhanced_for_loop_if_loop_var_used=false
+sp_cleanup.convert_to_switch_expressions=false
+sp_cleanup.correct_indentation=true
+sp_cleanup.do_while_rather_than_while=false
+sp_cleanup.double_negation=false
+sp_cleanup.else_if=false
+sp_cleanup.embedded_if=false
+sp_cleanup.evaluate_nullable=false
+sp_cleanup.extract_increment=false
+sp_cleanup.format_source_code=true
+sp_cleanup.format_source_code_changes_only=false
+sp_cleanup.hash=false
+sp_cleanup.if_condition=false
+sp_cleanup.insert_inferred_type_arguments=false
+sp_cleanup.instanceof=false
+sp_cleanup.instanceof_keyword=false
+sp_cleanup.invert_equals=false
+sp_cleanup.join=false
+sp_cleanup.lazy_logical_operator=false
+sp_cleanup.make_local_variable_final=true
+sp_cleanup.make_parameters_final=true
+sp_cleanup.make_private_fields_final=true
+sp_cleanup.make_type_abstract_if_missing_method=false
+sp_cleanup.make_variable_declarations_final=true
+sp_cleanup.map_cloning=false
+sp_cleanup.merge_conditional_blocks=false
+sp_cleanup.multi_catch=false
+sp_cleanup.never_use_blocks=false
+sp_cleanup.never_use_parentheses_in_expressions=true
+sp_cleanup.no_string_creation=false
+sp_cleanup.no_super=false
+sp_cleanup.number_suffix=false
+sp_cleanup.objects_equals=false
+sp_cleanup.on_save_use_additional_actions=true
+sp_cleanup.one_if_rather_than_duplicate_blocks_that_fall_through=false
+sp_cleanup.operand_factorization=false
+sp_cleanup.organize_imports=true
+sp_cleanup.overridden_assignment=false
+sp_cleanup.plain_replacement=false
+sp_cleanup.precompile_regex=false
+sp_cleanup.primitive_comparison=false
+sp_cleanup.primitive_parsing=false
+sp_cleanup.primitive_rather_than_wrapper=false
+sp_cleanup.primitive_serialization=false
+sp_cleanup.pull_out_if_from_if_else=false
+sp_cleanup.pull_up_assignment=false
+sp_cleanup.push_down_negation=false
+sp_cleanup.qualify_static_field_accesses_with_declaring_class=false
+sp_cleanup.qualify_static_member_accesses_through_instances_with_declaring_class=true
+sp_cleanup.qualify_static_member_accesses_through_subtypes_with_declaring_class=true
+sp_cleanup.qualify_static_member_accesses_with_declaring_class=true
+sp_cleanup.qualify_static_method_accesses_with_declaring_class=false
+sp_cleanup.reduce_indentation=false
+sp_cleanup.redundant_comparator=false
+sp_cleanup.redundant_falling_through_block_end=false
+sp_cleanup.remove_private_constructors=true
+sp_cleanup.remove_redundant_modifiers=false
+sp_cleanup.remove_redundant_semicolons=false
+sp_cleanup.remove_redundant_type_arguments=false
+sp_cleanup.remove_trailing_whitespaces=true
+sp_cleanup.remove_trailing_whitespaces_all=true
+sp_cleanup.remove_trailing_whitespaces_ignore_empty=false
+sp_cleanup.remove_unnecessary_array_creation=false
+sp_cleanup.remove_unnecessary_casts=true
+sp_cleanup.remove_unnecessary_nls_tags=true
+sp_cleanup.remove_unused_imports=true
+sp_cleanup.remove_unused_local_variables=false
+sp_cleanup.remove_unused_private_fields=true
+sp_cleanup.remove_unused_private_members=false
+sp_cleanup.remove_unused_private_methods=true
+sp_cleanup.remove_unused_private_types=true
+sp_cleanup.return_expression=false
+sp_cleanup.simplify_lambda_expression_and_method_ref=false
+sp_cleanup.single_used_field=false
+sp_cleanup.sort_members=false
+sp_cleanup.sort_members_all=false
+sp_cleanup.standard_comparison=false
+sp_cleanup.static_inner_class=false
+sp_cleanup.strictly_equal_or_different=false
+sp_cleanup.stringbuffer_to_stringbuilder=false
+sp_cleanup.stringbuilder=false
+sp_cleanup.stringbuilder_for_local_vars=false
+sp_cleanup.stringconcat_to_textblock=false
+sp_cleanup.substring=false
+sp_cleanup.switch=false
+sp_cleanup.system_property=false
+sp_cleanup.system_property_boolean=false
+sp_cleanup.system_property_file_encoding=false
+sp_cleanup.system_property_file_separator=false
+sp_cleanup.system_property_line_separator=false
+sp_cleanup.system_property_path_separator=false
+sp_cleanup.ternary_operator=false
+sp_cleanup.try_with_resource=false
+sp_cleanup.unlooped_while=false
+sp_cleanup.unreachable_block=false
+sp_cleanup.use_anonymous_class_creation=false
+sp_cleanup.use_autoboxing=false
+sp_cleanup.use_blocks=true
+sp_cleanup.use_blocks_only_for_return_and_throw=false
+sp_cleanup.use_directly_map_method=false
+sp_cleanup.use_lambda=true
+sp_cleanup.use_parentheses_in_expressions=true
+sp_cleanup.use_string_is_blank=false
+sp_cleanup.use_this_for_non_static_field_access=true
+sp_cleanup.use_this_for_non_static_field_access_only_if_necessary=false
+sp_cleanup.use_this_for_non_static_method_access=true
+sp_cleanup.use_this_for_non_static_method_access_only_if_necessary=false
+sp_cleanup.use_unboxing=false
+sp_cleanup.use_var=false
+sp_cleanup.useless_continue=true
+sp_cleanup.useless_return=true
+sp_cleanup.valueof_rather_than_instantiation=false
diff --git a/theodolite-benchmarks/uc1-hazelcastjet/.settings/qa.eclipse.plugin.checkstyle.prefs b/theodolite-benchmarks/uc1-hazelcastjet/.settings/qa.eclipse.plugin.checkstyle.prefs
new file mode 100644
index 0000000000000000000000000000000000000000..4fa4266c755f4ff8da465ab7341cd70ffb24ecf7
--- /dev/null
+++ b/theodolite-benchmarks/uc1-hazelcastjet/.settings/qa.eclipse.plugin.checkstyle.prefs
@@ -0,0 +1,4 @@
+configFilePath=../config/checkstyle.xml
+customModulesJarPaths=
+eclipse.preferences.version=1
+enabled=false
diff --git a/theodolite-benchmarks/uc1-hazelcastjet/.settings/qa.eclipse.plugin.pmd.prefs b/theodolite-benchmarks/uc1-hazelcastjet/.settings/qa.eclipse.plugin.pmd.prefs
new file mode 100644
index 0000000000000000000000000000000000000000..40bfd0ecdbbe324bb54e4b9f9f32ba95cf5b0c2a
--- /dev/null
+++ b/theodolite-benchmarks/uc1-hazelcastjet/.settings/qa.eclipse.plugin.pmd.prefs
@@ -0,0 +1,4 @@
+customRulesJars=
+eclipse.preferences.version=1
+enabled=false
+ruleSetFilePath=../config/pmd.xml
diff --git a/theodolite-benchmarks/uc1-hazelcastjet/Dockerfile b/theodolite-benchmarks/uc1-hazelcastjet/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..7a0fcf7c5f61bed97c9a1e6d455164c64930c4fe
--- /dev/null
+++ b/theodolite-benchmarks/uc1-hazelcastjet/Dockerfile
@@ -0,0 +1,7 @@
+FROM openjdk:11-slim
+
+ADD build/distributions/uc1-hazelcastjet.tar /
+
+
+CMD  JAVA_OPTS="$JAVA_OPTS -Dorg.slf4j.simpleLogger.defaultLogLevel=$LOG_LEVEL" \
+     /uc1-hazelcastjet/bin/uc1-hazelcastjet
diff --git a/theodolite-benchmarks/uc1-hazelcastjet/build.gradle b/theodolite-benchmarks/uc1-hazelcastjet/build.gradle
new file mode 100644
index 0000000000000000000000000000000000000000..cac5ad9f6f12b62389236decbe75fbec01050071
--- /dev/null
+++ b/theodolite-benchmarks/uc1-hazelcastjet/build.gradle
@@ -0,0 +1,9 @@
+plugins {
+  id 'theodolite.hazelcastjet'
+}
+
+dependencies {
+    implementation project(':uc1-commons')
+}
+
+mainClassName = "rocks.theodolite.benchmarks.uc1.hazelcastjet.HistoryService"
diff --git a/theodolite-benchmarks/uc1-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc1/hazelcastjet/HistoryService.java b/theodolite-benchmarks/uc1-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc1/hazelcastjet/HistoryService.java
new file mode 100644
index 0000000000000000000000000000000000000000..83848261318b2e90d19f28d9ab53fdc2cf678279
--- /dev/null
+++ b/theodolite-benchmarks/uc1-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc1/hazelcastjet/HistoryService.java
@@ -0,0 +1,64 @@
+package rocks.theodolite.benchmarks.uc1.hazelcastjet;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * A microservice that manages the history and, therefore, stores and aggregates incoming
+ * measurements.
+ */
+public class HistoryService {
+
+  private static final Logger LOGGER = LoggerFactory.getLogger(HistoryService.class);
+
+  // Hazelcast settings (default)
+  private static final String HZ_KUBERNETES_SERVICE_DNS_KEY = "service-dns";
+  private static final String BOOTSTRAP_SERVER_DEFAULT = "localhost:5701";
+
+  // Kafka settings (default)
+  private static final String KAFKA_BOOTSTRAP_DEFAULT = "localhost:9092";
+  private static final String SCHEMA_REGISTRY_URL_DEFAULT = "http://localhost:8081";
+  private static final String KAFKA_TOPIC_DEFAULT = "input";
+
+  // Job name (default)
+  private static final String JOB_NAME = "uc1-hazelcastjet";
+
+
+  /**
+   * Entrypoint for UC1 using Gradle Run.
+   */
+  public static void main(final String[] args) {
+    final HistoryService uc1HistoryService = new HistoryService();
+    try {
+      uc1HistoryService.run();
+    } catch (final Exception e) { // NOPMD
+      LOGGER.error("ABORT MISSION!: {}", e);
+    }
+  }
+
+  /**
+   * Start a UC1 service.
+   *
+   * @throws Exception This Exception occurs if the Uc1HazelcastJetFactory is used in the wrong way.
+   *         Detailed data is provided once an Exception occurs.
+   */
+  public void run() throws Exception { // NOPMD
+    this.createHazelcastJetApplication();
+  }
+
+  /**
+   * Creates a Hazelcast Jet Application for UC1 using the Uc1HazelcastJetFactory.
+   *
+   * @throws Exception This Exception occurs if the Uc1HazelcastJetFactory is used in the wrong way.
+   *         Detailed data is provided once an Exception occurs.
+   */
+  private void createHazelcastJetApplication() throws Exception { // NOPMD
+    new Uc1HazelcastJetFactory()
+        .setPropertiesFromEnv(KAFKA_BOOTSTRAP_DEFAULT, SCHEMA_REGISTRY_URL_DEFAULT,JOB_NAME)
+        .setKafkaInputTopicFromEnv(KAFKA_TOPIC_DEFAULT)
+        .buildUc1Pipeline()
+        .buildUc1JetInstanceFromEnv(LOGGER, BOOTSTRAP_SERVER_DEFAULT, HZ_KUBERNETES_SERVICE_DNS_KEY)
+        .runUc1Job(JOB_NAME);
+  }
+
+}
diff --git a/theodolite-benchmarks/uc1-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc1/hazelcastjet/Uc1HazelcastJetFactory.java b/theodolite-benchmarks/uc1-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc1/hazelcastjet/Uc1HazelcastJetFactory.java
new file mode 100644
index 0000000000000000000000000000000000000000..4a5c5dead14e606847dc5e2ac3c95414d9f611b3
--- /dev/null
+++ b/theodolite-benchmarks/uc1-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc1/hazelcastjet/Uc1HazelcastJetFactory.java
@@ -0,0 +1,178 @@
+package rocks.theodolite.benchmarks.uc1.hazelcastjet;
+
+import com.hazelcast.jet.JetInstance;
+import com.hazelcast.jet.config.JobConfig;
+import com.hazelcast.jet.pipeline.Pipeline;
+import io.confluent.kafka.serializers.KafkaAvroDeserializer;
+import java.util.Objects;
+import java.util.Properties;
+import org.apache.kafka.common.serialization.StringDeserializer;
+import org.slf4j.Logger;
+import rocks.theodolite.benchmarks.commons.hazelcastjet.ConfigurationKeys;
+import rocks.theodolite.benchmarks.commons.hazelcastjet.JetInstanceBuilder;
+import rocks.theodolite.benchmarks.commons.hazelcastjet.KafkaPropertiesBuilder;
+
+/**
+ * A Hazelcast Jet factory which can build a Hazelcast Jet Instance and Pipeline for the UC1
+ * benchmark and lets you start the Hazelcast Jet job. The JetInstance can be built directly as the
+ * Hazelcast Config is managed internally. In order to build the Pipeline, you first have to build
+ * the Properties and set the input topic which can be done using internal functions of this
+ * factory. Outside data only refers to custom values or default values in case data of the
+ * environment cannot be fetched.
+ */
+public class Uc1HazelcastJetFactory {
+
+  // Information per History Service
+  private Properties kafkaPropertiesForPipeline;
+  private String kafkaInputTopic;
+  private JetInstance uc1JetInstance;
+  private Pipeline uc1JetPipeline;
+
+  /////////////////////////////////////
+  // Layer 1 - Hazelcast Jet Run Job //
+  /////////////////////////////////////
+
+  /**
+   * Needs a JetInstance and Pipeline defined in this factory. Adds the pipeline to the existing
+   * JetInstance as a job.
+   *
+   * @param jobName The name of the job.
+   */
+  public void runUc1Job(final String jobName) {
+
+    // Check if a Jet Instance for UC1 is set.
+    if (this.uc1JetInstance == null) {
+      throw new IllegalStateException("Jet Instance is not set! "
+          + "Cannot start a hazelcast jet job for UC1.");
+    }
+
+    // Check if a Pipeline for UC1 is set.
+    if (this.uc1JetPipeline == null) {
+      throw new IllegalStateException(
+          "Hazelcast Pipeline is not set! Cannot start a hazelcast jet job for UC1.");
+    }
+
+    // Adds the job name and joins a job to the JetInstance defined in this factory
+    final JobConfig jobConfig = new JobConfig();
+    jobConfig.setName(jobName);
+    this.uc1JetInstance.newJobIfAbsent(this.uc1JetPipeline, jobConfig).join();
+  }
+
+  /////////////
+  // Layer 2 //
+  /////////////
+
+  /**
+   * Build a Hazelcast JetInstance used to run a job on.
+   *
+   * @param logger The logger specified for this JetInstance.
+   * @param bootstrapServerDefault Default bootstrap server in case no value can be derived from the
+   *        environment.
+   * @param hzKubernetesServiceDnsKey The kubernetes service dns key.
+   * @return A Uc1HazelcastJetFactory containing a set JetInstance.
+   */
+  public Uc1HazelcastJetFactory buildUc1JetInstanceFromEnv(final Logger logger,
+      final String bootstrapServerDefault,
+      final String hzKubernetesServiceDnsKey) {
+    this.uc1JetInstance = new JetInstanceBuilder()
+        .setConfigFromEnv(logger, bootstrapServerDefault, hzKubernetesServiceDnsKey)
+        .build();
+    return this;
+  }
+
+  /**
+   * Builds a Hazelcast Jet pipeline used for a JetInstance to run it as a job on. Needs the input
+   * topic and kafka properties defined in this factory beforehand.
+   *
+   * @return A Uc1HazelcastJetFactory containing a set pipeline.
+   */
+  public Uc1HazelcastJetFactory buildUc1Pipeline() {
+
+    // Check if Properties for the Kafka Input are set.
+    if (this.kafkaPropertiesForPipeline == null) {
+      throw new IllegalStateException(
+          "Kafka Properties for pipeline not set! Cannot build pipeline.");
+    }
+
+    // Check if the Kafka input topic is set.
+    if (this.kafkaInputTopic == null) {
+      throw new IllegalStateException("Kafka input topic for pipeline not set! "
+          + "Cannot build pipeline.");
+    }
+
+    // Build Pipeline Using the pipelineBuilder
+    final Uc1PipelineBuilder pipeBuilder = new Uc1PipelineBuilder();
+    this.uc1JetPipeline =
+        pipeBuilder.build(this.kafkaPropertiesForPipeline, this.kafkaInputTopic);
+    // Return Uc1HazelcastJetBuilder factory
+    return this;
+  }
+
+  /////////////
+  // Layer 3 //
+  /////////////
+
+  /**
+   * Sets kafka properties for pipeline used in this builder.
+   *
+   * @param kafkaProperties A properties object containing necessary values used for the hazelcast jet
+   *        kafka connection.
+   * @return The Uc1HazelcastJetBuilder factory with set kafkaPropertiesForPipeline.
+   */
+  public Uc1HazelcastJetFactory setCustomProperties(final Properties kafkaProperties) { // NOPMD
+    this.kafkaPropertiesForPipeline = kafkaProperties;
+    return this;
+  }
+
+  /**
+   * Sets kafka properties for pipeline used in this builder using environment variables.
+   *
+   * @param bootstrapServersDefault Default Bootstrap server in the case that no bootstrap server
+   *        can be fetched from the environment.
+   * @param schemaRegistryUrlDefault Default schema registry url in the case that no schema registry
+   *        url can be fetched from the environment.
+   * @return The Uc1HazelcastJetBuilder factory with set kafkaPropertiesForPipeline.
+   */
+  public Uc1HazelcastJetFactory setPropertiesFromEnv(final String bootstrapServersDefault, // NOPMD
+                                                     final String schemaRegistryUrlDefault,
+                                                     final String jobName) {
+    // Use KafkaPropertiesBuilder to build a properties object used for kafka
+    final KafkaPropertiesBuilder propsBuilder = new KafkaPropertiesBuilder();
+    final Properties kafkaProps =
+        propsBuilder.buildKafkaInputReadPropsFromEnv(bootstrapServersDefault,
+            schemaRegistryUrlDefault,
+            jobName,
+            StringDeserializer.class.getCanonicalName(),
+            KafkaAvroDeserializer.class.getCanonicalName());
+    this.kafkaPropertiesForPipeline = kafkaProps;
+    return this;
+  }
+
+  /**
+   * Sets the kafka input topic for the pipeline used in this builder.
+   *
+   * @param inputTopic The kafka topic used as the pipeline input.
+   * @return A Uc1HazelcastJetBuilder factory with a set kafkaInputTopic.
+   */
+  public Uc1HazelcastJetFactory setCustomKafkaInputTopic(final String inputTopic) { // NOPMD
+    this.kafkaInputTopic = inputTopic;
+    return this;
+  }
+
+  /**
+   * Sets the kafka input topic for the pipeline used in this builder using environment variables.
+   *
+   * @param defaultInputTopic The default kafka input topic used if no topic is specified by the
+   *        environment.
+   * @return A Uc1HazelcastJetBuilder factory with a set kafkaInputTopic.
+   */
+  public Uc1HazelcastJetFactory setKafkaInputTopicFromEnv(final String defaultInputTopic) { // NOPMD
+    this.kafkaInputTopic = Objects.requireNonNullElse(
+        System.getenv(ConfigurationKeys.KAFKA_INPUT_TOPIC),
+        defaultInputTopic);
+    return this;
+  }
+
+
+
+}
diff --git a/theodolite-benchmarks/uc1-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc1/hazelcastjet/Uc1PipelineBuilder.java b/theodolite-benchmarks/uc1-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc1/hazelcastjet/Uc1PipelineBuilder.java
new file mode 100644
index 0000000000000000000000000000000000000000..c02ea1e7ea7fb3f27bdbf818248678011a93f6a2
--- /dev/null
+++ b/theodolite-benchmarks/uc1-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc1/hazelcastjet/Uc1PipelineBuilder.java
@@ -0,0 +1,82 @@
+package rocks.theodolite.benchmarks.uc1.hazelcastjet;
+
+import static com.hazelcast.jet.pipeline.SinkBuilder.sinkBuilder;
+
+import com.hazelcast.jet.kafka.KafkaSources;
+import com.hazelcast.jet.pipeline.Pipeline;
+import com.hazelcast.jet.pipeline.Sink;
+import com.hazelcast.jet.pipeline.StreamSource;
+import com.hazelcast.jet.pipeline.StreamStage;
+import java.util.Map.Entry;
+import java.util.Properties;
+import rocks.theodolite.benchmarks.uc1.commons.DatabaseAdapter;
+import rocks.theodolite.benchmarks.uc1.commons.DatabaseWriter;
+import rocks.theodolite.benchmarks.uc1.commons.logger.LogWriterFactory;
+import titan.ccp.model.records.ActivePowerRecord;
+
+/**
+ * Builder to build a HazelcastJet Pipeline for UC1 which can be used for stream processing using
+ * Hazelcast Jet.
+ */
+public class Uc1PipelineBuilder {
+
+  private final DatabaseAdapter<String> databaseAdapter = LogWriterFactory.forJson();
+
+  /**
+   * Builds a pipeline which can be used for stream processing using Hazelcast Jet.
+   *
+   * @param kafkaPropsForPipeline Properties object containing the necessary Kafka attributes.
+   * @param kafkaInputTopic The name of the input topic used for the pipeline.
+   * @return A Hazelcast Jet pipeline which processes data for Uc1.
+   */
+  public Pipeline build(final Properties kafkaPropsForPipeline, final String kafkaInputTopic) {
+
+    // Define a new pipeline
+    final Pipeline pipe = Pipeline.create();
+
+    // Define the Kafka Source
+    final StreamSource<Entry<String, ActivePowerRecord>> kafkaSource =
+        KafkaSources.<String, ActivePowerRecord>kafka(kafkaPropsForPipeline, kafkaInputTopic);
+
+    // Extend UC1 topology to the pipeline
+    final StreamStage<String> uc1TopologyProduct = this.extendUc1Topology(pipe, kafkaSource);
+
+    // Add Sink: Logger
+    // Do not refactor this to just use the call
+    // (There is a problem with static calls in functions in hazelcastjet)
+    final DatabaseWriter<String> writer = this.databaseAdapter.getDatabaseWriter();
+    final Sink<String> sink = sinkBuilder(
+        "Sink into database", x -> writer)
+        .<String>receiveFn(DatabaseWriter::write)
+        .build();
+
+    uc1TopologyProduct.writeTo(sink);
+
+    return pipe;
+  }
+
+  /**
+   * Extends a blank Hazelcast Jet Pipeline with the UC1 topology defined by Theodolite.
+   *
+   * <p>
+   * UC1 takes {@code Entry<String,ActivePowerRecord>} objects and turns them into JSON strings
+   * using GSON.
+   * </p>
+   *
+   * @param pipe The blank hazelcast jet pipeline to extend the logic to.
+   * @param source A streaming source to fetch data from.
+   * @return A {@code StreamStage<String>} with the above definition of the String. It can be used
+   *         to be further modified or directly be written into a sink.
+   */
+  public StreamStage<String> extendUc1Topology(final Pipeline pipe,
+      final StreamSource<Entry<String, ActivePowerRecord>> source) {
+
+    // Build the pipeline topology
+    return pipe.readFrom(source)
+        .withNativeTimestamps(0)
+        .setLocalParallelism(1)
+        .setName("Convert content")
+        .map(Entry::getValue)
+        .map(this.databaseAdapter.getRecordConverter()::convert);
+  }
+}
diff --git a/theodolite-benchmarks/uc1-hazelcastjet/src/main/resources/META-INF/application.properties b/theodolite-benchmarks/uc1-hazelcastjet/src/main/resources/META-INF/application.properties
new file mode 100644
index 0000000000000000000000000000000000000000..e3371cc87e20e85e6e8c327955537e6e49dab86e
--- /dev/null
+++ b/theodolite-benchmarks/uc1-hazelcastjet/src/main/resources/META-INF/application.properties
@@ -0,0 +1,8 @@
+application.name=theodolite-uc1-application
+application.version=0.0.1
+
+kafka.bootstrap.servers=localhost:9092
+kafka.input.topic=input
+
+schema.registry.url=http://localhost:8081
+
diff --git a/theodolite-benchmarks/uc1-hazelcastjet/src/test/java/rocks/theodolite/benchmarks/uc1/hazelcast/Uc1PipelineTest.java b/theodolite-benchmarks/uc1-hazelcastjet/src/test/java/rocks/theodolite/benchmarks/uc1/hazelcast/Uc1PipelineTest.java
new file mode 100644
index 0000000000000000000000000000000000000000..525327ddbcdcddb6cf1bfe4e2d6be62d3384fc0c
--- /dev/null
+++ b/theodolite-benchmarks/uc1-hazelcastjet/src/test/java/rocks/theodolite/benchmarks/uc1/hazelcast/Uc1PipelineTest.java
@@ -0,0 +1,152 @@
+package rocks.theodolite.benchmarks.uc1.hazelcast;
+
+import com.hazelcast.jet.Jet;
+import com.hazelcast.jet.JetInstance;
+import com.hazelcast.jet.config.JetConfig;
+import com.hazelcast.jet.core.JetTestSupport;
+import com.hazelcast.jet.pipeline.Pipeline;
+import com.hazelcast.jet.pipeline.Sink;
+import com.hazelcast.jet.pipeline.StreamSource;
+import com.hazelcast.jet.pipeline.StreamStage;
+import com.hazelcast.jet.pipeline.test.AssertionCompletedException;
+import com.hazelcast.jet.pipeline.test.Assertions;
+import com.hazelcast.jet.pipeline.test.TestSources;
+import com.hazelcast.jet.test.SerialTest;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.concurrent.CompletionException;
+import com.hazelcast.logging.ILogger;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import rocks.theodolite.benchmarks.uc1.commons.DatabaseAdapter;
+import rocks.theodolite.benchmarks.uc1.commons.DatabaseWriter;
+import rocks.theodolite.benchmarks.uc1.commons.logger.LogWriterFactory;
+import rocks.theodolite.benchmarks.uc1.hazelcastjet.Uc1PipelineBuilder;
+import titan.ccp.model.records.ActivePowerRecord;
+
+import static com.hazelcast.jet.pipeline.SinkBuilder.sinkBuilder;
+import static com.hazelcast.logging.Logger.getLogger;
+
+/**
+ * Test methods for the Hazelcast Jet Implementation of UC1.
+ */
+@Category(SerialTest.class)
+public class Uc1PipelineTest extends JetTestSupport {
+
+  private JetInstance testInstance = null;
+  private Pipeline testPipeline = null;
+  private StreamStage<String> uc1Topology = null;
+
+  // Standard logger
+  private static final Logger LOGGER = org.slf4j.LoggerFactory.getLogger(Uc1PipelineTest.class);
+  // HazelcastJet Logger
+  private static final ILogger logger =  getLogger(Uc1PipelineTest.class);
+
+  private final DatabaseAdapter<String> databaseAdapter = LogWriterFactory.forJson();
+
+  /**
+   * Creates the JetInstance, defines a new Hazelcast Jet Pipeline and extends the UC1 topology.
+   * Allows for quick extension of tests.
+   */
+  @Before
+  public void buildUc1Pipeline() {
+
+    this.logger.info("Hazelcast Logger");
+    LOGGER.info("Standard Logger");
+
+
+    // Setup Configuration
+    final int testItemsPerSecond = 1;
+    final String testSensorName = "TEST_SENSOR";
+    final Double testValueInW = 10.0;
+
+    // Create mock jet instance with configuration
+    final String testClusterName = randomName();
+    final JetConfig testJetConfig = new JetConfig();
+//    testJetConfig.setProperty( "hazelcast.logging.type", "slf4j" );
+    testJetConfig.getHazelcastConfig().setClusterName(testClusterName);
+    this.testInstance = createJetMember(testJetConfig);
+
+
+    // Create a test source
+    final StreamSource<Entry<String, ActivePowerRecord>> testSource =
+        TestSources.itemStream(testItemsPerSecond, (timestamp, item) -> {
+          final ActivePowerRecord testRecord =
+              new ActivePowerRecord(testSensorName, timestamp, testValueInW);
+          final Entry<String, ActivePowerRecord> testEntry =
+              Map.entry(testSensorName, testRecord);
+          return testEntry;
+        });
+
+    // Create pipeline to test
+    final Uc1PipelineBuilder pipelineBuilder = new Uc1PipelineBuilder();
+    this.testPipeline = Pipeline.create();
+    this.uc1Topology =
+        pipelineBuilder.extendUc1Topology(this.testPipeline, testSource);
+
+    // Create DatabaseWriter sink
+    final DatabaseWriter<String> adapter = this.databaseAdapter.getDatabaseWriter();
+    final Sink<String> sink = sinkBuilder(
+        "database-sink", x -> adapter)
+        .<String>receiveFn(DatabaseWriter::write)
+        .build();
+
+//    Map Stage, can be used instead of sink
+//    StreamStage<String> log = uc1Topology.map(s -> {
+//        LOGGER.info(s);
+//        return s;
+//    });
+//    log.writeTo(sink);
+
+    //apply sink
+    this.uc1Topology.writeTo(sink);
+  }
+
+  /**
+   * UC1 Pipeline test to check if items are passed through at an acceptable rate.
+   */
+  @Test
+  public void test1Uc1PipelineElements() {
+
+    // Assertion Configuration
+    final int assertTimeoutSeconds = 6;
+    final int assertCollectedItems = 5;
+
+    LOGGER.info("Pipeline build successfully, starting test");
+
+    // Assertion
+    this.uc1Topology.apply(Assertions.assertCollectedEventually(assertTimeoutSeconds,
+        collection -> {
+      //print the newest Record
+//      LOGGER.info(collection.get(collection.size()-1));
+
+          // Run pipeline until 5th item
+          Assert.assertTrue("Not enough data arrived in the end",
+              collection.size() >= assertCollectedItems);
+        }));
+
+    // Test the UC1 Pipeline Recreation
+    try {
+      this.testInstance.newJob(this.testPipeline).join();
+      Assert.fail("Job should have completed with an AssertionCompletedException, "
+          + "but completed normally");
+    } catch (final CompletionException e) {
+      final String errorMsg = e.getCause().getMessage();
+      Assert.assertTrue(
+          "Job was expected to complete with AssertionCompletedException, but completed with: "
+              + e.getCause(),
+          errorMsg.contains(AssertionCompletedException.class.getName()));
+    }
+  }
+
+  @After
+  public void after() {
+    // Shuts down all running Jet Instances
+    Jet.shutdownAll();
+  }
+
+}
diff --git a/theodolite-benchmarks/uc2-flink/build.gradle b/theodolite-benchmarks/uc2-flink/build.gradle
index 0c50937f3378a8644a1551bcab1f79c173257794..ea4d2d8b285792b8ce66484d0d9c0907a7c49957 100644
--- a/theodolite-benchmarks/uc2-flink/build.gradle
+++ b/theodolite-benchmarks/uc2-flink/build.gradle
@@ -2,4 +2,4 @@ plugins {
   id 'theodolite.flink'
 }
 
-mainClassName = "rocks.theodolite.benchmarks.uc1.flink.HistoryServiceFlinkJob"
+mainClassName = "rocks.theodolite.benchmarks.uc2.flink.HistoryServiceFlinkJob"
diff --git a/theodolite-benchmarks/uc2-hazelcastjet/.settings/org.eclipse.jdt.ui.prefs b/theodolite-benchmarks/uc2-hazelcastjet/.settings/org.eclipse.jdt.ui.prefs
new file mode 100644
index 0000000000000000000000000000000000000000..b2a15f439cf1844efe56f1ac0d82a2884e66cb9d
--- /dev/null
+++ b/theodolite-benchmarks/uc2-hazelcastjet/.settings/org.eclipse.jdt.ui.prefs
@@ -0,0 +1,286 @@
+cleanup.add_all=false
+cleanup.add_default_serial_version_id=true
+cleanup.add_generated_serial_version_id=false
+cleanup.add_missing_annotations=true
+cleanup.add_missing_deprecated_annotations=true
+cleanup.add_missing_methods=false
+cleanup.add_missing_nls_tags=false
+cleanup.add_missing_override_annotations=true
+cleanup.add_missing_override_annotations_interface_methods=true
+cleanup.add_serial_version_id=false
+cleanup.always_use_blocks=true
+cleanup.always_use_parentheses_in_expressions=false
+cleanup.always_use_this_for_non_static_field_access=true
+cleanup.always_use_this_for_non_static_method_access=true
+cleanup.array_with_curly=false
+cleanup.arrays_fill=false
+cleanup.bitwise_conditional_expression=false
+cleanup.boolean_literal=false
+cleanup.boolean_value_rather_than_comparison=true
+cleanup.break_loop=false
+cleanup.collection_cloning=false
+cleanup.comparing_on_criteria=false
+cleanup.comparison_statement=false
+cleanup.controlflow_merge=false
+cleanup.convert_functional_interfaces=false
+cleanup.convert_to_enhanced_for_loop=true
+cleanup.convert_to_enhanced_for_loop_if_loop_var_used=true
+cleanup.convert_to_switch_expressions=false
+cleanup.correct_indentation=true
+cleanup.do_while_rather_than_while=true
+cleanup.double_negation=false
+cleanup.else_if=false
+cleanup.embedded_if=false
+cleanup.evaluate_nullable=false
+cleanup.extract_increment=false
+cleanup.format_source_code=true
+cleanup.format_source_code_changes_only=false
+cleanup.hash=false
+cleanup.if_condition=false
+cleanup.insert_inferred_type_arguments=false
+cleanup.instanceof=false
+cleanup.instanceof_keyword=false
+cleanup.invert_equals=false
+cleanup.join=false
+cleanup.lazy_logical_operator=false
+cleanup.make_local_variable_final=true
+cleanup.make_parameters_final=true
+cleanup.make_private_fields_final=true
+cleanup.make_type_abstract_if_missing_method=false
+cleanup.make_variable_declarations_final=true
+cleanup.map_cloning=false
+cleanup.merge_conditional_blocks=false
+cleanup.multi_catch=false
+cleanup.never_use_blocks=false
+cleanup.never_use_parentheses_in_expressions=true
+cleanup.no_string_creation=false
+cleanup.no_super=false
+cleanup.number_suffix=false
+cleanup.objects_equals=false
+cleanup.one_if_rather_than_duplicate_blocks_that_fall_through=true
+cleanup.operand_factorization=false
+cleanup.organize_imports=true
+cleanup.overridden_assignment=false
+cleanup.plain_replacement=false
+cleanup.precompile_regex=false
+cleanup.primitive_comparison=false
+cleanup.primitive_parsing=false
+cleanup.primitive_rather_than_wrapper=true
+cleanup.primitive_serialization=false
+cleanup.pull_out_if_from_if_else=false
+cleanup.pull_up_assignment=false
+cleanup.push_down_negation=false
+cleanup.qualify_static_field_accesses_with_declaring_class=false
+cleanup.qualify_static_member_accesses_through_instances_with_declaring_class=true
+cleanup.qualify_static_member_accesses_through_subtypes_with_declaring_class=true
+cleanup.qualify_static_member_accesses_with_declaring_class=true
+cleanup.qualify_static_method_accesses_with_declaring_class=false
+cleanup.reduce_indentation=false
+cleanup.redundant_comparator=false
+cleanup.redundant_falling_through_block_end=false
+cleanup.remove_private_constructors=true
+cleanup.remove_redundant_modifiers=false
+cleanup.remove_redundant_semicolons=true
+cleanup.remove_redundant_type_arguments=true
+cleanup.remove_trailing_whitespaces=true
+cleanup.remove_trailing_whitespaces_all=true
+cleanup.remove_trailing_whitespaces_ignore_empty=false
+cleanup.remove_unnecessary_array_creation=false
+cleanup.remove_unnecessary_casts=true
+cleanup.remove_unnecessary_nls_tags=true
+cleanup.remove_unused_imports=true
+cleanup.remove_unused_local_variables=false
+cleanup.remove_unused_private_fields=true
+cleanup.remove_unused_private_members=false
+cleanup.remove_unused_private_methods=true
+cleanup.remove_unused_private_types=true
+cleanup.return_expression=false
+cleanup.simplify_lambda_expression_and_method_ref=false
+cleanup.single_used_field=false
+cleanup.sort_members=false
+cleanup.sort_members_all=false
+cleanup.standard_comparison=false
+cleanup.static_inner_class=false
+cleanup.strictly_equal_or_different=false
+cleanup.stringbuffer_to_stringbuilder=false
+cleanup.stringbuilder=false
+cleanup.stringbuilder_for_local_vars=true
+cleanup.stringconcat_to_textblock=false
+cleanup.substring=false
+cleanup.switch=false
+cleanup.system_property=false
+cleanup.system_property_boolean=false
+cleanup.system_property_file_encoding=false
+cleanup.system_property_file_separator=false
+cleanup.system_property_line_separator=false
+cleanup.system_property_path_separator=false
+cleanup.ternary_operator=false
+cleanup.try_with_resource=false
+cleanup.unlooped_while=false
+cleanup.unreachable_block=false
+cleanup.use_anonymous_class_creation=false
+cleanup.use_autoboxing=false
+cleanup.use_blocks=true
+cleanup.use_blocks_only_for_return_and_throw=false
+cleanup.use_directly_map_method=false
+cleanup.use_lambda=true
+cleanup.use_parentheses_in_expressions=true
+cleanup.use_string_is_blank=false
+cleanup.use_this_for_non_static_field_access=true
+cleanup.use_this_for_non_static_field_access_only_if_necessary=false
+cleanup.use_this_for_non_static_method_access=true
+cleanup.use_this_for_non_static_method_access_only_if_necessary=false
+cleanup.use_unboxing=false
+cleanup.use_var=false
+cleanup.useless_continue=false
+cleanup.useless_return=false
+cleanup.valueof_rather_than_instantiation=false
+cleanup_profile=_CAU-SE-Style
+cleanup_settings_version=2
+eclipse.preferences.version=1
+editor_save_participant_org.eclipse.jdt.ui.postsavelistener.cleanup=true
+formatter_profile=_CAU-SE-Style
+formatter_settings_version=21
+org.eclipse.jdt.ui.ignorelowercasenames=true
+org.eclipse.jdt.ui.importorder=
+org.eclipse.jdt.ui.ondemandthreshold=99
+org.eclipse.jdt.ui.staticondemandthreshold=99
+org.eclipse.jdt.ui.text.custom_code_templates=
+sp_cleanup.add_all=false
+sp_cleanup.add_default_serial_version_id=true
+sp_cleanup.add_generated_serial_version_id=false
+sp_cleanup.add_missing_annotations=true
+sp_cleanup.add_missing_deprecated_annotations=true
+sp_cleanup.add_missing_methods=false
+sp_cleanup.add_missing_nls_tags=false
+sp_cleanup.add_missing_override_annotations=true
+sp_cleanup.add_missing_override_annotations_interface_methods=true
+sp_cleanup.add_serial_version_id=false
+sp_cleanup.always_use_blocks=true
+sp_cleanup.always_use_parentheses_in_expressions=false
+sp_cleanup.always_use_this_for_non_static_field_access=true
+sp_cleanup.always_use_this_for_non_static_method_access=true
+sp_cleanup.array_with_curly=false
+sp_cleanup.arrays_fill=false
+sp_cleanup.bitwise_conditional_expression=false
+sp_cleanup.boolean_literal=false
+sp_cleanup.boolean_value_rather_than_comparison=false
+sp_cleanup.break_loop=false
+sp_cleanup.collection_cloning=false
+sp_cleanup.comparing_on_criteria=true
+sp_cleanup.comparison_statement=false
+sp_cleanup.controlflow_merge=false
+sp_cleanup.convert_functional_interfaces=false
+sp_cleanup.convert_to_enhanced_for_loop=true
+sp_cleanup.convert_to_enhanced_for_loop_if_loop_var_used=false
+sp_cleanup.convert_to_switch_expressions=false
+sp_cleanup.correct_indentation=true
+sp_cleanup.do_while_rather_than_while=false
+sp_cleanup.double_negation=false
+sp_cleanup.else_if=false
+sp_cleanup.embedded_if=false
+sp_cleanup.evaluate_nullable=false
+sp_cleanup.extract_increment=false
+sp_cleanup.format_source_code=true
+sp_cleanup.format_source_code_changes_only=false
+sp_cleanup.hash=false
+sp_cleanup.if_condition=false
+sp_cleanup.insert_inferred_type_arguments=false
+sp_cleanup.instanceof=false
+sp_cleanup.instanceof_keyword=false
+sp_cleanup.invert_equals=false
+sp_cleanup.join=false
+sp_cleanup.lazy_logical_operator=false
+sp_cleanup.make_local_variable_final=true
+sp_cleanup.make_parameters_final=true
+sp_cleanup.make_private_fields_final=true
+sp_cleanup.make_type_abstract_if_missing_method=false
+sp_cleanup.make_variable_declarations_final=true
+sp_cleanup.map_cloning=false
+sp_cleanup.merge_conditional_blocks=false
+sp_cleanup.multi_catch=false
+sp_cleanup.never_use_blocks=false
+sp_cleanup.never_use_parentheses_in_expressions=true
+sp_cleanup.no_string_creation=false
+sp_cleanup.no_super=false
+sp_cleanup.number_suffix=false
+sp_cleanup.objects_equals=false
+sp_cleanup.on_save_use_additional_actions=true
+sp_cleanup.one_if_rather_than_duplicate_blocks_that_fall_through=false
+sp_cleanup.operand_factorization=false
+sp_cleanup.organize_imports=true
+sp_cleanup.overridden_assignment=false
+sp_cleanup.plain_replacement=false
+sp_cleanup.precompile_regex=false
+sp_cleanup.primitive_comparison=false
+sp_cleanup.primitive_parsing=false
+sp_cleanup.primitive_rather_than_wrapper=false
+sp_cleanup.primitive_serialization=false
+sp_cleanup.pull_out_if_from_if_else=false
+sp_cleanup.pull_up_assignment=false
+sp_cleanup.push_down_negation=false
+sp_cleanup.qualify_static_field_accesses_with_declaring_class=false
+sp_cleanup.qualify_static_member_accesses_through_instances_with_declaring_class=true
+sp_cleanup.qualify_static_member_accesses_through_subtypes_with_declaring_class=true
+sp_cleanup.qualify_static_member_accesses_with_declaring_class=true
+sp_cleanup.qualify_static_method_accesses_with_declaring_class=false
+sp_cleanup.reduce_indentation=false
+sp_cleanup.redundant_comparator=false
+sp_cleanup.redundant_falling_through_block_end=false
+sp_cleanup.remove_private_constructors=true
+sp_cleanup.remove_redundant_modifiers=false
+sp_cleanup.remove_redundant_semicolons=false
+sp_cleanup.remove_redundant_type_arguments=false
+sp_cleanup.remove_trailing_whitespaces=true
+sp_cleanup.remove_trailing_whitespaces_all=true
+sp_cleanup.remove_trailing_whitespaces_ignore_empty=false
+sp_cleanup.remove_unnecessary_array_creation=false
+sp_cleanup.remove_unnecessary_casts=true
+sp_cleanup.remove_unnecessary_nls_tags=true
+sp_cleanup.remove_unused_imports=true
+sp_cleanup.remove_unused_local_variables=false
+sp_cleanup.remove_unused_private_fields=true
+sp_cleanup.remove_unused_private_members=false
+sp_cleanup.remove_unused_private_methods=true
+sp_cleanup.remove_unused_private_types=true
+sp_cleanup.return_expression=false
+sp_cleanup.simplify_lambda_expression_and_method_ref=false
+sp_cleanup.single_used_field=false
+sp_cleanup.sort_members=false
+sp_cleanup.sort_members_all=false
+sp_cleanup.standard_comparison=false
+sp_cleanup.static_inner_class=false
+sp_cleanup.strictly_equal_or_different=false
+sp_cleanup.stringbuffer_to_stringbuilder=false
+sp_cleanup.stringbuilder=false
+sp_cleanup.stringbuilder_for_local_vars=false
+sp_cleanup.stringconcat_to_textblock=false
+sp_cleanup.substring=false
+sp_cleanup.switch=false
+sp_cleanup.system_property=false
+sp_cleanup.system_property_boolean=false
+sp_cleanup.system_property_file_encoding=false
+sp_cleanup.system_property_file_separator=false
+sp_cleanup.system_property_line_separator=false
+sp_cleanup.system_property_path_separator=false
+sp_cleanup.ternary_operator=false
+sp_cleanup.try_with_resource=false
+sp_cleanup.unlooped_while=false
+sp_cleanup.unreachable_block=false
+sp_cleanup.use_anonymous_class_creation=false
+sp_cleanup.use_autoboxing=false
+sp_cleanup.use_blocks=true
+sp_cleanup.use_blocks_only_for_return_and_throw=false
+sp_cleanup.use_directly_map_method=false
+sp_cleanup.use_lambda=true
+sp_cleanup.use_parentheses_in_expressions=true
+sp_cleanup.use_string_is_blank=false
+sp_cleanup.use_this_for_non_static_field_access=true
+sp_cleanup.use_this_for_non_static_field_access_only_if_necessary=false
+sp_cleanup.use_this_for_non_static_method_access=true
+sp_cleanup.use_this_for_non_static_method_access_only_if_necessary=false
+sp_cleanup.use_unboxing=false
+sp_cleanup.use_var=false
+sp_cleanup.useless_continue=true
+sp_cleanup.useless_return=true
+sp_cleanup.valueof_rather_than_instantiation=false
diff --git a/theodolite-benchmarks/uc2-hazelcastjet/.settings/qa.eclipse.plugin.checkstyle.prefs b/theodolite-benchmarks/uc2-hazelcastjet/.settings/qa.eclipse.plugin.checkstyle.prefs
new file mode 100644
index 0000000000000000000000000000000000000000..4fa4266c755f4ff8da465ab7341cd70ffb24ecf7
--- /dev/null
+++ b/theodolite-benchmarks/uc2-hazelcastjet/.settings/qa.eclipse.plugin.checkstyle.prefs
@@ -0,0 +1,4 @@
+configFilePath=../config/checkstyle.xml
+customModulesJarPaths=
+eclipse.preferences.version=1
+enabled=false
diff --git a/theodolite-benchmarks/uc2-hazelcastjet/.settings/qa.eclipse.plugin.pmd.prefs b/theodolite-benchmarks/uc2-hazelcastjet/.settings/qa.eclipse.plugin.pmd.prefs
new file mode 100644
index 0000000000000000000000000000000000000000..40bfd0ecdbbe324bb54e4b9f9f32ba95cf5b0c2a
--- /dev/null
+++ b/theodolite-benchmarks/uc2-hazelcastjet/.settings/qa.eclipse.plugin.pmd.prefs
@@ -0,0 +1,4 @@
+customRulesJars=
+eclipse.preferences.version=1
+enabled=false
+ruleSetFilePath=../config/pmd.xml
diff --git a/theodolite-benchmarks/uc2-hazelcastjet/Dockerfile b/theodolite-benchmarks/uc2-hazelcastjet/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..4a7680e29042025d48c4c37a8f424871fe48bbf8
--- /dev/null
+++ b/theodolite-benchmarks/uc2-hazelcastjet/Dockerfile
@@ -0,0 +1,7 @@
+FROM openjdk:11-slim
+
+ADD build/distributions/uc2-hazelcastjet.tar /
+
+
+CMD  JAVA_OPTS="$JAVA_OPTS -Dorg.slf4j.simpleLogger.defaultLogLevel=$LOG_LEVEL" \
+     /uc2-hazelcastjet/bin/uc2-hazelcastjet
\ No newline at end of file
diff --git a/theodolite-benchmarks/uc2-hazelcastjet/build.gradle b/theodolite-benchmarks/uc2-hazelcastjet/build.gradle
new file mode 100644
index 0000000000000000000000000000000000000000..ef1597413570a5d7b3af8538ced8d4a98d4fa6f8
--- /dev/null
+++ b/theodolite-benchmarks/uc2-hazelcastjet/build.gradle
@@ -0,0 +1,5 @@
+plugins {
+  id 'theodolite.hazelcastjet'
+}
+
+mainClassName = "rocks.theodolite.benchmarks.uc2.hazelcastjet.HistoryService"
diff --git a/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc2/hazelcastjet/HistoryService.java b/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc2/hazelcastjet/HistoryService.java
new file mode 100644
index 0000000000000000000000000000000000000000..f382978b714fdfdff6c190339c2ed23a2e037069
--- /dev/null
+++ b/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc2/hazelcastjet/HistoryService.java
@@ -0,0 +1,70 @@
+package rocks.theodolite.benchmarks.uc2.hazelcastjet;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * A microservice that manages the history and, therefore, stores and aggregates incoming
+ * measurements.
+ */
+public class HistoryService {
+
+  private static final Logger LOGGER = LoggerFactory.getLogger(HistoryService.class);
+
+  // Hazelcast settings (default)
+  private static final String HZ_KUBERNETES_SERVICE_DNS_KEY = "service-dns";
+  private static final String BOOTSTRAP_SERVER_DEFAULT = "localhost:5701";
+
+  // Kafka settings (default)
+  private static final String KAFKA_BOOTSTRAP_DEFAULT = "localhost:9092";
+  private static final String SCHEMA_REGISTRY_URL_DEFAULT = "http://localhost:8081";
+  private static final String KAFKA_INPUT_TOPIC_DEFAULT = "input";
+  private static final String KAFKA_OUTPUT_TOPIC_DEFAULT = "output";
+
+  // UC2 specific (default)
+  private static final String DOWNSAMPLE_INTERVAL_DEFAULT_MS = "60000";
+
+  // Job name (default)
+  private static final String JOB_NAME = "uc2-hazelcastjet";
+
+  /**
+   * Entrypoint for UC2 using Gradle Run.
+   */
+  public static void main(final String[] args) {
+    final HistoryService uc2HistoryService = new HistoryService();
+    try {
+      uc2HistoryService.run();
+    } catch (final Exception e) { // NOPMD
+      LOGGER.error("ABORT MISSION!: {}", e);
+    }
+  }
+
+  /**
+   * Start a UC2 service.
+   *
+   * @throws Exception This Exception occurs if the Uc2HazelcastJetFactory is used in the wrong way.
+   *         Detailed data is provided once an Exception occurs.
+   */
+  public void run() throws Exception { // NOPMD
+    this.createHazelcastJetApplication();
+  }
+
+  /**
+   * Creates a Hazelcast Jet Application for UC2 using the Uc2HazelcastJetFactory.
+   *
+   * @throws Exception This Exception occurs if the Uc2HazelcastJetFactory is used in the wrong way.
+   *         Detailed data is provided once an Exception occurs.
+   */
+  private void createHazelcastJetApplication() throws Exception { // NOPMD
+    new Uc2HazelcastJetFactory()
+        .setReadPropertiesFromEnv(KAFKA_BOOTSTRAP_DEFAULT, SCHEMA_REGISTRY_URL_DEFAULT,JOB_NAME)
+        .setWritePropertiesFromEnv(KAFKA_BOOTSTRAP_DEFAULT,SCHEMA_REGISTRY_URL_DEFAULT)
+        .setKafkaInputTopicFromEnv(KAFKA_INPUT_TOPIC_DEFAULT)
+        .setKafkaOutputTopicFromEnv(KAFKA_OUTPUT_TOPIC_DEFAULT)
+        .setDownsampleIntervalFromEnv(DOWNSAMPLE_INTERVAL_DEFAULT_MS)
+        .buildUc2Pipeline()
+        .buildUc2JetInstanceFromEnv(LOGGER, BOOTSTRAP_SERVER_DEFAULT, HZ_KUBERNETES_SERVICE_DNS_KEY)
+        .runUc2Job(JOB_NAME);
+  }
+
+}
diff --git a/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc2/hazelcastjet/Uc2HazelcastJetFactory.java b/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc2/hazelcastjet/Uc2HazelcastJetFactory.java
new file mode 100644
index 0000000000000000000000000000000000000000..143b154f3726e75d2842766b49bd2e26f57ce39b
--- /dev/null
+++ b/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc2/hazelcastjet/Uc2HazelcastJetFactory.java
@@ -0,0 +1,301 @@
+package rocks.theodolite.benchmarks.uc2.hazelcastjet;
+
+import com.google.common.math.StatsAccumulator;
+import com.hazelcast.jet.JetInstance;
+import com.hazelcast.jet.config.JobConfig;
+import com.hazelcast.jet.pipeline.Pipeline;
+import io.confluent.kafka.serializers.KafkaAvroDeserializer;
+import java.util.Objects;
+import java.util.Properties;
+import org.apache.kafka.common.serialization.StringDeserializer;
+import org.apache.kafka.common.serialization.StringSerializer;
+import org.slf4j.Logger;
+import rocks.theodolite.benchmarks.commons.hazelcastjet.ConfigurationKeys;
+import rocks.theodolite.benchmarks.commons.hazelcastjet.JetInstanceBuilder;
+import rocks.theodolite.benchmarks.commons.hazelcastjet.KafkaPropertiesBuilder;
+import rocks.theodolite.benchmarks.uc2.hazelcastjet.uc2specifics.StatsAccumulatorSerializer;
+
+/**
+ * A Hazelcast Jet factory which can build a Hazelcast Jet Instance and Pipeline for the UC2
+ * benchmark and lets you start the Hazelcast Jet job. The JetInstance can be built directly as the
+ * Hazelcast Config is managed internally. In order to build the Pipeline, you first have to build
+ * the Read and Write Properties, set the input and output topic, and set the downsample interval
+ * which can be done using internal functions of this factory. Outside data only refers to custom
+ * values or default values in case data of the environment cannot be fetched.
+ */
+public class Uc2HazelcastJetFactory {
+
+  // Information per History Service
+  private Properties kafkaReadPropsForPipeline;
+  private Properties kafkaWritePropsForPipeline;
+  private String kafkaInputTopic;
+  private String kafkaOutputTopic;
+  private JetInstance uc2JetInstance;
+  private Pipeline uc2JetPipeline;
+  // UC2 specific
+  private int downsampleInterval;
+
+  /////////////////////////////////////
+  // Layer 1 - Hazelcast Jet Run Job //
+  /////////////////////////////////////
+
+  /**
+   * Needs a JetInstance and Pipeline defined in this factory. Adds the pipeline to the existing
+   * JetInstance as a job.
+   *
+   * @param jobName The name of the job.
+   */
+  public void runUc2Job(final String jobName) {
+
+    // Check if a Jet Instance for UC2 is set.
+    if (this.uc2JetInstance == null) {
+      throw new IllegalStateException("Jet Instance is not set! "
+          + "Cannot start a hazelcast jet job for UC2.");
+    }
+
+    // Check if a Pipeline for UC2 is set.
+    if (this.uc2JetPipeline == null) {
+      throw new IllegalStateException(
+          "Hazelcast Pipeline is not set! Cannot start a hazelcast jet job for UC2.");
+    }
+
+    // Adds the job name and joins a job to the JetInstance defined in this factory
+    final JobConfig jobConfig = new JobConfig();
+    jobConfig.registerSerializer(StatsAccumulator.class, StatsAccumulatorSerializer.class);
+    jobConfig.setName(jobName);
+    this.uc2JetInstance.newJobIfAbsent(this.uc2JetPipeline, jobConfig).join();
+  }
+
+  /////////////
+  // Layer 2 //
+  /////////////
+
+  /**
+   * Build a Hazelcast JetInstance used to run a job on.
+   *
+   * @param logger The logger specified for this JetInstance.
+   * @param bootstrapServerDefault Default bootstrap server in case no value can be derived from the
+   *        environment.
+   * @param hzKubernetesServiceDnsKey The kubernetes service dns key.
+   * @return A Uc2HazelcastJetFactory containing a set JetInstance.
+   */
+  public Uc2HazelcastJetFactory buildUc2JetInstanceFromEnv(final Logger logger,
+      final String bootstrapServerDefault,
+      final String hzKubernetesServiceDnsKey) {
+    this.uc2JetInstance = new JetInstanceBuilder()
+        .setConfigFromEnv(logger, bootstrapServerDefault, hzKubernetesServiceDnsKey)
+        .build();
+    return this;
+  }
+
+  /**
+   * Builds a Hazelcast Jet pipeline used for a JetInstance to run it as a job on. Needs the input
+   * topic and kafka properties defined in this factory beforehand.
+   *
+   * @return A Uc2HazelcastJetFactory containing a set pipeline.
+   * @throws Exception If the input topic or the kafka properties are not defined, the pipeline
+   *         cannot be built.
+   */
+  public Uc2HazelcastJetFactory buildUc2Pipeline() throws IllegalStateException { // NOPMD
+
+    final String defaultPipelineWarning = "Cannot build pipeline."; // NOPMD
+
+    // Check if Properties for the Kafka Input are set.
+    if (this.kafkaReadPropsForPipeline == null) {
+      throw new IllegalStateException("Kafka Read Properties for pipeline not set! "
+          + defaultPipelineWarning);
+    }
+
+    // Check if Properties for the Kafka Output are set.
+    if (this.kafkaWritePropsForPipeline == null) {
+      throw new IllegalStateException("Kafka Write Properties for pipeline not set! "
+          + defaultPipelineWarning);
+    }
+
+    // Check if the Kafka input topic is set.
+    if (this.kafkaInputTopic == null) {
+      throw new IllegalStateException("Kafka input topic for pipeline not set! "
+          + defaultPipelineWarning);
+    }
+
+    // Check if the Kafka output topic is set.
+    if (this.kafkaOutputTopic == null) {
+      throw new IllegalStateException("kafka output topic for pipeline not set! "
+          + defaultPipelineWarning);
+    }
+
+    // Check if the downsampleInterval (tumbling window time) is set.
+    if (this.downsampleInterval <= 0) {
+      throw new IllegalStateException(
+          "downsample interval for pipeline not set or not bigger than 0! "
+              + defaultPipelineWarning);
+    }
+
+    // Build Pipeline Using the pipelineBuilder
+    final Uc2PipelineBuilder pipeBuilder = new Uc2PipelineBuilder();
+    this.uc2JetPipeline =
+        pipeBuilder.build(this.kafkaReadPropsForPipeline, this.kafkaWritePropsForPipeline,
+            this.kafkaInputTopic, this.kafkaOutputTopic, this.downsampleInterval);
+    // Return Uc2HazelcastJetBuilder factory
+    return this;
+  }
+
+  /////////////
+  // Layer 3 //
+  /////////////
+
+  /**
+   * Sets kafka read properties for pipeline used in this builder.
+   *
+   * @param kafkaReadProperties A properties object containing necessary values used for the
+   *        Hazelcast Jet Kafka connection to read data.
+   * @return The Uc2HazelcastJetBuilder factory with set kafkaReadPropsForPipeline.
+   */
+  public Uc2HazelcastJetFactory setCustomReadProperties(// NOPMD
+      final Properties kafkaReadProperties) {
+    this.kafkaReadPropsForPipeline = kafkaReadProperties;
+    return this;
+  }
+
+  /**
+   * Sets kafka write properties for pipeline used in this builder.
+   *
+   * @param kafkaWriteProperties A properties object containing necessary values used for the
+   *        Hazelcast Jet Kafka connection to write data.
+   * @return The Uc2HazelcastJetBuilder factory with set kafkaWritePropsForPipeline.
+   */
+  public Uc2HazelcastJetFactory setCustomWriteProperties(// NOPMD
+      final Properties kafkaWriteProperties) {
+    this.kafkaWritePropsForPipeline = kafkaWriteProperties;
+    return this;
+  }
+
+  /**
+   * Sets kafka read properties for pipeline used in this builder using environment variables.
+   *
+   * @param bootstrapServersDefault Default Bootstrap server in the case that no bootstrap server
+   *        can be fetched from the environment.
+   * @param schemaRegistryUrlDefault Default schema registry url in the case that no schema registry
+   *        url can be fetched from the environment.
+   * @return The Uc2HazelcastJetBuilder factory with set kafkaReadPropertiesForPipeline.
+   */
+  public Uc2HazelcastJetFactory setReadPropertiesFromEnv(// NOPMD
+                                                         final String bootstrapServersDefault,
+                                                         final String schemaRegistryUrlDefault,
+                                                         final String jobName) {
+    // Use KafkaPropertiesBuilder to build a properties object used for kafka
+    final KafkaPropertiesBuilder propsBuilder = new KafkaPropertiesBuilder();
+    final Properties kafkaReadProps =
+        propsBuilder.buildKafkaInputReadPropsFromEnv(bootstrapServersDefault,
+            schemaRegistryUrlDefault,
+            jobName,
+            StringDeserializer.class.getCanonicalName(),
+            KafkaAvroDeserializer.class.getCanonicalName());
+    this.kafkaReadPropsForPipeline = kafkaReadProps;
+    return this;
+  }
+
+  /**
+   * Sets kafka write properties for pipeline used in this builder using environment variables.
+   *
+   * @param bootstrapServersDefault Default Bootstrap server in the case that no bootstrap server
+   *        can be fetched from the environment.
+   * @return The Uc2HazelcastJetBuilder factory with set kafkaWritePropsForPipeline.
+   */
+  public Uc2HazelcastJetFactory setWritePropertiesFromEnv(// NOPMD
+      final String bootstrapServersDefault, final String schemaRegistryUrlDefault) {
+    // Use KafkaPropertiesBuilder to build a properties object used for kafka
+    final KafkaPropertiesBuilder propsBuilder = new KafkaPropertiesBuilder();
+    final Properties kafkaWriteProps =
+        propsBuilder.buildKafkaWritePropsFromEnv(bootstrapServersDefault,
+            schemaRegistryUrlDefault,
+            StringSerializer.class.getCanonicalName(),
+            StringSerializer.class.getCanonicalName());
+    this.kafkaWritePropsForPipeline = kafkaWriteProps;
+    return this;
+  }
+
+  /**
+   * Sets the kafka input topic for the pipeline used in this builder.
+   *
+   * @param inputTopic The kafka topic used as the pipeline input.
+   * @return A Uc2HazelcastJetBuilder factory with a set kafkaInputTopic.
+   */
+  public Uc2HazelcastJetFactory setCustomKafkaInputTopic(// NOPMD
+      final String inputTopic) {
+    this.kafkaInputTopic = inputTopic;
+    return this;
+  }
+
+  /**
+   * Sets the kafka output topic for the pipeline used in this builder.
+   *
+   * @param outputTopic The kafka topic used as the pipeline output.
+   * @return A Uc2HazelcastJetBuilder factory with a set kafkaOutputTopic.
+   */
+  public Uc2HazelcastJetFactory setCustomKafkaOutputTopic(final String outputTopic) { // NOPMD
+    this.kafkaOutputTopic = outputTopic;
+    return this;
+  }
+
+
+  /**
+   * Sets the kafka input topic for the pipeline used in this builder using environment variables.
+   *
+   * @param defaultInputTopic The default kafka input topic used if no topic is specified by the
+   *        environment.
+   * @return A Uc2HazelcastJetBuilder factory with a set kafkaInputTopic.
+   */
+  public Uc2HazelcastJetFactory setKafkaInputTopicFromEnv(// NOPMD
+      final String defaultInputTopic) {
+    this.kafkaInputTopic = Objects.requireNonNullElse(
+        System.getenv(ConfigurationKeys.KAFKA_INPUT_TOPIC),
+        defaultInputTopic);
+    return this;
+  }
+
+  /**
+   * Sets the kafka output topic for the pipeline used in this builder using environment variables.
+   *
+   * @param defaultOutputTopic The default kafka output topic used if no topic is specified by the
+   *        environment.
+   * @return A Uc2HazelcastJetBuilder factory with a set kafkaOutputTopic.
+   */
+  public Uc2HazelcastJetFactory setKafkaOutputTopicFromEnv(// NOPMD
+      final String defaultOutputTopic) {
+    this.kafkaOutputTopic = Objects.requireNonNullElse(
+        System.getenv(ConfigurationKeys.KAFKA_OUTPUT_TOPIC),
+        defaultOutputTopic);
+    return this;
+  }
+
+  /**
+   * Sets the downsample interval for the pipeline used in this builder.
+   *
+   * @param downsampleInterval the downsample interval to be used for this pipeline.
+   * @return A Uc2HazelcastJetFactory with a set downsampleInterval.
+   */
+  public Uc2HazelcastJetFactory setCustomDownsampleInterval(// NOPMD
+      final int downsampleInterval) {
+    this.downsampleInterval = downsampleInterval;
+    return this;
+  }
+
+  /**
+   * Sets the downsample interval for the pipeline used in this builder from the environment.
+   *
+   * @param defaultDownsampleInterval the default downsample interval to be used for this pipeline
+   *        when none is set in the environment.
+   * @return A Uc2HazelcastJetFactory with a set downsampleInterval.
+   */
+  public Uc2HazelcastJetFactory setDownsampleIntervalFromEnv(// NOPMD
+      final String defaultDownsampleInterval) {
+    final String downsampleInterval = Objects.requireNonNullElse(
+        System.getenv(ConfigurationKeys.DOWNSAMPLE_INTERVAL),
+        defaultDownsampleInterval);
+    final int downsampleIntervalNumber = Integer.parseInt(downsampleInterval);
+    this.downsampleInterval = downsampleIntervalNumber;
+    return this;
+  }
+
+}
diff --git a/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc2/hazelcastjet/Uc2PipelineBuilder.java b/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc2/hazelcastjet/Uc2PipelineBuilder.java
new file mode 100644
index 0000000000000000000000000000000000000000..73377de6122d4a723c5dbbcb8198fa814c4bed1e
--- /dev/null
+++ b/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc2/hazelcastjet/Uc2PipelineBuilder.java
@@ -0,0 +1,135 @@
+package rocks.theodolite.benchmarks.uc2.hazelcastjet;
+
+import com.google.common.math.Stats;
+import com.google.common.math.StatsAccumulator;
+import com.hazelcast.jet.aggregate.AggregateOperation;
+import com.hazelcast.jet.aggregate.AggregateOperation1;
+import com.hazelcast.jet.kafka.KafkaSinks;
+import com.hazelcast.jet.kafka.KafkaSources;
+import com.hazelcast.jet.pipeline.Pipeline;
+import com.hazelcast.jet.pipeline.Sinks;
+import com.hazelcast.jet.pipeline.StreamSource;
+import com.hazelcast.jet.pipeline.StreamStage;
+import com.hazelcast.jet.pipeline.WindowDefinition;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Properties;
+import rocks.theodolite.benchmarks.uc2.hazelcastjet.uc2specifics.StatsAccumulatorSupplier;
+import titan.ccp.model.records.ActivePowerRecord;
+
+/**
+ * Builder to build a HazelcastJet Pipeline for UC2 which can be used for stream processing using
+ * Hazelcast Jet.
+ */
+public class Uc2PipelineBuilder {
+
+  /**
+   * Builds a pipeline which can be used for stream processing using Hazelcast Jet.
+   *
+   * @param kafkaReadPropsForPipeline Properties Object containing the necessary kafka reads
+   *        attributes.
+   * @param kafkaWritePropsForPipeline Properties Object containing the necessary kafka write
+   *        attributes.
+   * @param kafkaInputTopic The name of the input topic used for the pipeline.
+   * @param kafkaOutputTopic The name of the output topic used for the pipeline.
+   * @param downsampleIntervalInMs The window length of the tumbling window used in the aggregation
+   *        of this pipeline.
+   * @return a Pipeline which can be used in a Hazelcast Jet Instance to process data
+   *         for UC2.
+   */
+  public Pipeline build(final Properties kafkaReadPropsForPipeline,
+      final Properties kafkaWritePropsForPipeline, final String kafkaInputTopic,
+      final String kafkaOutputTopic,
+      final int downsampleIntervalInMs) {
+
+    // Define a new pipeline
+    final Pipeline pipe = Pipeline.create();
+
+    // Define the Kafka Source
+    final StreamSource<Entry<String, ActivePowerRecord>> kafkaSource =
+        KafkaSources.<String, ActivePowerRecord>kafka(kafkaReadPropsForPipeline, kafkaInputTopic);
+
+    // Extend UC2 topology to the pipeline
+    final StreamStage<Map.Entry<String, String>> uc2TopologyProduct =
+        this.extendUc2Topology(pipe, kafkaSource, downsampleIntervalInMs);
+
+    // Add Sink1: Logger
+    uc2TopologyProduct.writeTo(Sinks.logger());
+    // Add Sink2: Write back to kafka for the final benchmark
+    uc2TopologyProduct.writeTo(KafkaSinks.<String, String>kafka(
+        kafkaWritePropsForPipeline, kafkaOutputTopic));
+
+    return pipe;
+  }
+
+  /**
+   * Extends to a blank Hazelcast Jet Pipeline the UC2 topology defined by theodolite.
+   *
+   * <p>
+   * UC2 takes {@code ActivePowerRecord} objects, groups them by keys, windows them in a tumbling
+   * window and aggregates them into {@code Stats} objects. The final map returns an
+   * {@code Entry<String,String>} where the key is the key of the group and the String is the
+   * {@code .toString()} representation of the {@code Stats} object.
+   * </p>
+   *
+   * @param pipe The blank hazelcast jet pipeline to extend the logic to.
+   * @param source A streaming source to fetch data from.
+   * @param downsampleIntervalInMs The size of the tumbling window.
+   * @return A {@code StreamStage<Map.Entry<String,String>>} with the above definition of the key
+   *         and value of the Entry object. It can be used to be further modified or directly be
+   *         written into a sink.
+   */
+  public StreamStage<Map.Entry<String, String>> extendUc2Topology(final Pipeline pipe,
+      final StreamSource<Entry<String, ActivePowerRecord>> source,
+      final int downsampleIntervalInMs) {
+    // Build the pipeline topology.
+    return pipe.readFrom(source)
+        .withNativeTimestamps(0)
+        .setLocalParallelism(1)
+        .groupingKey(record -> record.getValue().getIdentifier())
+        .window(WindowDefinition.tumbling(downsampleIntervalInMs))
+        .aggregate(this.uc2AggregateOperation())
+        .map(agg -> {
+          final String theKey = agg.key();
+          final String theValue = agg.getValue().toString();
+          return Map.entry(theKey, theValue);
+        });
+  }
+
+  /**
+   * Defines an AggregateOperation1 for Hazelcast Jet which is used in the Pipeline of the Hazelcast
+   * Jet implementation of UC2.
+   *
+   * <p>
+   * Takes windowed and keyed {@code Entry<String,ActivePowerRecord>} elements and returns a
+   * {@code Stats} object.
+   * </p>
+   *
+   * @return An AggregateOperation used by Hazelcast Jet in a streaming stage which aggregates
+   *         ActivePowerRecord Objects into Stats Objects.
+   */
+  public AggregateOperation1<Entry<String, ActivePowerRecord>,
+      StatsAccumulator, Stats> uc2AggregateOperation() {
+    // Aggregate Operation to Create a Stats Object from Entry<String,ActivePowerRecord> items using
+    // the Statsaccumulator.
+    return AggregateOperation
+        // Creates the accumulator
+        .withCreate(new StatsAccumulatorSupplier())
+        // Defines the accumulation
+        .<Entry<String, ActivePowerRecord>>andAccumulate((accumulator, item) -> {
+          accumulator.add(item.getValue().getValueInW());
+        })
+        // Defines the combination of spread out instances
+        .andCombine((left, right) -> {
+          final Stats rightStats = right.snapshot();
+          left.addAll(rightStats);
+
+        })
+        // Finishes the aggregation
+        .andExportFinish(
+            (accumulator) -> {
+              return accumulator.snapshot();
+          });
+  }
+
+}
diff --git a/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc2/hazelcastjet/uc2specifics/StatsAccumulatorSerializer.java b/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc2/hazelcastjet/uc2specifics/StatsAccumulatorSerializer.java
new file mode 100644
index 0000000000000000000000000000000000000000..5c22b8dd6cc1a7af995a98b4388f40a1a3867ba5
--- /dev/null
+++ b/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc2/hazelcastjet/uc2specifics/StatsAccumulatorSerializer.java
@@ -0,0 +1,38 @@
+package rocks.theodolite.benchmarks.uc2.hazelcastjet.uc2specifics;
+
+import com.google.common.math.Stats;
+import com.google.common.math.StatsAccumulator;
+import com.hazelcast.nio.ObjectDataInput;
+import com.hazelcast.nio.ObjectDataOutput;
+import com.hazelcast.nio.serialization.StreamSerializer;
+import java.io.IOException;
+
+/**
+ * A serializer and deserializer for the StatsAccumulator which is used in the UC2 implementation
+ * using Hazelcast Jet.
+ */
+public class StatsAccumulatorSerializer implements StreamSerializer<StatsAccumulator> {
+
+  private static final int TYPE_ID = 69_420;
+
+  @Override
+  public int getTypeId() {
+    return TYPE_ID;
+  }
+
+  @Override
+  public void write(final ObjectDataOutput out, final StatsAccumulator object) throws IOException {
+    final byte[] byteArray = object.snapshot().toByteArray();
+    out.writeByteArray(byteArray);
+  }
+
+  @Override
+  public StatsAccumulator read(final ObjectDataInput in) throws IOException {
+    final byte[] byteArray = in.readByteArray();
+    final Stats deserializedStats = Stats.fromByteArray(byteArray);
+    final StatsAccumulator accumulator = new StatsAccumulator();
+    accumulator.addAll(deserializedStats);
+    return accumulator;
+  }
+
+}
diff --git a/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc2/hazelcastjet/uc2specifics/StatsAccumulatorSupplier.java b/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc2/hazelcastjet/uc2specifics/StatsAccumulatorSupplier.java
new file mode 100644
index 0000000000000000000000000000000000000000..f4d203f03185cda712a5280634d8d3858c02f30d
--- /dev/null
+++ b/theodolite-benchmarks/uc2-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc2/hazelcastjet/uc2specifics/StatsAccumulatorSupplier.java
@@ -0,0 +1,22 @@
+package rocks.theodolite.benchmarks.uc2.hazelcastjet.uc2specifics;
+
+import com.google.common.math.StatsAccumulator;
+import com.hazelcast.function.SupplierEx;
+
+/**
+ * Supplies a StatsAccumulator. Is used in the aggregation operation of the Hazelcast Jet
+ * implementation for UC2.
+ */
+public class StatsAccumulatorSupplier implements SupplierEx<StatsAccumulator> {
+
+  private static final long serialVersionUID = -656395626316842910L; // NOPMD
+
+  /**
+   * Gets a StatsAccumulator.
+   */
+  @Override
+  public StatsAccumulator getEx() throws Exception {
+    return new StatsAccumulator();
+  }
+
+}
diff --git a/theodolite-benchmarks/uc2-hazelcastjet/src/main/resources/META-INF/application.properties b/theodolite-benchmarks/uc2-hazelcastjet/src/main/resources/META-INF/application.properties
new file mode 100644
index 0000000000000000000000000000000000000000..e3371cc87e20e85e6e8c327955537e6e49dab86e
--- /dev/null
+++ b/theodolite-benchmarks/uc2-hazelcastjet/src/main/resources/META-INF/application.properties
@@ -0,0 +1,8 @@
+application.name=theodolite-uc2-application
+application.version=0.0.1
+
+kafka.bootstrap.servers=localhost:9092
+kafka.input.topic=input
+
+schema.registry.url=http://localhost:8081
+
diff --git a/theodolite-benchmarks/uc2-hazelcastjet/src/test/java/rocks/theodolite/benchmarks/uc2/hazelcastjet/Uc2PipelineTest.java b/theodolite-benchmarks/uc2-hazelcastjet/src/test/java/rocks/theodolite/benchmarks/uc2/hazelcastjet/Uc2PipelineTest.java
new file mode 100644
index 0000000000000000000000000000000000000000..ff72b9558f43334feb8846d50bef2c6714d9404a
--- /dev/null
+++ b/theodolite-benchmarks/uc2-hazelcastjet/src/test/java/rocks/theodolite/benchmarks/uc2/hazelcastjet/Uc2PipelineTest.java
@@ -0,0 +1,108 @@
+package rocks.theodolite.benchmarks.uc2.hazelcastjet;
+
+import com.hazelcast.jet.Jet;
+import com.hazelcast.jet.JetInstance;
+import com.hazelcast.jet.config.JetConfig;
+import com.hazelcast.jet.core.JetTestSupport;
+import com.hazelcast.jet.pipeline.Pipeline;
+import com.hazelcast.jet.pipeline.StreamSource;
+import com.hazelcast.jet.pipeline.StreamStage;
+import com.hazelcast.jet.pipeline.test.AssertionCompletedException;
+import com.hazelcast.jet.pipeline.test.Assertions;
+import com.hazelcast.jet.pipeline.test.TestSources;
+import com.hazelcast.jet.test.SerialTest;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.concurrent.CompletionException;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import titan.ccp.model.records.ActivePowerRecord;
+
+/**
+ * Test methods for the Hazelcast Jet Implementation of UC2.
+ */
+@Category(SerialTest.class)
+public class Uc2PipelineTest extends JetTestSupport {
+
+  JetInstance testInstance = null;
+  Pipeline testPipeline = null;
+  StreamStage<Entry<String, String>> uc2Topology = null;
+
+  /*
+   * Creates the JetInstance, defines a new Hazelcast Jet Pipeline and extends the UC2 topology.
+   * Allows for quick extension of tests.
+   */
+  @Before
+  public void buildUc2Pipeline() {
+
+    // Setup Configuration
+    int testItemsPerSecond = 1;
+    String testSensorName = "TEST-SENSOR";
+    Double testValueInW = 10.0;
+    int testWindowInMs = 5000;
+
+    // Create mock jet instance with configuration
+    final String testClusterName = randomName();
+    final JetConfig testJetConfig = new JetConfig();
+    testJetConfig.getHazelcastConfig().setClusterName(testClusterName);
+    this.testInstance = this.createJetMember(testJetConfig);
+
+    // Create a test source
+    final StreamSource<Entry<String, ActivePowerRecord>> testSource =
+        TestSources.itemStream(testItemsPerSecond, (timestamp, item) -> {
+          final ActivePowerRecord testRecord =
+              new ActivePowerRecord(testSensorName, timestamp, testValueInW);
+          final Entry<String, ActivePowerRecord> testEntry =
+              Map.entry(testSensorName, testRecord);
+          return testEntry;
+        });
+
+    // Create pipeline to test
+    Uc2PipelineBuilder pipelineBuilder = new Uc2PipelineBuilder();
+    this.testPipeline = Pipeline.create();
+    this.uc2Topology =
+        pipelineBuilder.extendUc2Topology(this.testPipeline, testSource, testWindowInMs);
+
+  }
+
+  /**
+   * Tests if no items reach the end before the first window ends.
+   */
+  @Test
+  public void testOutput() {
+
+    // Assertion Configuration
+    int timeout = 14;
+    String expectedOutput = "Stats{count=5, mean=10.0, populationStandardDeviation=0.0, min=10.0, max=10.0}";
+
+    // Assertion
+    this.uc2Topology.apply(Assertions.assertCollectedEventually(timeout,
+        collection -> Assert.assertTrue(
+            "Not the right amount items in Stats Object!",
+            collection.get(collection.size()-1).getValue().equals(expectedOutput))));
+
+    // Run the test!
+    try {
+      this.testInstance.newJob(this.testPipeline).join();
+      Assert.fail("Job should have completed with an AssertionCompletedException, "
+          + "but completed normally!");
+    } catch (final CompletionException e) {
+      final String errorMsg = e.getCause().getMessage();
+      Assert.assertTrue(
+          "Job was expected to complete with AssertionCompletedException, but completed with: "
+              + e.getCause(),
+          errorMsg.contains(AssertionCompletedException.class.getName()));
+    }
+
+  }
+
+  @After
+  public void after() {
+    // Shuts down all running Jet Instances
+    Jet.shutdownAll();
+  }
+
+}
diff --git a/theodolite-benchmarks/uc3-hazelcastjet/.settings/org.eclipse.jdt.ui.prefs b/theodolite-benchmarks/uc3-hazelcastjet/.settings/org.eclipse.jdt.ui.prefs
new file mode 100644
index 0000000000000000000000000000000000000000..b2a15f439cf1844efe56f1ac0d82a2884e66cb9d
--- /dev/null
+++ b/theodolite-benchmarks/uc3-hazelcastjet/.settings/org.eclipse.jdt.ui.prefs
@@ -0,0 +1,286 @@
+cleanup.add_all=false
+cleanup.add_default_serial_version_id=true
+cleanup.add_generated_serial_version_id=false
+cleanup.add_missing_annotations=true
+cleanup.add_missing_deprecated_annotations=true
+cleanup.add_missing_methods=false
+cleanup.add_missing_nls_tags=false
+cleanup.add_missing_override_annotations=true
+cleanup.add_missing_override_annotations_interface_methods=true
+cleanup.add_serial_version_id=false
+cleanup.always_use_blocks=true
+cleanup.always_use_parentheses_in_expressions=false
+cleanup.always_use_this_for_non_static_field_access=true
+cleanup.always_use_this_for_non_static_method_access=true
+cleanup.array_with_curly=false
+cleanup.arrays_fill=false
+cleanup.bitwise_conditional_expression=false
+cleanup.boolean_literal=false
+cleanup.boolean_value_rather_than_comparison=true
+cleanup.break_loop=false
+cleanup.collection_cloning=false
+cleanup.comparing_on_criteria=false
+cleanup.comparison_statement=false
+cleanup.controlflow_merge=false
+cleanup.convert_functional_interfaces=false
+cleanup.convert_to_enhanced_for_loop=true
+cleanup.convert_to_enhanced_for_loop_if_loop_var_used=true
+cleanup.convert_to_switch_expressions=false
+cleanup.correct_indentation=true
+cleanup.do_while_rather_than_while=true
+cleanup.double_negation=false
+cleanup.else_if=false
+cleanup.embedded_if=false
+cleanup.evaluate_nullable=false
+cleanup.extract_increment=false
+cleanup.format_source_code=true
+cleanup.format_source_code_changes_only=false
+cleanup.hash=false
+cleanup.if_condition=false
+cleanup.insert_inferred_type_arguments=false
+cleanup.instanceof=false
+cleanup.instanceof_keyword=false
+cleanup.invert_equals=false
+cleanup.join=false
+cleanup.lazy_logical_operator=false
+cleanup.make_local_variable_final=true
+cleanup.make_parameters_final=true
+cleanup.make_private_fields_final=true
+cleanup.make_type_abstract_if_missing_method=false
+cleanup.make_variable_declarations_final=true
+cleanup.map_cloning=false
+cleanup.merge_conditional_blocks=false
+cleanup.multi_catch=false
+cleanup.never_use_blocks=false
+cleanup.never_use_parentheses_in_expressions=true
+cleanup.no_string_creation=false
+cleanup.no_super=false
+cleanup.number_suffix=false
+cleanup.objects_equals=false
+cleanup.one_if_rather_than_duplicate_blocks_that_fall_through=true
+cleanup.operand_factorization=false
+cleanup.organize_imports=true
+cleanup.overridden_assignment=false
+cleanup.plain_replacement=false
+cleanup.precompile_regex=false
+cleanup.primitive_comparison=false
+cleanup.primitive_parsing=false
+cleanup.primitive_rather_than_wrapper=true
+cleanup.primitive_serialization=false
+cleanup.pull_out_if_from_if_else=false
+cleanup.pull_up_assignment=false
+cleanup.push_down_negation=false
+cleanup.qualify_static_field_accesses_with_declaring_class=false
+cleanup.qualify_static_member_accesses_through_instances_with_declaring_class=true
+cleanup.qualify_static_member_accesses_through_subtypes_with_declaring_class=true
+cleanup.qualify_static_member_accesses_with_declaring_class=true
+cleanup.qualify_static_method_accesses_with_declaring_class=false
+cleanup.reduce_indentation=false
+cleanup.redundant_comparator=false
+cleanup.redundant_falling_through_block_end=false
+cleanup.remove_private_constructors=true
+cleanup.remove_redundant_modifiers=false
+cleanup.remove_redundant_semicolons=true
+cleanup.remove_redundant_type_arguments=true
+cleanup.remove_trailing_whitespaces=true
+cleanup.remove_trailing_whitespaces_all=true
+cleanup.remove_trailing_whitespaces_ignore_empty=false
+cleanup.remove_unnecessary_array_creation=false
+cleanup.remove_unnecessary_casts=true
+cleanup.remove_unnecessary_nls_tags=true
+cleanup.remove_unused_imports=true
+cleanup.remove_unused_local_variables=false
+cleanup.remove_unused_private_fields=true
+cleanup.remove_unused_private_members=false
+cleanup.remove_unused_private_methods=true
+cleanup.remove_unused_private_types=true
+cleanup.return_expression=false
+cleanup.simplify_lambda_expression_and_method_ref=false
+cleanup.single_used_field=false
+cleanup.sort_members=false
+cleanup.sort_members_all=false
+cleanup.standard_comparison=false
+cleanup.static_inner_class=false
+cleanup.strictly_equal_or_different=false
+cleanup.stringbuffer_to_stringbuilder=false
+cleanup.stringbuilder=false
+cleanup.stringbuilder_for_local_vars=true
+cleanup.stringconcat_to_textblock=false
+cleanup.substring=false
+cleanup.switch=false
+cleanup.system_property=false
+cleanup.system_property_boolean=false
+cleanup.system_property_file_encoding=false
+cleanup.system_property_file_separator=false
+cleanup.system_property_line_separator=false
+cleanup.system_property_path_separator=false
+cleanup.ternary_operator=false
+cleanup.try_with_resource=false
+cleanup.unlooped_while=false
+cleanup.unreachable_block=false
+cleanup.use_anonymous_class_creation=false
+cleanup.use_autoboxing=false
+cleanup.use_blocks=true
+cleanup.use_blocks_only_for_return_and_throw=false
+cleanup.use_directly_map_method=false
+cleanup.use_lambda=true
+cleanup.use_parentheses_in_expressions=true
+cleanup.use_string_is_blank=false
+cleanup.use_this_for_non_static_field_access=true
+cleanup.use_this_for_non_static_field_access_only_if_necessary=false
+cleanup.use_this_for_non_static_method_access=true
+cleanup.use_this_for_non_static_method_access_only_if_necessary=false
+cleanup.use_unboxing=false
+cleanup.use_var=false
+cleanup.useless_continue=false
+cleanup.useless_return=false
+cleanup.valueof_rather_than_instantiation=false
+cleanup_profile=_CAU-SE-Style
+cleanup_settings_version=2
+eclipse.preferences.version=1
+editor_save_participant_org.eclipse.jdt.ui.postsavelistener.cleanup=true
+formatter_profile=_CAU-SE-Style
+formatter_settings_version=21
+org.eclipse.jdt.ui.ignorelowercasenames=true
+org.eclipse.jdt.ui.importorder=
+org.eclipse.jdt.ui.ondemandthreshold=99
+org.eclipse.jdt.ui.staticondemandthreshold=99
+org.eclipse.jdt.ui.text.custom_code_templates=
+sp_cleanup.add_all=false
+sp_cleanup.add_default_serial_version_id=true
+sp_cleanup.add_generated_serial_version_id=false
+sp_cleanup.add_missing_annotations=true
+sp_cleanup.add_missing_deprecated_annotations=true
+sp_cleanup.add_missing_methods=false
+sp_cleanup.add_missing_nls_tags=false
+sp_cleanup.add_missing_override_annotations=true
+sp_cleanup.add_missing_override_annotations_interface_methods=true
+sp_cleanup.add_serial_version_id=false
+sp_cleanup.always_use_blocks=true
+sp_cleanup.always_use_parentheses_in_expressions=false
+sp_cleanup.always_use_this_for_non_static_field_access=true
+sp_cleanup.always_use_this_for_non_static_method_access=true
+sp_cleanup.array_with_curly=false
+sp_cleanup.arrays_fill=false
+sp_cleanup.bitwise_conditional_expression=false
+sp_cleanup.boolean_literal=false
+sp_cleanup.boolean_value_rather_than_comparison=false
+sp_cleanup.break_loop=false
+sp_cleanup.collection_cloning=false
+sp_cleanup.comparing_on_criteria=true
+sp_cleanup.comparison_statement=false
+sp_cleanup.controlflow_merge=false
+sp_cleanup.convert_functional_interfaces=false
+sp_cleanup.convert_to_enhanced_for_loop=true
+sp_cleanup.convert_to_enhanced_for_loop_if_loop_var_used=false
+sp_cleanup.convert_to_switch_expressions=false
+sp_cleanup.correct_indentation=true
+sp_cleanup.do_while_rather_than_while=false
+sp_cleanup.double_negation=false
+sp_cleanup.else_if=false
+sp_cleanup.embedded_if=false
+sp_cleanup.evaluate_nullable=false
+sp_cleanup.extract_increment=false
+sp_cleanup.format_source_code=true
+sp_cleanup.format_source_code_changes_only=false
+sp_cleanup.hash=false
+sp_cleanup.if_condition=false
+sp_cleanup.insert_inferred_type_arguments=false
+sp_cleanup.instanceof=false
+sp_cleanup.instanceof_keyword=false
+sp_cleanup.invert_equals=false
+sp_cleanup.join=false
+sp_cleanup.lazy_logical_operator=false
+sp_cleanup.make_local_variable_final=true
+sp_cleanup.make_parameters_final=true
+sp_cleanup.make_private_fields_final=true
+sp_cleanup.make_type_abstract_if_missing_method=false
+sp_cleanup.make_variable_declarations_final=true
+sp_cleanup.map_cloning=false
+sp_cleanup.merge_conditional_blocks=false
+sp_cleanup.multi_catch=false
+sp_cleanup.never_use_blocks=false
+sp_cleanup.never_use_parentheses_in_expressions=true
+sp_cleanup.no_string_creation=false
+sp_cleanup.no_super=false
+sp_cleanup.number_suffix=false
+sp_cleanup.objects_equals=false
+sp_cleanup.on_save_use_additional_actions=true
+sp_cleanup.one_if_rather_than_duplicate_blocks_that_fall_through=false
+sp_cleanup.operand_factorization=false
+sp_cleanup.organize_imports=true
+sp_cleanup.overridden_assignment=false
+sp_cleanup.plain_replacement=false
+sp_cleanup.precompile_regex=false
+sp_cleanup.primitive_comparison=false
+sp_cleanup.primitive_parsing=false
+sp_cleanup.primitive_rather_than_wrapper=false
+sp_cleanup.primitive_serialization=false
+sp_cleanup.pull_out_if_from_if_else=false
+sp_cleanup.pull_up_assignment=false
+sp_cleanup.push_down_negation=false
+sp_cleanup.qualify_static_field_accesses_with_declaring_class=false
+sp_cleanup.qualify_static_member_accesses_through_instances_with_declaring_class=true
+sp_cleanup.qualify_static_member_accesses_through_subtypes_with_declaring_class=true
+sp_cleanup.qualify_static_member_accesses_with_declaring_class=true
+sp_cleanup.qualify_static_method_accesses_with_declaring_class=false
+sp_cleanup.reduce_indentation=false
+sp_cleanup.redundant_comparator=false
+sp_cleanup.redundant_falling_through_block_end=false
+sp_cleanup.remove_private_constructors=true
+sp_cleanup.remove_redundant_modifiers=false
+sp_cleanup.remove_redundant_semicolons=false
+sp_cleanup.remove_redundant_type_arguments=false
+sp_cleanup.remove_trailing_whitespaces=true
+sp_cleanup.remove_trailing_whitespaces_all=true
+sp_cleanup.remove_trailing_whitespaces_ignore_empty=false
+sp_cleanup.remove_unnecessary_array_creation=false
+sp_cleanup.remove_unnecessary_casts=true
+sp_cleanup.remove_unnecessary_nls_tags=true
+sp_cleanup.remove_unused_imports=true
+sp_cleanup.remove_unused_local_variables=false
+sp_cleanup.remove_unused_private_fields=true
+sp_cleanup.remove_unused_private_members=false
+sp_cleanup.remove_unused_private_methods=true
+sp_cleanup.remove_unused_private_types=true
+sp_cleanup.return_expression=false
+sp_cleanup.simplify_lambda_expression_and_method_ref=false
+sp_cleanup.single_used_field=false
+sp_cleanup.sort_members=false
+sp_cleanup.sort_members_all=false
+sp_cleanup.standard_comparison=false
+sp_cleanup.static_inner_class=false
+sp_cleanup.strictly_equal_or_different=false
+sp_cleanup.stringbuffer_to_stringbuilder=false
+sp_cleanup.stringbuilder=false
+sp_cleanup.stringbuilder_for_local_vars=false
+sp_cleanup.stringconcat_to_textblock=false
+sp_cleanup.substring=false
+sp_cleanup.switch=false
+sp_cleanup.system_property=false
+sp_cleanup.system_property_boolean=false
+sp_cleanup.system_property_file_encoding=false
+sp_cleanup.system_property_file_separator=false
+sp_cleanup.system_property_line_separator=false
+sp_cleanup.system_property_path_separator=false
+sp_cleanup.ternary_operator=false
+sp_cleanup.try_with_resource=false
+sp_cleanup.unlooped_while=false
+sp_cleanup.unreachable_block=false
+sp_cleanup.use_anonymous_class_creation=false
+sp_cleanup.use_autoboxing=false
+sp_cleanup.use_blocks=true
+sp_cleanup.use_blocks_only_for_return_and_throw=false
+sp_cleanup.use_directly_map_method=false
+sp_cleanup.use_lambda=true
+sp_cleanup.use_parentheses_in_expressions=true
+sp_cleanup.use_string_is_blank=false
+sp_cleanup.use_this_for_non_static_field_access=true
+sp_cleanup.use_this_for_non_static_field_access_only_if_necessary=false
+sp_cleanup.use_this_for_non_static_method_access=true
+sp_cleanup.use_this_for_non_static_method_access_only_if_necessary=false
+sp_cleanup.use_unboxing=false
+sp_cleanup.use_var=false
+sp_cleanup.useless_continue=true
+sp_cleanup.useless_return=true
+sp_cleanup.valueof_rather_than_instantiation=false
diff --git a/theodolite-benchmarks/uc3-hazelcastjet/.settings/qa.eclipse.plugin.checkstyle.prefs b/theodolite-benchmarks/uc3-hazelcastjet/.settings/qa.eclipse.plugin.checkstyle.prefs
new file mode 100644
index 0000000000000000000000000000000000000000..4fa4266c755f4ff8da465ab7341cd70ffb24ecf7
--- /dev/null
+++ b/theodolite-benchmarks/uc3-hazelcastjet/.settings/qa.eclipse.plugin.checkstyle.prefs
@@ -0,0 +1,4 @@
+configFilePath=../config/checkstyle.xml
+customModulesJarPaths=
+eclipse.preferences.version=1
+enabled=false
diff --git a/theodolite-benchmarks/uc3-hazelcastjet/.settings/qa.eclipse.plugin.pmd.prefs b/theodolite-benchmarks/uc3-hazelcastjet/.settings/qa.eclipse.plugin.pmd.prefs
new file mode 100644
index 0000000000000000000000000000000000000000..40bfd0ecdbbe324bb54e4b9f9f32ba95cf5b0c2a
--- /dev/null
+++ b/theodolite-benchmarks/uc3-hazelcastjet/.settings/qa.eclipse.plugin.pmd.prefs
@@ -0,0 +1,4 @@
+customRulesJars=
+eclipse.preferences.version=1
+enabled=false
+ruleSetFilePath=../config/pmd.xml
diff --git a/theodolite-benchmarks/uc3-hazelcastjet/Dockerfile b/theodolite-benchmarks/uc3-hazelcastjet/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..6dc99aeb7263ef0084e9721ad9bd908a36cd61f6
--- /dev/null
+++ b/theodolite-benchmarks/uc3-hazelcastjet/Dockerfile
@@ -0,0 +1,7 @@
+FROM openjdk:11-slim
+
+ADD build/distributions/uc3-hazelcastjet.tar /
+
+
+CMD  JAVA_OPTS="$JAVA_OPTS -Dorg.slf4j.simpleLogger.defaultLogLevel=$LOG_LEVEL" \
+     /uc3-hazelcastjet/bin/uc3-hazelcastjet
\ No newline at end of file
diff --git a/theodolite-benchmarks/uc3-hazelcastjet/build.gradle b/theodolite-benchmarks/uc3-hazelcastjet/build.gradle
new file mode 100644
index 0000000000000000000000000000000000000000..737ef6af17cb4c5aa4aa2ee97b5c3e7e4aa0d929
--- /dev/null
+++ b/theodolite-benchmarks/uc3-hazelcastjet/build.gradle
@@ -0,0 +1,5 @@
+plugins {
+  id 'theodolite.hazelcastjet'
+}
+
+mainClassName = "rocks.theodolite.benchmarks.uc3.hazelcastjet.HistoryService"
diff --git a/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/HistoryService.java b/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/HistoryService.java
new file mode 100644
index 0000000000000000000000000000000000000000..ecf38bd6c6a85e6d0f1431708a69f3431aff4730
--- /dev/null
+++ b/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/HistoryService.java
@@ -0,0 +1,72 @@
+package rocks.theodolite.benchmarks.uc3.hazelcastjet;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * A microservice that manages the history and, therefore, stores and aggregates incoming
+ * measurements.
+ */
+public class HistoryService {
+
+  private static final Logger LOGGER = LoggerFactory.getLogger(HistoryService.class);
+
+  // Hazelcast settings (default)
+  private static final String HZ_KUBERNETES_SERVICE_DNS_KEY = "service-dns";
+  private static final String BOOTSTRAP_SERVER_DEFAULT = "localhost:5701";
+
+  // Kafka settings (default)
+  private static final String KAFKA_BOOTSTRAP_DEFAULT = "localhost:9092";
+  private static final String SCHEMA_REGISTRY_URL_DEFAULT = "http://localhost:8081";
+  private static final String KAFKA_INPUT_TOPIC_DEFAULT = "input";
+  private static final String KAFKA_OUTPUT_TOPIC_DEFAULT = "output";
+  
+  // UC3 specific (default)
+  private static final String WINDOW_SIZE_IN_SECONDS_DEFAULT = "2629800";
+  private static final String HOPSIZE_IN_SEC_DEFAULT = "86400";
+
+  // Job name (default)
+  private static final String JOB_NAME = "uc3-hazelcastjet";
+
+  /**
+   * Entrypoint for UC3 using Gradle Run.
+   */
+  public static void main(final String[] args) {
+    final HistoryService uc3HistoryService = new HistoryService();
+    try {
+      uc3HistoryService.run();
+    } catch (final Exception e) { // NOPMD
+      LOGGER.error("ABORT MISSION!: {}", e);
+    }
+  }
+
+  /**
+   * Start a UC3 service.
+   *
+   * @throws Exception This Exception occurs if the Uc3HazelcastJetFactory is used in the wrong way.
+   *         Detailed data is provided once an Exception occurs.
+   */
+  public void run() throws Exception { // NOPMD
+    this.createHazelcastJetApplication();
+  }
+
+  /**
+   * Creates a Hazelcast Jet Application for UC3 using the Uc3HazelcastJetFactory.
+   *
+   * @throws Exception This Exception occurs if the Uc3HazelcastJetFactory is used in the wrong way.
+   *         Detailed data is provided once an Exception occurs.
+   */
+  private void createHazelcastJetApplication() throws Exception { // NOPMD
+    new Uc3HazelcastJetFactory()
+        .setReadPropertiesFromEnv(KAFKA_BOOTSTRAP_DEFAULT, SCHEMA_REGISTRY_URL_DEFAULT, JOB_NAME)
+        .setWritePropertiesFromEnv(KAFKA_BOOTSTRAP_DEFAULT, SCHEMA_REGISTRY_URL_DEFAULT)
+        .setKafkaInputTopicFromEnv(KAFKA_INPUT_TOPIC_DEFAULT)
+        .setKafkaOutputTopicFromEnv(KAFKA_OUTPUT_TOPIC_DEFAULT)
+        .setWindowSizeInSecondsFromEnv(WINDOW_SIZE_IN_SECONDS_DEFAULT)
+        .setHoppingSizeInSecondsFromEnv(HOPSIZE_IN_SEC_DEFAULT)
+        .buildUc3Pipeline()
+        .buildUc3JetInstanceFromEnv(LOGGER, BOOTSTRAP_SERVER_DEFAULT, HZ_KUBERNETES_SERVICE_DNS_KEY)
+        .runUc3Job(JOB_NAME);
+  }
+
+}
diff --git a/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/Uc3HazelcastJetFactory.java b/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/Uc3HazelcastJetFactory.java
new file mode 100644
index 0000000000000000000000000000000000000000..be6d70d27b9a868914ec5d28e84b4a90454ab56c
--- /dev/null
+++ b/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/Uc3HazelcastJetFactory.java
@@ -0,0 +1,341 @@
+package rocks.theodolite.benchmarks.uc3.hazelcastjet;
+
+import com.hazelcast.jet.JetInstance;
+import com.hazelcast.jet.config.JobConfig;
+import com.hazelcast.jet.pipeline.Pipeline;
+import io.confluent.kafka.serializers.KafkaAvroDeserializer;
+import java.util.Objects;
+import java.util.Properties;
+import org.apache.kafka.common.serialization.StringDeserializer;
+import org.apache.kafka.common.serialization.StringSerializer;
+import org.slf4j.Logger;
+import rocks.theodolite.benchmarks.commons.hazelcastjet.ConfigurationKeys;
+import rocks.theodolite.benchmarks.commons.hazelcastjet.JetInstanceBuilder;
+import rocks.theodolite.benchmarks.commons.hazelcastjet.KafkaPropertiesBuilder;
+import rocks.theodolite.benchmarks.uc3.hazelcastjet.uc3specifics.HourOfDayKey;
+import rocks.theodolite.benchmarks.uc3.hazelcastjet.uc3specifics.HourOfDayKeySerializer;
+
+/**
+ * A Hazelcast Jet factory which can build a Hazelcast Jet Instance and Pipeline for the UC3
+ * benchmark and lets you start the Hazelcast Jet job. The JetInstance can be built directly as the
+ * Hazelcast Config is managed internally. In order to build the Pipeline, you first have to build
+ * the Read and Write Properties, set the input and output topic, and set the window size in seconds
+ * and the hopping size in seconds. This can be done using internal functions of this factory.
+ * Outside data only refers to custom values or default values in case data of the environment
+ * cannot be fetched.
+ */
+public class Uc3HazelcastJetFactory { // NOPMD
+
+  // Information per History Service
+  private Properties kafkaReadPropsForPipeline;
+  private Properties kafkaWritePropsForPipeline;
+  private String kafkaInputTopic;
+  private String kafkaOutputTopic;
+  private JetInstance uc3JetInstance;
+  private Pipeline uc3JetPipeline;
+  // UC3 specific
+  private int windowSizeInSeconds;
+  private int hoppingSizeInSeconds;
+
+  /////////////////////////////////////
+  // Layer 1 - Hazelcast Jet Run Job //
+  /////////////////////////////////////
+
+  /**
+   * Needs a JetInstance and Pipeline defined in this factory. Adds the pipeline to the existing
+   * JetInstance as a job.
+   *
+   * @param jobName The name of the job.
+   * @throws Exception If either no JetInstance or Pipeline is set, a job cannot be started.
+   */
+  public void runUc3Job(final String jobName) throws IllegalStateException { // NOPMD
+
+    // Check if a Jet Instance for UC3 is set.
+    if (this.uc3JetInstance == null) {
+      throw new IllegalStateException("Jet Instance is not set! "
+          + "Cannot start a hazelcast jet job for UC3.");
+    }
+
+    // Check if a Pipeline for UC3 is set.
+    if (this.uc3JetPipeline == null) {
+      throw new IllegalStateException(
+          "Hazelcast Pipeline is not set! Cannot start a hazelcast jet job for UC3.");
+    }
+
+    // Adds the job name and joins a job to the JetInstance defined in this factory
+    final JobConfig jobConfig = new JobConfig()
+        .registerSerializer(HourOfDayKey.class, HourOfDayKeySerializer.class)
+        .setName(jobName);
+    this.uc3JetInstance.newJobIfAbsent(this.uc3JetPipeline, jobConfig).join();
+  }
+
+  /////////////
+  // Layer 2 //
+  /////////////
+
+  /**
+   * Build a Hazelcast JetInstance used to run a job on.
+   *
+   * @param logger The logger specified for this JetInstance.
+   * @param bootstrapServerDefault Default bootstrap server in case no value can be derived from the
+   *        environment.
+   * @param hzKubernetesServiceDnsKey The kubernetes service dns key.
+   * @return A Uc3HazelcastJetFactory containing a set JetInstance.
+   */
+  public Uc3HazelcastJetFactory buildUc3JetInstanceFromEnv(final Logger logger,
+      final String bootstrapServerDefault,
+      final String hzKubernetesServiceDnsKey) {
+    this.uc3JetInstance = new JetInstanceBuilder()
+        .setConfigFromEnv(logger, bootstrapServerDefault, hzKubernetesServiceDnsKey)
+        .build();
+    return this;
+  }
+
+  /**
+   * Builds a Hazelcast Jet pipeline used for a JetInstance to run it as a job on. Needs the input
+   * topic and kafka properties defined in this factory beforehand.
+   *
+   * @return A Uc3HazelcastJetFactory containing a set pipeline.
+   * @throws Exception If the input topic or the kafka properties are not defined, the pipeline
+   *         cannot be built.
+   */
+  public Uc3HazelcastJetFactory buildUc3Pipeline() throws IllegalStateException { // NOPMD
+
+    final String defaultPipelineWarning = "Cannot build pipeline."; // NOPMD
+
+    // Check if Properties for the Kafka Input are set.
+    if (this.kafkaReadPropsForPipeline == null) {
+      throw new IllegalStateException("Kafka Read Properties for pipeline not set! "
+          + defaultPipelineWarning);
+    }
+
+    // Check if Properties for the Kafka Output are set.
+    if (this.kafkaWritePropsForPipeline == null) {
+      throw new IllegalStateException("Kafka Write Properties for pipeline not set! "
+          + defaultPipelineWarning);
+    }
+
+    // Check if the Kafka input topic is set.
+    if (this.kafkaInputTopic == null) {
+      throw new IllegalStateException("Kafka input topic for pipeline not set! "
+          + defaultPipelineWarning);
+    }
+
+    // Check if the Kafka output topic is set.
+    if (this.kafkaOutputTopic == null) {
+      throw new IllegalStateException("kafka output topic for pipeline not set! "
+          + defaultPipelineWarning);
+    }
+
+    // Check if the window size for the "sliding" window is set.
+    if (this.windowSizeInSeconds <= 0) {
+      throw new IllegalStateException(
+          "window size in seconds for pipeline not set or not greater than 0! "
+              + defaultPipelineWarning);
+    }
+
+    // Check if the hopping distance for the "sliding" window is set.
+    if (this.hoppingSizeInSeconds <= 0) {
+      throw new IllegalStateException(
+          "hopping size in seconds for pipeline not set or not greater than 0! "
+              + defaultPipelineWarning);
+    }
+
+    // Build Pipeline Using the pipelineBuilder
+    final Uc3PipelineBuilder pipeBuilder = new Uc3PipelineBuilder();
+    this.uc3JetPipeline =
+        pipeBuilder.build(this.kafkaReadPropsForPipeline,
+            this.kafkaWritePropsForPipeline,
+            this.kafkaInputTopic, this.kafkaOutputTopic, this.hoppingSizeInSeconds,
+            this.windowSizeInSeconds);
+    // Return Uc3HazelcastJetBuilder factory
+    return this;
+  }
+
+  /////////////
+  // Layer 3 //
+  /////////////
+
+  /**
+   * Sets kafka read properties for pipeline used in this builder.
+   *
+   * @param kafkaReadProperties A properties object containing necessary values used for the
+   *        Hazelcast Jet Kafka connection to read data.
+   * @return The Uc3HazelcastJetBuilder factory with set kafkaReadPropsForPipeline.
+   */
+  public Uc3HazelcastJetFactory setCustomReadProperties(// NOPMD
+      final Properties kafkaReadProperties) {
+    this.kafkaReadPropsForPipeline = kafkaReadProperties;
+    return this;
+  }
+
+  /**
+   * Sets kafka write properties for pipeline used in this builder.
+   *
+   * @param kafkaWriteProperties A properties object containing necessary values used for the
+   *        Hazelcast Jet Kafka connection to write data.
+   * @return The Uc3HazelcastJetBuilder factory with set kafkaWritePropsForPipeline.
+   */
+  public Uc3HazelcastJetFactory setCustomWriteProperties(// NOPMD
+      final Properties kafkaWriteProperties) {
+    this.kafkaWritePropsForPipeline = kafkaWriteProperties;
+    return this;
+  }
+
+  /**
+   * Sets kafka read properties for pipeline used in this builder using environment variables.
+   *
+   * @param bootstrapServersDefault Default Bootstrap server in the case that no bootstrap server
+   *        can be fetched from the environment.
+   * @param schemaRegistryUrlDefault Default schema registry url in the case that no schema registry
+   *        url can be fetched from the environment.
+   * @return The Uc3HazelcastJetBuilder factory with set kafkaReadPropertiesForPipeline.
+   */
+  public Uc3HazelcastJetFactory setReadPropertiesFromEnv(// NOPMD
+                                                         final String bootstrapServersDefault,
+                                                         final String schemaRegistryUrlDefault,
+                                                         final String jobName) {
+    // Use KafkaPropertiesBuilder to build a properties object used for kafka
+    final KafkaPropertiesBuilder propsBuilder = new KafkaPropertiesBuilder();
+    final Properties kafkaReadProps =
+        propsBuilder.buildKafkaInputReadPropsFromEnv(bootstrapServersDefault,
+            schemaRegistryUrlDefault,
+            jobName,
+            StringDeserializer.class.getCanonicalName(),
+            KafkaAvroDeserializer.class.getCanonicalName());
+    this.kafkaReadPropsForPipeline = kafkaReadProps;
+    return this;
+  }
+
+  /**
+   * Sets kafka write properties for pipeline used in this builder using environment variables.
+   *
+   * @param bootstrapServersDefault Default Bootstrap server in the case that no bootstrap server
+   *        can be fetched from the environment.
+   * @return The Uc3HazelcastJetBuilder factory with set kafkaWritePropertiesForPipeline.
+   */
+  public Uc3HazelcastJetFactory setWritePropertiesFromEnv(// NOPMD
+      final String bootstrapServersDefault, final String schemaRegistryUrlDefault) {
+    // Use KafkaPropertiesBuilder to build a properties object used for kafka
+    final KafkaPropertiesBuilder propsBuilder = new KafkaPropertiesBuilder();
+    final Properties kafkaWriteProps =
+        propsBuilder.buildKafkaWritePropsFromEnv(bootstrapServersDefault,
+            schemaRegistryUrlDefault,
+            StringSerializer.class.getCanonicalName(),
+            StringSerializer.class.getCanonicalName());
+    this.kafkaWritePropsForPipeline = kafkaWriteProps;
+    return this;
+  }
+
+  /**
+   * Sets the kafka input topic for the pipeline used in this builder.
+   *
+   * @param inputTopic The kafka topic used as the pipeline input.
+   * @return A Uc3HazelcastJetBuilder factory with a set kafkaInputTopic.
+   */
+  public Uc3HazelcastJetFactory setCustomKafkaInputTopic(// NOPMD
+      final String inputTopic) {
+    this.kafkaInputTopic = inputTopic;
+    return this;
+  }
+
+  /**
+   * Sets the kafka output topic for the pipeline used in this builder.
+   *
+   * @param outputTopic The kafka topic used as the pipeline output.
+   * @return A Uc3HazelcastJetBuilder factory with a set kafkaOutputTopic.
+   */
+  public Uc3HazelcastJetFactory setCustomKafkaOutputTopic(final String outputTopic) { // NOPMD
+    this.kafkaOutputTopic = outputTopic;
+    return this;
+  }
+
+
+  /**
+   * Sets the kafka input topic for the pipeline used in this builder using environment variables.
+   *
+   * @param defaultInputTopic The default kafka input topic used if no topic is specified by the
+   *        environment.
+   * @return A Uc3HazelcastJetBuilder factory with a set kafkaInputTopic.
+   */
+  public Uc3HazelcastJetFactory setKafkaInputTopicFromEnv(// NOPMD
+      final String defaultInputTopic) {
+    this.kafkaInputTopic = Objects.requireNonNullElse(
+        System.getenv(ConfigurationKeys.KAFKA_INPUT_TOPIC),
+        defaultInputTopic);
+    return this;
+  }
+
+  /**
+   * Sets the kafka output topic for the pipeline used in this builder using environment variables.
+   *
+   * @param defaultOutputTopic The default kafka output topic used if no topic is specified by the
+   *        environment.
+   * @return A Uc3HazelcastJetBuilder factory with a set kafkaOutputTopic.
+   */
+  public Uc3HazelcastJetFactory setKafkaOutputTopicFromEnv(// NOPMD
+      final String defaultOutputTopic) {
+    this.kafkaOutputTopic = Objects.requireNonNullElse(
+        System.getenv(ConfigurationKeys.KAFKA_OUTPUT_TOPIC),
+        defaultOutputTopic);
+    return this;
+  }
+
+  /**
+   * Sets the window size in seconds for the pipeline used in this builder.
+   *
+   * @param windowSizeInSeconds the windowSizeInSeconds to be used for this pipeline.
+   * @return A Uc3HazelcastJetFactory with a set windowSizeInSeconds.
+   */
+  public Uc3HazelcastJetFactory setCustomWindowSizeInSeconds(// NOPMD
+      final int windowSizeInSeconds) {
+    this.windowSizeInSeconds = windowSizeInSeconds;
+    return this;
+  }
+
+  /**
+   * Sets the window size in seconds for the pipeline used in this builder from the environment.
+   *
+   * @param defaultWindowSizeInSeconds the default window size in seconds to be used for this
+   *        pipeline when none is set in the environment.
+   * @return A Uc3HazelcastJetFactory with a set windowSizeInSeconds.
+   */
+  public Uc3HazelcastJetFactory setWindowSizeInSecondsFromEnv(// NOPMD
+      final String defaultWindowSizeInSeconds) {
+    final String windowSizeInSeconds = Objects.requireNonNullElse(
+        System.getenv(ConfigurationKeys.WINDOW_SIZE_IN_SECONDS),
+        defaultWindowSizeInSeconds);
+    final int windowSizeInSecondsNumber = Integer.parseInt(windowSizeInSeconds);
+    this.windowSizeInSeconds = windowSizeInSecondsNumber;
+    return this;
+  }
+
+  /**
+   * Sets the hopping size in seconds for the pipeline used in this builder.
+   *
+   * @param hoppingSizeInSeconds the hoppingSizeInSeconds to be used for this pipeline.
+   * @return A Uc3HazelcastJetFactory with a set hoppingSizeInSeconds.
+   */
+  public Uc3HazelcastJetFactory setCustomHoppingSizeInSeconds(// NOPMD
+      final int hoppingSizeInSeconds) {
+    this.hoppingSizeInSeconds = hoppingSizeInSeconds;
+    return this;
+  }
+
+  /**
+   * Sets the hopping size in seconds for the pipeline used in this builder from the environment.
+   *
+   * @param defaultHoppingSizeInSeconds the default hopping size in seconds to be used for this
+   *        pipeline when none is set in the environment.
+   * @return A Uc3HazelcastJetFactory with a set hoppingSizeInSeconds.
+   */
+  public Uc3HazelcastJetFactory setHoppingSizeInSecondsFromEnv(// NOPMD
+      final String defaultHoppingSizeInSeconds) {
+    final String hoppingSizeInSeconds = Objects.requireNonNullElse(
+        System.getenv(ConfigurationKeys.HOPPING_SIZE_IN_SECONDS),
+        defaultHoppingSizeInSeconds);
+    final int hoppingSizeInSecondsNumber = Integer.parseInt(hoppingSizeInSeconds);
+    this.hoppingSizeInSeconds = hoppingSizeInSecondsNumber;
+    return this;
+  }
+}
diff --git a/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/Uc3PipelineBuilder.java b/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/Uc3PipelineBuilder.java
new file mode 100644
index 0000000000000000000000000000000000000000..c8427de60742c2923d4ec17703592f5b8310de0c
--- /dev/null
+++ b/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/Uc3PipelineBuilder.java
@@ -0,0 +1,125 @@
+package rocks.theodolite.benchmarks.uc3.hazelcastjet;
+
+import com.hazelcast.jet.aggregate.AggregateOperations;
+import com.hazelcast.jet.kafka.KafkaSinks;
+import com.hazelcast.jet.kafka.KafkaSources;
+import com.hazelcast.jet.pipeline.Pipeline;
+import com.hazelcast.jet.pipeline.Sinks;
+import com.hazelcast.jet.pipeline.StreamSource;
+import com.hazelcast.jet.pipeline.StreamStage;
+import com.hazelcast.jet.pipeline.WindowDefinition;
+import java.time.Instant;
+import java.time.LocalDateTime;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Properties;
+import java.util.TimeZone;
+import java.util.concurrent.TimeUnit;
+import rocks.theodolite.benchmarks.uc3.hazelcastjet.uc3specifics.HourOfDayKey;
+import rocks.theodolite.benchmarks.uc3.hazelcastjet.uc3specifics.HoursOfDayKeyFactory;
+import rocks.theodolite.benchmarks.uc3.hazelcastjet.uc3specifics.StatsKeyFactory;
+import titan.ccp.model.records.ActivePowerRecord;
+
+/**
+ * Builder to build a HazelcastJet Pipeline for UC3 which can be used for stream processing using
+ * Hazelcast Jet.
+ */
+public class Uc3PipelineBuilder {
+
+  /**
+   * Builds a pipeline which can be used for stream processing using Hazelcast Jet.
+   *
+   * @param kafkaReadPropsForPipeline Properties Object containing the necessary kafka reads
+   *        attributes.
+   * @param kafkaWritePropsForPipeline Properties Object containing the necessary kafka write
+   *        attributes.
+   * @param kafkaInputTopic The name of the input topic used for the pipeline.
+   * @param kafkaOutputTopic The name of the output topic used for the pipeline.
+   * @param hoppingSizeInSeconds The hop length of the sliding window used in the aggregation of
+   *        this pipeline.
+   * @param windowSizeInSeconds The window length of the sliding window used in the aggregation of
+   *        this pipeline.
+   * @return returns a Pipeline used which can be used in a Hazelcast Jet Instance to process data
+   *         for UC3.
+   */
+  public Pipeline build(final Properties kafkaReadPropsForPipeline,
+      final Properties kafkaWritePropsForPipeline, final String kafkaInputTopic,
+      final String kafkaOutputTopic,
+      final int hoppingSizeInSeconds, final int windowSizeInSeconds) {
+
+    // Define a new Pipeline
+    final Pipeline pipe = Pipeline.create();
+
+    // Define the source
+    final StreamSource<Entry<String, ActivePowerRecord>> kafkaSource = KafkaSources
+        .<String, ActivePowerRecord>kafka(
+            kafkaReadPropsForPipeline, kafkaInputTopic);
+
+    // Extend topology for UC3
+    final StreamStage<Map.Entry<String, String>> uc3Product =
+        this.extendUc3Topology(pipe, kafkaSource, hoppingSizeInSeconds, windowSizeInSeconds);
+
+    // Add Sink1: Logger
+    uc3Product.writeTo(Sinks.logger());
+    // Add Sink2: Write back to kafka for the final benchmark
+    uc3Product.writeTo(KafkaSinks.<String, String>kafka(
+        kafkaWritePropsForPipeline, kafkaOutputTopic));
+
+    return pipe;
+  }
+
+  /**
+   * Extends to a blank Hazelcast Jet Pipeline the UC3 topology defined by theodolite.
+   *
+   * <p>
+   * UC3 takes {@code ActivePowerRecord} object, groups them by keys and calculates average double
+   * values for a sliding window and sorts them into the hour of the day.
+   * </p>
+   *
+   * @param pipe The blank hazelcast jet pipeline to extend the logic to.
+   * @param source A streaming source to fetch data from.
+   * @param hoppingSizeInSeconds The jump distance of the "sliding" window.
+   * @param windowSizeInSeconds The size of the "sliding" window.
+   * @return A {@code StreamStage<Map.Entry<String,String>>} with the above definition of the key
+   *         and value of the Entry object. It can be used to be further modified or directly be
+   *         written into a sink.
+   */
+  public StreamStage<Map.Entry<String, String>> extendUc3Topology(final Pipeline pipe,
+      final StreamSource<Entry<String, ActivePowerRecord>> source, final int hoppingSizeInSeconds,
+      final int windowSizeInSeconds) {
+    // Build the pipeline topology.
+    return pipe
+        .readFrom(source)
+        // use Timestamps
+        .withNativeTimestamps(0)
+        .setLocalParallelism(1)
+        // Map timestamp to hour of day and create new key using sensorID and
+        // datetime mapped to HourOfDay
+        .map(record -> {
+          final String sensorId = record.getValue().getIdentifier();
+          final long timestamp = record.getValue().getTimestamp();
+          final LocalDateTime dateTime = LocalDateTime.ofInstant(Instant.ofEpochMilli(timestamp),
+              TimeZone.getDefault().toZoneId());
+
+          final StatsKeyFactory<HourOfDayKey> keyFactory = new HoursOfDayKeyFactory();
+          final HourOfDayKey newKey = keyFactory.createKey(sensorId, dateTime);
+
+          return Map.entry(newKey, record.getValue());
+        })
+        // group by new keys
+        .groupingKey(Entry::getKey)
+        // Sliding/Hopping Window
+        .window(WindowDefinition.sliding(TimeUnit.SECONDS.toMillis(windowSizeInSeconds),
+            TimeUnit.SECONDS.toMillis(hoppingSizeInSeconds)))
+        // get average value of group (sensorId, hourOfDay)
+        .aggregate(
+            AggregateOperations.averagingDouble(record -> record.getValue().getValueInW()))
+        // map to return pair (sensorId, hourOfDay) -> (averaged watt value)
+        .map(agg -> {
+          final String theValue = agg.getValue().toString();
+          final String theKey = agg.getKey().toString();
+          return Map.entry(theKey, theValue);
+        });
+  }
+
+}
diff --git a/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/uc3specifics/HourOfDayKey.java b/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/uc3specifics/HourOfDayKey.java
new file mode 100644
index 0000000000000000000000000000000000000000..c69f433f3af7ec0484c254af9e59e7d284379cb0
--- /dev/null
+++ b/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/uc3specifics/HourOfDayKey.java
@@ -0,0 +1,50 @@
+package rocks.theodolite.benchmarks.uc3.hazelcastjet.uc3specifics;
+
+import java.util.Objects;
+
+/**
+ * A key consisting of an hour of the day and a sensorID.
+ *
+ */
+public class HourOfDayKey {
+
+  private final int hourOfDay;
+  private final String sensorId;
+
+  public HourOfDayKey(final int hourOfDay, final String sensorId) {
+    this.hourOfDay = hourOfDay;
+    this.sensorId = sensorId;
+  }
+
+  public int getHourOfDay() {
+    return this.hourOfDay;
+  }
+
+  public String getSensorId() {
+    return this.sensorId;
+  }
+
+  @Override
+  public String toString() {
+    return this.sensorId + ";" + this.hourOfDay;
+  }
+
+  @Override
+  public int hashCode() {
+    return Objects.hash(this.hourOfDay, this.sensorId);
+  }
+
+  @Override
+  public boolean equals(final Object obj) {
+    if (obj == this) {
+      return true;
+    }
+    if (obj instanceof HourOfDayKey) {
+      final HourOfDayKey other = (HourOfDayKey) obj;
+      return Objects.equals(this.hourOfDay, other.hourOfDay)
+          && Objects.equals(this.sensorId, other.sensorId);
+    }
+    return false;
+  }
+
+}
diff --git a/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/uc3specifics/HourOfDayKeySerializer.java b/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/uc3specifics/HourOfDayKeySerializer.java
new file mode 100644
index 0000000000000000000000000000000000000000..91ba3f2be26f4317a1dec81caf9080da8c1edc9c
--- /dev/null
+++ b/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/uc3specifics/HourOfDayKeySerializer.java
@@ -0,0 +1,32 @@
+package rocks.theodolite.benchmarks.uc3.hazelcastjet.uc3specifics;
+
+import com.hazelcast.nio.ObjectDataInput;
+import com.hazelcast.nio.ObjectDataOutput;
+import com.hazelcast.nio.serialization.StreamSerializer;
+import java.io.IOException;
+
+/**
+ * A pipeline serializer for the HourOfDayKey to allow for parallelization.
+ * 
+ */
+public class HourOfDayKeySerializer implements StreamSerializer<HourOfDayKey> {
+
+  private static final int TYPE_ID = 1;
+
+  @Override
+  public int getTypeId() {
+    return TYPE_ID;
+  }
+
+  @Override
+  public void write(final ObjectDataOutput out, final HourOfDayKey key) throws IOException {
+    out.writeInt(key.getHourOfDay());
+    out.writeString(key.getSensorId());
+  }
+
+  @Override
+  public HourOfDayKey read(final ObjectDataInput in) throws IOException {
+    return new HourOfDayKey(in.readInt(), in.readString());
+  }
+
+}
diff --git a/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/uc3specifics/HoursOfDayKeyFactory.java b/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/uc3specifics/HoursOfDayKeyFactory.java
new file mode 100644
index 0000000000000000000000000000000000000000..4eddb85efebf5b8b07317d0cd39f36b90d3f4fcd
--- /dev/null
+++ b/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/uc3specifics/HoursOfDayKeyFactory.java
@@ -0,0 +1,22 @@
+package rocks.theodolite.benchmarks.uc3.hazelcastjet.uc3specifics;
+
+import java.time.LocalDateTime;
+
+/**
+ * A factory class to build an {@link HourOfDayKey}.
+ *
+ */
+public class HoursOfDayKeyFactory implements StatsKeyFactory<HourOfDayKey> {
+
+  @Override
+  public HourOfDayKey createKey(final String sensorId, final LocalDateTime dateTime) {
+    final int hourOfDay = dateTime.getHour();
+    return new HourOfDayKey(hourOfDay, sensorId);
+  }
+
+  @Override
+  public String getSensorId(final HourOfDayKey key) {
+    return key.getSensorId();
+  }
+
+}
diff --git a/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/uc3specifics/StatsKeyFactory.java b/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/uc3specifics/StatsKeyFactory.java
new file mode 100644
index 0000000000000000000000000000000000000000..2a404781e5916473604f14f87b9c3eccf9eda342
--- /dev/null
+++ b/theodolite-benchmarks/uc3-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/uc3specifics/StatsKeyFactory.java
@@ -0,0 +1,17 @@
+package rocks.theodolite.benchmarks.uc3.hazelcastjet.uc3specifics;
+
+import java.time.LocalDateTime;
+
+/**
+ * Factory interface for creating a stats key from a sensor id and a {@link LocalDateTime} object
+ * and vice versa.
+ *
+ * @param <T> Type of the key
+ */
+public interface StatsKeyFactory<T> {
+
+  T createKey(String sensorId, LocalDateTime dateTime);
+
+  String getSensorId(T key);
+
+}
diff --git a/theodolite-benchmarks/uc3-hazelcastjet/src/main/resources/META-INF/application.properties b/theodolite-benchmarks/uc3-hazelcastjet/src/main/resources/META-INF/application.properties
new file mode 100644
index 0000000000000000000000000000000000000000..e3371cc87e20e85e6e8c327955537e6e49dab86e
--- /dev/null
+++ b/theodolite-benchmarks/uc3-hazelcastjet/src/main/resources/META-INF/application.properties
@@ -0,0 +1,8 @@
+application.name=theodolite-uc3-application
+application.version=0.0.1
+
+kafka.bootstrap.servers=localhost:9092
+kafka.input.topic=input
+
+schema.registry.url=http://localhost:8081
+
diff --git a/theodolite-benchmarks/uc3-hazelcastjet/src/test/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/Uc3PipelineTest.java b/theodolite-benchmarks/uc3-hazelcastjet/src/test/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/Uc3PipelineTest.java
new file mode 100644
index 0000000000000000000000000000000000000000..3df4f4642f1bc6c8637f90bcae3f352f5c298e51
--- /dev/null
+++ b/theodolite-benchmarks/uc3-hazelcastjet/src/test/java/rocks/theodolite/benchmarks/uc3/hazelcastjet/Uc3PipelineTest.java
@@ -0,0 +1,162 @@
+package rocks.theodolite.benchmarks.uc3.hazelcastjet;
+
+import com.hazelcast.jet.Jet;
+import com.hazelcast.jet.JetInstance;
+import com.hazelcast.jet.config.JetConfig;
+import com.hazelcast.jet.config.JobConfig;
+import com.hazelcast.jet.core.JetTestSupport;
+import com.hazelcast.jet.pipeline.Pipeline;
+import com.hazelcast.jet.pipeline.StreamSource;
+import com.hazelcast.jet.pipeline.StreamStage;
+import com.hazelcast.jet.pipeline.test.AssertionCompletedException;
+import com.hazelcast.jet.pipeline.test.Assertions;
+import com.hazelcast.jet.pipeline.test.TestSources;
+import com.hazelcast.jet.test.SerialTest;
+import java.time.Instant;
+import java.time.LocalDateTime;
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.TimeZone;
+import java.util.Map.Entry;
+import java.util.concurrent.CompletionException;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import rocks.theodolite.benchmarks.uc3.hazelcastjet.Uc3PipelineBuilder;
+import rocks.theodolite.benchmarks.uc3.hazelcastjet.uc3specifics.HourOfDayKey;
+import rocks.theodolite.benchmarks.uc3.hazelcastjet.uc3specifics.HourOfDayKeySerializer;
+import titan.ccp.model.records.ActivePowerRecord;
+
+/**
+ * Test methods for the Hazelcast Jet Implementation of UC3.
+ */
+@Category(SerialTest.class)
+public class Uc3PipelineTest extends JetTestSupport {
+
+  // Test Machinery
+  private JetInstance testInstance = null;
+  private Pipeline testPipeline = null;
+  private StreamStage<Entry<String, String>> uc3Topology = null;
+
+
+  /**
+   * Creates the JetInstance, defines a new Hazelcast Jet Pipeline and extends the UC3 topology.
+   * Allows for quick extension of tests.
+   */
+  @Before
+  public void buildUc3Pipeline() {
+
+    // Setup Configuration
+    int testItemsPerSecond = 1;
+    String testSensorName = "TEST-SENSOR";
+    Double testValueInW = 10.0;
+    int testHopSizeInSec = 1;
+    int testWindowSizeInSec = 50;
+    // Used to check hourOfDay
+    long mockTimestamp = 1632741651;
+
+
+    // Create mock jet instance with configuration
+    final String testClusterName = randomName();
+    final JetConfig testJetConfig = new JetConfig();
+    testJetConfig.getHazelcastConfig().setClusterName(testClusterName);
+    this.testInstance = this.createJetMember(testJetConfig);
+
+    // Create a test source
+    final StreamSource<Entry<String, ActivePowerRecord>> testSource =
+        TestSources.itemStream(testItemsPerSecond, (timestamp, item) -> {
+          final ActivePowerRecord testRecord =
+              new ActivePowerRecord(testSensorName, mockTimestamp, testValueInW);
+          final Entry<String, ActivePowerRecord> testEntry =
+              Map.entry(testSensorName, testRecord);
+          return testEntry;
+        });
+
+    // Create pipeline to test
+    Uc3PipelineBuilder pipelineBuilder = new Uc3PipelineBuilder();
+    this.testPipeline = Pipeline.create();
+    this.uc3Topology = pipelineBuilder.extendUc3Topology(testPipeline, testSource,
+        testHopSizeInSec, testWindowSizeInSec);
+  }
+
+  /**
+   * Tests if no items reach the end before the first window ends.
+   */
+  @Test
+  public void testOutput() {
+
+    // Assertion Configuration
+    int timeout = 10;
+    String testSensorName = "TEST-SENSOR";
+    Double testValueInW = 10.0;
+    // Used to check hourOfDay
+    long mockTimestamp = 1632741651;
+
+    // Assertion
+    this.uc3Topology.apply(Assertions.assertCollectedEventually(timeout,
+        collection -> {
+
+          // DEBUG
+          System.out.println("DEBUG: CHECK 1 || Entered Assertion of testOutput()");
+
+          // Check all collected Items
+          boolean allOkay = true;
+          if (collection != null) {
+            System.out.println("DEBUG: CHECK 2 || Collection Size: " + collection.size());
+            for (int i = 0; i < collection.size(); i++) {
+
+              // Build hour of day
+              long timestamp = mockTimestamp;
+              int expectedHour = LocalDateTime.ofInstant(Instant.ofEpochMilli(timestamp),
+                  TimeZone.getDefault().toZoneId()).getHour();
+
+              // Compare expected output with generated output
+              Entry<String, String> currentEntry = collection.get(i);
+              String expectedKey = testSensorName + ";" + expectedHour;
+              String expectedValue = testValueInW.toString();
+
+              // DEBUG
+              System.out.println(
+                  "DEBUG: CHECK 3 || Expected Output: '" + expectedKey + "=" + expectedValue
+                      + "' - Actual Output: '" + currentEntry.getKey() + "="
+                      + currentEntry.getValue().toString() + "'");
+
+              if (!(currentEntry.getKey().equals(expectedKey)
+                  && currentEntry.getValue().toString().equals(expectedValue))) {
+                System.out.println("DEBUG: CHECK 5 || Failed assertion!");
+                allOkay = false;
+              }
+            }
+          }
+
+          // Assertion
+          Assert.assertTrue(
+              "Items do not match expected structure!", allOkay);
+        }));
+
+    // Run the test!
+    try {
+      final JobConfig jobConfig = new JobConfig()
+          .registerSerializer(HourOfDayKey.class, HourOfDayKeySerializer.class);
+      this.testInstance.newJob(this.testPipeline, jobConfig).join();
+      Assert.fail("Job should have completed with an AssertionCompletedException, "
+          + "but completed normally!");
+    } catch (final CompletionException e) {
+      final String errorMsg = e.getCause().getMessage();
+      Assert.assertTrue(
+          "Job was expected to complete with AssertionCompletedException, but completed with: "
+              + e.getCause(),
+          errorMsg.contains(AssertionCompletedException.class.getName()));
+    }
+
+  }
+
+  @After
+  public void after() {
+    // Shuts down all running Jet Instances
+    Jet.shutdownAll();
+  }
+
+}
diff --git a/theodolite-benchmarks/uc4-flink/build.gradle b/theodolite-benchmarks/uc4-flink/build.gradle
index c095c4126bfae6556e51596a2d53ade601cf321f..a6d20c2ddeaf8cd614d9fb3818ad8f18ba28c2a9 100644
--- a/theodolite-benchmarks/uc4-flink/build.gradle
+++ b/theodolite-benchmarks/uc4-flink/build.gradle
@@ -2,4 +2,4 @@ plugins {
   id 'theodolite.flink'
 }
 
-mainClassName = "rocks.theodolite.benchmarks.uc2.flink.AggregationServiceFlinkJob"
+mainClassName = "rocks.theodolite.benchmarks.uc4.flink.AggregationServiceFlinkJob"
diff --git a/theodolite-benchmarks/uc4-hazelcastjet/.settings/org.eclipse.jdt.ui.prefs b/theodolite-benchmarks/uc4-hazelcastjet/.settings/org.eclipse.jdt.ui.prefs
new file mode 100644
index 0000000000000000000000000000000000000000..b2a15f439cf1844efe56f1ac0d82a2884e66cb9d
--- /dev/null
+++ b/theodolite-benchmarks/uc4-hazelcastjet/.settings/org.eclipse.jdt.ui.prefs
@@ -0,0 +1,286 @@
+cleanup.add_all=false
+cleanup.add_default_serial_version_id=true
+cleanup.add_generated_serial_version_id=false
+cleanup.add_missing_annotations=true
+cleanup.add_missing_deprecated_annotations=true
+cleanup.add_missing_methods=false
+cleanup.add_missing_nls_tags=false
+cleanup.add_missing_override_annotations=true
+cleanup.add_missing_override_annotations_interface_methods=true
+cleanup.add_serial_version_id=false
+cleanup.always_use_blocks=true
+cleanup.always_use_parentheses_in_expressions=false
+cleanup.always_use_this_for_non_static_field_access=true
+cleanup.always_use_this_for_non_static_method_access=true
+cleanup.array_with_curly=false
+cleanup.arrays_fill=false
+cleanup.bitwise_conditional_expression=false
+cleanup.boolean_literal=false
+cleanup.boolean_value_rather_than_comparison=true
+cleanup.break_loop=false
+cleanup.collection_cloning=false
+cleanup.comparing_on_criteria=false
+cleanup.comparison_statement=false
+cleanup.controlflow_merge=false
+cleanup.convert_functional_interfaces=false
+cleanup.convert_to_enhanced_for_loop=true
+cleanup.convert_to_enhanced_for_loop_if_loop_var_used=true
+cleanup.convert_to_switch_expressions=false
+cleanup.correct_indentation=true
+cleanup.do_while_rather_than_while=true
+cleanup.double_negation=false
+cleanup.else_if=false
+cleanup.embedded_if=false
+cleanup.evaluate_nullable=false
+cleanup.extract_increment=false
+cleanup.format_source_code=true
+cleanup.format_source_code_changes_only=false
+cleanup.hash=false
+cleanup.if_condition=false
+cleanup.insert_inferred_type_arguments=false
+cleanup.instanceof=false
+cleanup.instanceof_keyword=false
+cleanup.invert_equals=false
+cleanup.join=false
+cleanup.lazy_logical_operator=false
+cleanup.make_local_variable_final=true
+cleanup.make_parameters_final=true
+cleanup.make_private_fields_final=true
+cleanup.make_type_abstract_if_missing_method=false
+cleanup.make_variable_declarations_final=true
+cleanup.map_cloning=false
+cleanup.merge_conditional_blocks=false
+cleanup.multi_catch=false
+cleanup.never_use_blocks=false
+cleanup.never_use_parentheses_in_expressions=true
+cleanup.no_string_creation=false
+cleanup.no_super=false
+cleanup.number_suffix=false
+cleanup.objects_equals=false
+cleanup.one_if_rather_than_duplicate_blocks_that_fall_through=true
+cleanup.operand_factorization=false
+cleanup.organize_imports=true
+cleanup.overridden_assignment=false
+cleanup.plain_replacement=false
+cleanup.precompile_regex=false
+cleanup.primitive_comparison=false
+cleanup.primitive_parsing=false
+cleanup.primitive_rather_than_wrapper=true
+cleanup.primitive_serialization=false
+cleanup.pull_out_if_from_if_else=false
+cleanup.pull_up_assignment=false
+cleanup.push_down_negation=false
+cleanup.qualify_static_field_accesses_with_declaring_class=false
+cleanup.qualify_static_member_accesses_through_instances_with_declaring_class=true
+cleanup.qualify_static_member_accesses_through_subtypes_with_declaring_class=true
+cleanup.qualify_static_member_accesses_with_declaring_class=true
+cleanup.qualify_static_method_accesses_with_declaring_class=false
+cleanup.reduce_indentation=false
+cleanup.redundant_comparator=false
+cleanup.redundant_falling_through_block_end=false
+cleanup.remove_private_constructors=true
+cleanup.remove_redundant_modifiers=false
+cleanup.remove_redundant_semicolons=true
+cleanup.remove_redundant_type_arguments=true
+cleanup.remove_trailing_whitespaces=true
+cleanup.remove_trailing_whitespaces_all=true
+cleanup.remove_trailing_whitespaces_ignore_empty=false
+cleanup.remove_unnecessary_array_creation=false
+cleanup.remove_unnecessary_casts=true
+cleanup.remove_unnecessary_nls_tags=true
+cleanup.remove_unused_imports=true
+cleanup.remove_unused_local_variables=false
+cleanup.remove_unused_private_fields=true
+cleanup.remove_unused_private_members=false
+cleanup.remove_unused_private_methods=true
+cleanup.remove_unused_private_types=true
+cleanup.return_expression=false
+cleanup.simplify_lambda_expression_and_method_ref=false
+cleanup.single_used_field=false
+cleanup.sort_members=false
+cleanup.sort_members_all=false
+cleanup.standard_comparison=false
+cleanup.static_inner_class=false
+cleanup.strictly_equal_or_different=false
+cleanup.stringbuffer_to_stringbuilder=false
+cleanup.stringbuilder=false
+cleanup.stringbuilder_for_local_vars=true
+cleanup.stringconcat_to_textblock=false
+cleanup.substring=false
+cleanup.switch=false
+cleanup.system_property=false
+cleanup.system_property_boolean=false
+cleanup.system_property_file_encoding=false
+cleanup.system_property_file_separator=false
+cleanup.system_property_line_separator=false
+cleanup.system_property_path_separator=false
+cleanup.ternary_operator=false
+cleanup.try_with_resource=false
+cleanup.unlooped_while=false
+cleanup.unreachable_block=false
+cleanup.use_anonymous_class_creation=false
+cleanup.use_autoboxing=false
+cleanup.use_blocks=true
+cleanup.use_blocks_only_for_return_and_throw=false
+cleanup.use_directly_map_method=false
+cleanup.use_lambda=true
+cleanup.use_parentheses_in_expressions=true
+cleanup.use_string_is_blank=false
+cleanup.use_this_for_non_static_field_access=true
+cleanup.use_this_for_non_static_field_access_only_if_necessary=false
+cleanup.use_this_for_non_static_method_access=true
+cleanup.use_this_for_non_static_method_access_only_if_necessary=false
+cleanup.use_unboxing=false
+cleanup.use_var=false
+cleanup.useless_continue=false
+cleanup.useless_return=false
+cleanup.valueof_rather_than_instantiation=false
+cleanup_profile=_CAU-SE-Style
+cleanup_settings_version=2
+eclipse.preferences.version=1
+editor_save_participant_org.eclipse.jdt.ui.postsavelistener.cleanup=true
+formatter_profile=_CAU-SE-Style
+formatter_settings_version=21
+org.eclipse.jdt.ui.ignorelowercasenames=true
+org.eclipse.jdt.ui.importorder=
+org.eclipse.jdt.ui.ondemandthreshold=99
+org.eclipse.jdt.ui.staticondemandthreshold=99
+org.eclipse.jdt.ui.text.custom_code_templates=
+sp_cleanup.add_all=false
+sp_cleanup.add_default_serial_version_id=true
+sp_cleanup.add_generated_serial_version_id=false
+sp_cleanup.add_missing_annotations=true
+sp_cleanup.add_missing_deprecated_annotations=true
+sp_cleanup.add_missing_methods=false
+sp_cleanup.add_missing_nls_tags=false
+sp_cleanup.add_missing_override_annotations=true
+sp_cleanup.add_missing_override_annotations_interface_methods=true
+sp_cleanup.add_serial_version_id=false
+sp_cleanup.always_use_blocks=true
+sp_cleanup.always_use_parentheses_in_expressions=false
+sp_cleanup.always_use_this_for_non_static_field_access=true
+sp_cleanup.always_use_this_for_non_static_method_access=true
+sp_cleanup.array_with_curly=false
+sp_cleanup.arrays_fill=false
+sp_cleanup.bitwise_conditional_expression=false
+sp_cleanup.boolean_literal=false
+sp_cleanup.boolean_value_rather_than_comparison=false
+sp_cleanup.break_loop=false
+sp_cleanup.collection_cloning=false
+sp_cleanup.comparing_on_criteria=true
+sp_cleanup.comparison_statement=false
+sp_cleanup.controlflow_merge=false
+sp_cleanup.convert_functional_interfaces=false
+sp_cleanup.convert_to_enhanced_for_loop=true
+sp_cleanup.convert_to_enhanced_for_loop_if_loop_var_used=false
+sp_cleanup.convert_to_switch_expressions=false
+sp_cleanup.correct_indentation=true
+sp_cleanup.do_while_rather_than_while=false
+sp_cleanup.double_negation=false
+sp_cleanup.else_if=false
+sp_cleanup.embedded_if=false
+sp_cleanup.evaluate_nullable=false
+sp_cleanup.extract_increment=false
+sp_cleanup.format_source_code=true
+sp_cleanup.format_source_code_changes_only=false
+sp_cleanup.hash=false
+sp_cleanup.if_condition=false
+sp_cleanup.insert_inferred_type_arguments=false
+sp_cleanup.instanceof=false
+sp_cleanup.instanceof_keyword=false
+sp_cleanup.invert_equals=false
+sp_cleanup.join=false
+sp_cleanup.lazy_logical_operator=false
+sp_cleanup.make_local_variable_final=true
+sp_cleanup.make_parameters_final=true
+sp_cleanup.make_private_fields_final=true
+sp_cleanup.make_type_abstract_if_missing_method=false
+sp_cleanup.make_variable_declarations_final=true
+sp_cleanup.map_cloning=false
+sp_cleanup.merge_conditional_blocks=false
+sp_cleanup.multi_catch=false
+sp_cleanup.never_use_blocks=false
+sp_cleanup.never_use_parentheses_in_expressions=true
+sp_cleanup.no_string_creation=false
+sp_cleanup.no_super=false
+sp_cleanup.number_suffix=false
+sp_cleanup.objects_equals=false
+sp_cleanup.on_save_use_additional_actions=true
+sp_cleanup.one_if_rather_than_duplicate_blocks_that_fall_through=false
+sp_cleanup.operand_factorization=false
+sp_cleanup.organize_imports=true
+sp_cleanup.overridden_assignment=false
+sp_cleanup.plain_replacement=false
+sp_cleanup.precompile_regex=false
+sp_cleanup.primitive_comparison=false
+sp_cleanup.primitive_parsing=false
+sp_cleanup.primitive_rather_than_wrapper=false
+sp_cleanup.primitive_serialization=false
+sp_cleanup.pull_out_if_from_if_else=false
+sp_cleanup.pull_up_assignment=false
+sp_cleanup.push_down_negation=false
+sp_cleanup.qualify_static_field_accesses_with_declaring_class=false
+sp_cleanup.qualify_static_member_accesses_through_instances_with_declaring_class=true
+sp_cleanup.qualify_static_member_accesses_through_subtypes_with_declaring_class=true
+sp_cleanup.qualify_static_member_accesses_with_declaring_class=true
+sp_cleanup.qualify_static_method_accesses_with_declaring_class=false
+sp_cleanup.reduce_indentation=false
+sp_cleanup.redundant_comparator=false
+sp_cleanup.redundant_falling_through_block_end=false
+sp_cleanup.remove_private_constructors=true
+sp_cleanup.remove_redundant_modifiers=false
+sp_cleanup.remove_redundant_semicolons=false
+sp_cleanup.remove_redundant_type_arguments=false
+sp_cleanup.remove_trailing_whitespaces=true
+sp_cleanup.remove_trailing_whitespaces_all=true
+sp_cleanup.remove_trailing_whitespaces_ignore_empty=false
+sp_cleanup.remove_unnecessary_array_creation=false
+sp_cleanup.remove_unnecessary_casts=true
+sp_cleanup.remove_unnecessary_nls_tags=true
+sp_cleanup.remove_unused_imports=true
+sp_cleanup.remove_unused_local_variables=false
+sp_cleanup.remove_unused_private_fields=true
+sp_cleanup.remove_unused_private_members=false
+sp_cleanup.remove_unused_private_methods=true
+sp_cleanup.remove_unused_private_types=true
+sp_cleanup.return_expression=false
+sp_cleanup.simplify_lambda_expression_and_method_ref=false
+sp_cleanup.single_used_field=false
+sp_cleanup.sort_members=false
+sp_cleanup.sort_members_all=false
+sp_cleanup.standard_comparison=false
+sp_cleanup.static_inner_class=false
+sp_cleanup.strictly_equal_or_different=false
+sp_cleanup.stringbuffer_to_stringbuilder=false
+sp_cleanup.stringbuilder=false
+sp_cleanup.stringbuilder_for_local_vars=false
+sp_cleanup.stringconcat_to_textblock=false
+sp_cleanup.substring=false
+sp_cleanup.switch=false
+sp_cleanup.system_property=false
+sp_cleanup.system_property_boolean=false
+sp_cleanup.system_property_file_encoding=false
+sp_cleanup.system_property_file_separator=false
+sp_cleanup.system_property_line_separator=false
+sp_cleanup.system_property_path_separator=false
+sp_cleanup.ternary_operator=false
+sp_cleanup.try_with_resource=false
+sp_cleanup.unlooped_while=false
+sp_cleanup.unreachable_block=false
+sp_cleanup.use_anonymous_class_creation=false
+sp_cleanup.use_autoboxing=false
+sp_cleanup.use_blocks=true
+sp_cleanup.use_blocks_only_for_return_and_throw=false
+sp_cleanup.use_directly_map_method=false
+sp_cleanup.use_lambda=true
+sp_cleanup.use_parentheses_in_expressions=true
+sp_cleanup.use_string_is_blank=false
+sp_cleanup.use_this_for_non_static_field_access=true
+sp_cleanup.use_this_for_non_static_field_access_only_if_necessary=false
+sp_cleanup.use_this_for_non_static_method_access=true
+sp_cleanup.use_this_for_non_static_method_access_only_if_necessary=false
+sp_cleanup.use_unboxing=false
+sp_cleanup.use_var=false
+sp_cleanup.useless_continue=true
+sp_cleanup.useless_return=true
+sp_cleanup.valueof_rather_than_instantiation=false
diff --git a/theodolite-benchmarks/uc4-hazelcastjet/.settings/qa.eclipse.plugin.checkstyle.prefs b/theodolite-benchmarks/uc4-hazelcastjet/.settings/qa.eclipse.plugin.checkstyle.prefs
new file mode 100644
index 0000000000000000000000000000000000000000..4fa4266c755f4ff8da465ab7341cd70ffb24ecf7
--- /dev/null
+++ b/theodolite-benchmarks/uc4-hazelcastjet/.settings/qa.eclipse.plugin.checkstyle.prefs
@@ -0,0 +1,4 @@
+configFilePath=../config/checkstyle.xml
+customModulesJarPaths=
+eclipse.preferences.version=1
+enabled=false
diff --git a/theodolite-benchmarks/uc4-hazelcastjet/.settings/qa.eclipse.plugin.pmd.prefs b/theodolite-benchmarks/uc4-hazelcastjet/.settings/qa.eclipse.plugin.pmd.prefs
new file mode 100644
index 0000000000000000000000000000000000000000..40bfd0ecdbbe324bb54e4b9f9f32ba95cf5b0c2a
--- /dev/null
+++ b/theodolite-benchmarks/uc4-hazelcastjet/.settings/qa.eclipse.plugin.pmd.prefs
@@ -0,0 +1,4 @@
+customRulesJars=
+eclipse.preferences.version=1
+enabled=false
+ruleSetFilePath=../config/pmd.xml
diff --git a/theodolite-benchmarks/uc4-hazelcastjet/Dockerfile b/theodolite-benchmarks/uc4-hazelcastjet/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..a09c59d007a4de426a5046221662cdf1e912ee56
--- /dev/null
+++ b/theodolite-benchmarks/uc4-hazelcastjet/Dockerfile
@@ -0,0 +1,7 @@
+FROM openjdk:11-slim
+
+ADD build/distributions/uc4-hazelcastjet.tar /
+
+
+CMD  JAVA_OPTS="$JAVA_OPTS -Dorg.slf4j.simpleLogger.defaultLogLevel=$LOG_LEVEL" \
+     /uc4-hazelcastjet/bin/uc4-hazelcastjet
\ No newline at end of file
diff --git a/theodolite-benchmarks/uc4-hazelcastjet/build.gradle b/theodolite-benchmarks/uc4-hazelcastjet/build.gradle
new file mode 100644
index 0000000000000000000000000000000000000000..01daa0b88ffea88ed52e1ca6afa682150ade1b50
--- /dev/null
+++ b/theodolite-benchmarks/uc4-hazelcastjet/build.gradle
@@ -0,0 +1,5 @@
+plugins {
+  id 'theodolite.hazelcastjet'
+}
+
+mainClassName = "rocks.theodolite.benchmarks.uc4.hazelcastjet.HistoryService"
\ No newline at end of file
diff --git a/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/HistoryService.java b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/HistoryService.java
new file mode 100644
index 0000000000000000000000000000000000000000..419c25fec3eeffbd9eabef4897c44b7c6e773cee
--- /dev/null
+++ b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/HistoryService.java
@@ -0,0 +1,74 @@
+package rocks.theodolite.benchmarks.uc4.hazelcastjet;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * A microservice that manages the history and, therefore, stores and aggregates incoming
+ * measurements.
+ */
+public class HistoryService {
+
+  private static final Logger LOGGER = LoggerFactory.getLogger(HistoryService.class);
+
+  // Hazelcast settings (default)
+  private static final String HZ_KUBERNETES_SERVICE_DNS_KEY = "service-dns";
+  private static final String BOOTSTRAP_SERVER_DEFAULT = "localhost:5701";
+
+  // Kafka settings (default)
+  private static final String KAFKA_BOOTSTRAP_DEFAULT = "localhost:9092";
+  private static final String SCHEMA_REGISTRY_URL_DEFAULT = "http://localhost:8081";
+  private static final String KAFKA_INPUT_TOPIC_DEFAULT = "input";
+  private static final String KAFKA_CONFIG_TOPIC_DEFAULT = "configuration";
+  private static final String KAFKA_FEEDBACK_TOPIC_DEFAULT = "aggregation-feedback";
+  private static final String KAFKA_OUTPUT_TOPIC_DEFAULT = "output";
+
+  // UC4 specific (default)
+  private static final String WINDOW_SIZE_DEFAULT_MS = "5000";
+
+  // Job name (default)
+  private static final String JOB_NAME = "uc4-hazelcastjet";
+
+  /**
+   * Entrypoint for UC4 using Gradle Run.
+   */
+  public static void main(final String[] args) {
+    final HistoryService uc4HistoryService = new HistoryService();
+    try {
+      uc4HistoryService.run();
+    } catch (final Exception e) { // NOPMD
+      LOGGER.error("ABORT MISSION!: {}", e);
+    }
+  }
+
+  /**
+   * Start a UC4 service.
+   *
+   * @throws Exception This Exception occurs if the Uc4HazelcastJetFactory is used in the wrong way.
+   *         Detailed data is provided once an Exception occurs.
+   */
+  public void run() throws Exception { // NOPMD
+    this.createHazelcastJetApplication();
+  }
+
+  /**
+   * Creates a Hazelcast Jet Application for UC4 using the Uc4HazelcastJetFactory.
+   *
+   * @throws Exception This Exception occurs if the Uc4HazelcastJetFactory is used in the wrong way.
+   *         Detailed data is provided once an Exception occurs.
+   */
+  private void createHazelcastJetApplication() throws Exception { // NOPMD
+    new Uc4HazelcastJetFactory()
+        .setReadPropertiesFromEnv(KAFKA_BOOTSTRAP_DEFAULT, SCHEMA_REGISTRY_URL_DEFAULT,JOB_NAME)
+        .setWritePropertiesFromEnv(KAFKA_BOOTSTRAP_DEFAULT, SCHEMA_REGISTRY_URL_DEFAULT)
+        .setKafkaInputTopicFromEnv(KAFKA_INPUT_TOPIC_DEFAULT)
+        .setKafkaOutputTopicFromEnv(KAFKA_OUTPUT_TOPIC_DEFAULT)
+        .setKafkaConfigurationTopicFromEnv(KAFKA_CONFIG_TOPIC_DEFAULT)
+        .setKafkaFeedbackTopicFromEnv(KAFKA_FEEDBACK_TOPIC_DEFAULT)
+        .setWindowSizeFromEnv(WINDOW_SIZE_DEFAULT_MS)
+        .buildUc4JetInstanceFromEnv(LOGGER, BOOTSTRAP_SERVER_DEFAULT, HZ_KUBERNETES_SERVICE_DNS_KEY)
+        .buildUc4Pipeline()
+        .runUc4Job(JOB_NAME);
+  }
+
+}
diff --git a/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/Uc4HazelcastJetFactory.java b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/Uc4HazelcastJetFactory.java
new file mode 100644
index 0000000000000000000000000000000000000000..9b6aa71267150296d8b65268b1922925b7ada796
--- /dev/null
+++ b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/Uc4HazelcastJetFactory.java
@@ -0,0 +1,389 @@
+package rocks.theodolite.benchmarks.uc4.hazelcastjet;
+
+import com.hazelcast.jet.JetInstance;
+import com.hazelcast.jet.config.JobConfig;
+import com.hazelcast.jet.pipeline.Pipeline;
+import io.confluent.kafka.serializers.KafkaAvroDeserializer;
+import io.confluent.kafka.serializers.KafkaAvroSerializer;
+import java.util.Objects;
+import java.util.Properties;
+import org.apache.kafka.common.serialization.StringDeserializer;
+import org.apache.kafka.common.serialization.StringSerializer;
+import org.slf4j.Logger;
+import rocks.theodolite.benchmarks.commons.hazelcastjet.ConfigurationKeys;
+import rocks.theodolite.benchmarks.commons.hazelcastjet.JetInstanceBuilder;
+import rocks.theodolite.benchmarks.commons.hazelcastjet.KafkaPropertiesBuilder;
+import rocks.theodolite.benchmarks.uc4.hazelcastjet.uc4specifics.EventDeserializer;
+import rocks.theodolite.benchmarks.uc4.hazelcastjet.uc4specifics.ImmutableSensorRegistryUc4Serializer;
+import rocks.theodolite.benchmarks.uc4.hazelcastjet.uc4specifics.SensorGroupKey;
+import rocks.theodolite.benchmarks.uc4.hazelcastjet.uc4specifics.SensorGroupKeySerializer;
+import rocks.theodolite.benchmarks.uc4.hazelcastjet.uc4specifics.ValueGroup;
+import rocks.theodolite.benchmarks.uc4.hazelcastjet.uc4specifics.ValueGroupSerializer;
+import titan.ccp.model.sensorregistry.ImmutableSensorRegistry;
+
+/**
+ * A Hazelcast Jet factory which can build a Hazelcast Jet Instance and Pipeline for the UC4
+ * benchmark and lets you start the Hazelcast Jet job. The JetInstance can be built directly as the
+ * Hazelcast Config is managed internally. In order to build the Pipeline, you first have to build
+ * the Read and Write Properties and set the input, output, and configuration topic. This can be
+ * done using internal functions of this factory. Outside data only refers to custom values or
+ * default values in case data of the environment cannot be fetched.
+ */
+public class Uc4HazelcastJetFactory {
+
+  // Information per History Service
+  private Properties kafkaInputReadPropsForPipeline;
+  private Properties kafkaConfigPropsForPipeline;
+  private Properties kafkaFeedbackPropsForPipeline;
+  private Properties kafkaWritePropsForPipeline;
+  private String kafkaInputTopic;
+  private String kafkaOutputTopic;
+  private JetInstance uc4JetInstance;
+  private Pipeline uc4JetPipeline;
+  // UC4 specific
+  private String kafkaConfigurationTopic;
+  private String kafkaFeedbackTopic;
+  private int windowSize;
+
+  /////////////////////////////////////
+  // Layer 1 - Hazelcast Jet Run Job //
+  /////////////////////////////////////
+
+  /**
+   * Needs a JetInstance and Pipeline defined in this factory. Adds the pipeline to the existing
+   * JetInstance as a job.
+   *
+   * @param jobName The name of the job.
+   * @throws Exception If either no JetInstance or Pipeline is set, a job cannot be started.
+   */
+  public void runUc4Job(final String jobName) throws IllegalStateException { // NOPMD
+
+    // Check if a Jet Instance for UC4 is set.
+    if (this.uc4JetInstance == null) {
+      throw new IllegalStateException("Jet Instance is not set! "
+          + "Cannot start a hazelcast jet job for UC4.");
+    }
+
+    // Check if a Pipeline for UC4 is set.
+    if (this.uc4JetPipeline == null) {
+      throw new IllegalStateException(
+          "Hazelcast Pipeline is not set! Cannot start a hazelcast jet job for UC4.");
+    }
+
+    // Adds the job name and joins a job to the JetInstance defined in this factory
+    final JobConfig jobConfig = new JobConfig()
+        .registerSerializer(ValueGroup.class, ValueGroupSerializer.class)
+        .registerSerializer(SensorGroupKey.class, SensorGroupKeySerializer.class)
+        .registerSerializer(ImmutableSensorRegistry.class,
+            ImmutableSensorRegistryUc4Serializer.class)
+        .setName(jobName);
+    this.uc4JetInstance.newJobIfAbsent(this.uc4JetPipeline, jobConfig).join();
+  }
+
+  /////////////
+  // Layer 2 //
+  /////////////
+
+  /**
+   * Build a Hazelcast JetInstance used to run a job on.
+   *
+   * @param logger The logger specified for this JetInstance.
+   * @param bootstrapServerDefault Default bootstrap server in case no value can be derived from the
+   *        environment.
+   * @param hzKubernetesServiceDnsKey The kubernetes service dns key.
+   * @return A Uc4HazelcastJetFactory containing a set JetInstance.
+   */
+  public Uc4HazelcastJetFactory buildUc4JetInstanceFromEnv(final Logger logger,
+      final String bootstrapServerDefault,
+      final String hzKubernetesServiceDnsKey) {
+    this.uc4JetInstance = new JetInstanceBuilder()
+        .setConfigFromEnv(logger, bootstrapServerDefault, hzKubernetesServiceDnsKey)
+        .build();
+    return this;
+  }
+
+  /**
+   * Builds a Hazelcast Jet pipeline used for a JetInstance to run it as a job on. Needs the input
+   * topic and kafka properties defined in this factory beforehand.
+   *
+   * @return A Uc4HazelcastJetFactory containing a set pipeline.
+   * @throws Exception If the input topic or the kafka properties are not defined, the pipeline
+   *         cannot be built.
+   */
+  public Uc4HazelcastJetFactory buildUc4Pipeline() throws IllegalStateException { // NOPMD
+
+    final String defaultPipelineWarning = "Cannot build pipeline."; // NOPMD
+
+    // Check if Properties for the Kafka Input are set.
+    if (this.kafkaInputReadPropsForPipeline == null) {
+      throw new IllegalStateException("Kafka Input Read Properties for pipeline not set! "
+          + defaultPipelineWarning);
+    }
+
+    // Check if Properties for the Kafka Output are set.
+    if (this.kafkaWritePropsForPipeline == null) {
+      throw new IllegalStateException("Kafka Write Properties for pipeline not set! "
+          + defaultPipelineWarning);
+    }
+
+    // Check if Properties for the Kafka Config Read are set.
+    if (this.kafkaConfigPropsForPipeline == null) {
+      throw new IllegalStateException("Kafka Config Read Properties for pipeline not set! "
+          + defaultPipelineWarning);
+    }
+
+    // Check if Properties for the Kafka Feedback Read are set.
+    if (this.kafkaFeedbackPropsForPipeline == null) {
+      throw new IllegalStateException("Kafka Feedback Read Properties for pipeline not set! "
+          + defaultPipelineWarning);
+    }
+
+    // Check if the Kafka input topic is set.
+    if (this.kafkaInputTopic == null) {
+      throw new IllegalStateException("Kafka input topic for pipeline not set! "
+          + defaultPipelineWarning);
+    }
+
+    // Check if the Kafka output topic is set.
+    if (this.kafkaOutputTopic == null) {
+      throw new IllegalStateException("kafka output topic for pipeline not set! "
+          + defaultPipelineWarning);
+    }
+
+    // Check if the Kafka config topic is set.
+    if (this.kafkaConfigurationTopic == null) {
+      throw new IllegalStateException("configuratin topic for pipeline not set! "
+          + defaultPipelineWarning);
+    }
+
+    // Check if the Kafka feedback topic is set.
+    if (this.kafkaFeedbackTopic == null) {
+      throw new IllegalStateException("Feedback topic not set! "
+          + defaultPipelineWarning);
+    }
+
+    // Check if window size for tumbling window is set.
+    if (this.windowSize <= 0) {
+      throw new IllegalStateException("window size for pipeline not set or not greater than 0! "
+          + defaultPipelineWarning);
+    }
+
+    // Build Pipeline Using the pipelineBuilder
+    final Uc4PipelineBuilder pipeBuilder = new Uc4PipelineBuilder();
+    this.uc4JetPipeline =
+        pipeBuilder.build(this.kafkaInputReadPropsForPipeline,
+            this.kafkaConfigPropsForPipeline,
+            this.kafkaFeedbackPropsForPipeline,
+            this.kafkaWritePropsForPipeline,
+            this.kafkaInputTopic, this.kafkaOutputTopic,
+            this.kafkaConfigurationTopic,
+            this.kafkaFeedbackTopic,
+            this.windowSize);
+    // Return Uc4HazelcastJetBuilder factory
+    return this;
+  }
+
+  /////////////
+  // Layer 3 //
+  /////////////
+
+  /**
+   * Sets kafka read properties for pipeline used in this builder using environment variables.
+   *
+   * @param bootstrapServersDefault Default Bootstrap server in the case that no bootstrap server
+   *        can be fetched from the environment.
+   * @param schemaRegistryUrlDefault Default schema registry url in the case that no schema registry
+   *        url can be fetched from the environment.
+   * @return The Uc4HazelcastJetBuilder factory with set kafkaReadPropertiesForPipeline.
+   */
+  public Uc4HazelcastJetFactory setReadPropertiesFromEnv(// NOPMD
+                                                         final String bootstrapServersDefault,
+                                                         final String schemaRegistryUrlDefault,
+                                                         final String jobName) {
+    // Use KafkaPropertiesBuilder to build a properties object used for kafka
+    final KafkaPropertiesBuilder propsBuilder = new KafkaPropertiesBuilder();
+
+    final Properties kafkaInputReadProps =
+        propsBuilder.buildKafkaInputReadPropsFromEnv(bootstrapServersDefault,
+            schemaRegistryUrlDefault, jobName,
+            StringDeserializer.class.getCanonicalName(),
+            KafkaAvroDeserializer.class.getCanonicalName());
+
+    final Properties kafkaConfigReadProps =
+        propsBuilder.buildKafkaInputReadPropsFromEnv(bootstrapServersDefault,
+            schemaRegistryUrlDefault,
+            jobName,
+            EventDeserializer.class.getCanonicalName(),
+            StringDeserializer.class.getCanonicalName());
+
+    final Properties kafkaAggregationReadProps =
+        propsBuilder.buildKafkaInputReadPropsFromEnv(bootstrapServersDefault,
+            schemaRegistryUrlDefault,
+            jobName,
+            StringDeserializer.class.getCanonicalName(),
+            KafkaAvroDeserializer.class.getCanonicalName());
+
+    this.kafkaInputReadPropsForPipeline = kafkaInputReadProps;
+    this.kafkaConfigPropsForPipeline = kafkaConfigReadProps;
+    this.kafkaFeedbackPropsForPipeline = kafkaAggregationReadProps;
+    return this;
+  }
+
+  /**
+   * Sets kafka write properties for pipeline used in this builder using environment variables.
+   *
+   * @param bootstrapServersDefault Default Bootstrap server in the case that no bootstrap server
+   *        can be fetched from the environment.
+   * @return The Uc4HazelcastJetBuilder factory with set kafkaWritePropertiesForPipeline.
+   */
+  public Uc4HazelcastJetFactory setWritePropertiesFromEnv(// NOPMD
+      final String bootstrapServersDefault, final String schemaRegistryUrlDefault) {
+    // Use KafkaPropertiesBuilder to build a properties object used for kafka
+    final KafkaPropertiesBuilder propsBuilder = new KafkaPropertiesBuilder();
+    final Properties kafkaWriteProps =
+        propsBuilder.buildKafkaWritePropsFromEnv(bootstrapServersDefault,
+            schemaRegistryUrlDefault,
+            StringSerializer.class.getCanonicalName(),
+            KafkaAvroSerializer.class.getCanonicalName());
+    this.kafkaWritePropsForPipeline = kafkaWriteProps;
+    return this;
+  }
+
+  /**
+   * Sets the kafka input topic for the pipeline used in this builder.
+   *
+   * @param inputTopic The kafka topic used as the pipeline input.
+   * @return A Uc4HazelcastJetBuilder factory with a set kafkaInputTopic.
+   */
+  public Uc4HazelcastJetFactory setCustomKafkaInputTopic(// NOPMD
+      final String inputTopic) {
+    this.kafkaInputTopic = inputTopic;
+    return this;
+  }
+
+  /**
+   * Sets the kafka input output for the pipeline used in this builder.
+   *
+   * @param outputTopic The kafka topic used as the pipeline output.
+   * @return A Uc4HazelcastJetBuilder factory with a set kafkaOutputTopic.
+   */
+  public Uc4HazelcastJetFactory setCustomKafkaOutputTopic(final String outputTopic) { // NOPMD
+    this.kafkaOutputTopic = outputTopic;
+    return this;
+  }
+
+
+  /**
+   * Sets the kafka input topic for the pipeline used in this builder using environment variables.
+   *
+   * @param defaultInputTopic The default kafka input topic used if no topic is specified by the
+   *        environment.
+   * @return A Uc4HazelcastJetBuilder factory with a set kafkaInputTopic.
+   */
+  public Uc4HazelcastJetFactory setKafkaInputTopicFromEnv(// NOPMD
+      final String defaultInputTopic) {
+    this.kafkaInputTopic = Objects.requireNonNullElse(
+        System.getenv(ConfigurationKeys.KAFKA_INPUT_TOPIC),
+        defaultInputTopic);
+    return this;
+  }
+
+  /**
+   * Sets the kafka output topic for the pipeline used in this builder using environment variables.
+   *
+   * @param defaultOutputTopic The default kafka output topic used if no topic is specified by the
+   *        environment.
+   * @return A Uc4HazelcastJetBuilder factory with a set kafkaOutputTopic.
+   */
+  public Uc4HazelcastJetFactory setKafkaOutputTopicFromEnv(// NOPMD
+      final String defaultOutputTopic) {
+    this.kafkaOutputTopic = Objects.requireNonNullElse(
+        System.getenv(ConfigurationKeys.KAFKA_OUTPUT_TOPIC),
+        defaultOutputTopic);
+    return this;
+  }
+
+  /**
+   * Sets the window size for the pipeline used in this builder.
+   *
+   * @param windowSize the window size to be used for this pipeline.
+   * @return A Uc4HazelcastJetFactory with a set windowSize.
+   */
+  public Uc4HazelcastJetFactory setCustomWindowSize(// NOPMD
+      final int windowSize) {
+    this.windowSize = windowSize;
+    return this;
+  }
+
+  /**
+   * Sets the window size for the pipeline used in this builder from the environment.
+   *
+   * @param defaultWindowSize the default window size to be used for this pipeline when none is set
+   *        in the environment.
+   * @return A Uc4HazelcastJetFactory with a set windowSize.
+   */
+  public Uc4HazelcastJetFactory setWindowSizeFromEnv(// NOPMD
+      final String defaultWindowSize) {
+    final String windowSize = Objects.requireNonNullElse(
+        System.getenv(ConfigurationKeys.WINDOW_SIZE_UC4),
+        defaultWindowSize);
+    final int windowSizeNumber = Integer.parseInt(windowSize);
+    this.windowSize = windowSizeNumber;
+    return this;
+  }
+
+  /**
+   * Sets the configuration topic for the pipeline used in this builder.
+   *
+   * @param kafkaConfigurationTopic the configuration topic to be used for this pipeline.
+   * @return A Uc4HazelcastJetFactory with a set configuration topic.
+   */
+  public Uc4HazelcastJetFactory setCustomKafkaConfigurationTopic(// NOPMD
+      final String kafkaConfigurationTopic) {
+    this.kafkaConfigurationTopic = kafkaConfigurationTopic;
+    return this;
+  }
+
+  /**
+   * Sets the configuration topic for the pipeline used in this builder from the environment.
+   *
+   * @param defaultKafkaConfigurationTopic the default configuration topic to be used for this
+   *        pipeline when none is set in the environment.
+   * @return A Uc4HazelcastJetFactory with a set kafkaConfigurationTopic.
+   */
+  public Uc4HazelcastJetFactory setKafkaConfigurationTopicFromEnv(// NOPMD
+      final String defaultKafkaConfigurationTopic) {
+    this.kafkaConfigurationTopic = Objects.requireNonNullElse(
+        System.getenv(ConfigurationKeys.KAFKA_CONFIGURATION_TOPIC),
+        defaultKafkaConfigurationTopic);
+    return this;
+  }
+
+  /**
+   * Sets the Feedback topic for the pipeline used in this builder.
+   *
+   * @param kafkaFeedbackTopic the Feedback topic to be used for this pipeline.
+   * @return A Uc4HazelcastJetFactory with a set Feedback topic.
+   */
+  public Uc4HazelcastJetFactory setCustomKafkaFeedbackTopic(// NOPMD
+      final String kafkaFeedbackTopic) {
+    this.kafkaFeedbackTopic = kafkaFeedbackTopic;
+    return this;
+  }
+
+  /**
+   * Sets the Feedback topic for the pipeline used in this builder from the environment.
+   *
+   * @param defaultKafkaFeedbackTopic the default Feedback topic to be used for this pipeline when
+   *        none is set in the environment.
+   * @return A Uc4HazelcastJetFactory with a set kafkaFeedbackTopic.
+   */
+  public Uc4HazelcastJetFactory setKafkaFeedbackTopicFromEnv(// NOPMD
+      final String defaultKafkaFeedbackTopic) {
+    this.kafkaFeedbackTopic = Objects.requireNonNullElse(
+        System.getenv(ConfigurationKeys.KAFKA_FEEDBACK_TOPIC),
+        defaultKafkaFeedbackTopic);
+    return this;
+  }
+
+}
diff --git a/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/Uc4PipelineBuilder.java b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/Uc4PipelineBuilder.java
new file mode 100644
index 0000000000000000000000000000000000000000..2efb8250c0e1136b34412e4553b2d216c5e24b43
--- /dev/null
+++ b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/Uc4PipelineBuilder.java
@@ -0,0 +1,309 @@
+package rocks.theodolite.benchmarks.uc4.hazelcastjet;
+
+import com.hazelcast.function.BiFunctionEx;
+import com.hazelcast.jet.Traverser;
+import com.hazelcast.jet.Traversers;
+import com.hazelcast.jet.Util;
+import com.hazelcast.jet.aggregate.AggregateOperation;
+import com.hazelcast.jet.aggregate.AggregateOperation1;
+import com.hazelcast.jet.kafka.KafkaSinks;
+import com.hazelcast.jet.kafka.KafkaSources;
+import com.hazelcast.jet.pipeline.Pipeline;
+import com.hazelcast.jet.pipeline.Sinks;
+import com.hazelcast.jet.pipeline.StageWithWindow;
+import com.hazelcast.jet.pipeline.StreamSource;
+import com.hazelcast.jet.pipeline.StreamStage;
+import com.hazelcast.jet.pipeline.StreamStageWithKey;
+import com.hazelcast.jet.pipeline.WindowDefinition;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Properties;
+import java.util.Set;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import rocks.theodolite.benchmarks.uc4.hazelcastjet.uc4specifics.AggregatedActivePowerRecordAccumulator;
+import rocks.theodolite.benchmarks.uc4.hazelcastjet.uc4specifics.ChildParentsTransformer;
+import rocks.theodolite.benchmarks.uc4.hazelcastjet.uc4specifics.SensorGroupKey;
+import rocks.theodolite.benchmarks.uc4.hazelcastjet.uc4specifics.ValueGroup;
+import titan.ccp.configuration.events.Event;
+import titan.ccp.model.records.ActivePowerRecord;
+import titan.ccp.model.records.AggregatedActivePowerRecord;
+import titan.ccp.model.sensorregistry.SensorRegistry;
+
+/**
+ * Builder to build a HazelcastJet Pipeline for UC4 which can be used for stream processing using
+ * Hazelcast Jet.
+ */
+@SuppressWarnings("PMD.ExcessiveImports")
+public class Uc4PipelineBuilder {
+
+  private static final Logger LOGGER = LoggerFactory.getLogger(Uc4PipelineBuilder.class);
+  private static final String SENSOR_PARENT_MAP_NAME = "SensorParentMap";
+
+  /**
+   * Builds a pipeline which can be used for stream processing using Hazelcast Jet.
+   *
+   * @param kafkaInputReadPropsForPipeline Properties Object containing the necessary kafka input
+   *        read attributes.
+   * @param kafkaConfigPropsForPipeline Properties Object containing the necessary kafka config read
+   *        attributes.
+   * @param kafkaFeedbackPropsForPipeline Properties Object containing the necessary kafka
+   *        aggregation read attributes.
+   * @param kafkaWritePropsForPipeline Properties Object containing the necessary kafka write
+   *        attributes.
+   * @param kafkaInputTopic The name of the input topic used for the pipeline.
+   * @param kafkaOutputTopic The name of the output topic used for the pipeline.
+   * @param kafkaConfigurationTopic The name of the configuration topic used for the pipeline.
+   * @param kafkaFeedbackTopic The name of the feedback topic used for the pipeline.
+   * @param windowSize The window size of the tumbling window used in this pipeline.
+   * @return returns a Pipeline used which can be used in a Hazelcast Jet Instance to process data
+   *         for UC4.
+   */
+  public Pipeline build(final Properties kafkaInputReadPropsForPipeline, // NOPMD
+      final Properties kafkaConfigPropsForPipeline,
+      final Properties kafkaFeedbackPropsForPipeline,
+      final Properties kafkaWritePropsForPipeline,
+      final String kafkaInputTopic,
+      final String kafkaOutputTopic,
+      final String kafkaConfigurationTopic,
+      final String kafkaFeedbackTopic,
+      final int windowSize) {
+
+    if (LOGGER.isInfoEnabled()) {
+      LOGGER.info("kafkaConfigProps: " + kafkaConfigPropsForPipeline);
+      LOGGER.info("kafkaFeedbackProps: " + kafkaFeedbackPropsForPipeline);
+      LOGGER.info("kafkaWriteProps: " + kafkaWritePropsForPipeline);
+    }
+
+    // The pipeline for this Use Case
+    final Pipeline uc4Pipeline = Pipeline.create();
+
+    // Sources for this use case
+    final StreamSource<Entry<Event, String>> configSource =
+        KafkaSources.kafka(kafkaConfigPropsForPipeline, kafkaConfigurationTopic);
+
+    final StreamSource<Entry<String, ActivePowerRecord>> inputSource =
+        KafkaSources.kafka(kafkaInputReadPropsForPipeline, kafkaInputTopic);
+
+    final StreamSource<Entry<String, AggregatedActivePowerRecord>> aggregationSource =
+        KafkaSources.kafka(kafkaFeedbackPropsForPipeline, kafkaFeedbackTopic);
+
+    // Extend UC4 topology to pipeline
+    final StreamStage<Entry<String, AggregatedActivePowerRecord>> uc4Aggregation =
+        this.extendUc4Topology(uc4Pipeline, inputSource, aggregationSource, configSource,
+            windowSize);
+
+    // Add Sink2: Write back to kafka feedback/aggregation topic
+    uc4Aggregation.writeTo(KafkaSinks.kafka(
+        kafkaWritePropsForPipeline, kafkaFeedbackTopic));
+
+    // Log aggregation product
+    uc4Aggregation.writeTo(Sinks.logger());
+
+    // Add Sink2: Write back to kafka output topic
+    uc4Aggregation.writeTo(KafkaSinks.kafka(
+        kafkaWritePropsForPipeline, kafkaOutputTopic));
+
+    // Return the pipeline
+    return uc4Pipeline;
+  }
+
+  /**
+   * Extends a blank Hazelcast Jet Pipeline with the UC4 topology defined by Theodolite.
+   *
+   * <p>
+   * UC4 takes {@code ActivePowerRecord} events from sensors and a {@code SensorRegistry} with maps
+   * from keys to groups to map values to their according groups. A feedback stream allows for
+   * group keys to be mapped to values and eventually to be mapped to other top level groups defines
+   * defined by the {@code SensorRegistry}.
+   * </p>
+   *
+   * <p>
+   * 6 Step topology: <br>
+   * (1) Inputs (Config, Values, Aggregations) <br>
+   * (2) Merge Input Values and Aggregations <br>
+   * (3) Join Configuration with Merged Input Stream <br>
+   * (4) Duplicate as flatmap per value and group <br>
+   * (5) Window (preparation for possible last values) <br>
+   * (6) Aggregate data over the window
+   * </p>
+   *
+   * @param pipe The blank pipeline to extend the logic to.
+   * @param inputSource A streaming source with {@code ActivePowerRecord} data.
+   * @param aggregationSource A streaming source with aggregated data.
+   * @param configurationSource A streaming source delivering a {@code SensorRegistry}.
+   * @param windowSize The window size used to aggregate over.
+   * @return A {@code StreamSource<String,Double>} with sensorKeys or groupKeys mapped to their
+   *         according aggregated values. The data can be further modified or directly be linked to
+   *         a Hazelcast Jet sink.
+   */
+  public StreamStage<Entry<String, AggregatedActivePowerRecord>> extendUc4Topology(// NOPMD
+      final Pipeline pipe,
+      final StreamSource<Entry<String, ActivePowerRecord>> inputSource,
+      final StreamSource<Entry<String, AggregatedActivePowerRecord>> aggregationSource,
+      final StreamSource<Entry<Event, String>> configurationSource, final int windowSize) {
+
+    //////////////////////////////////
+    // (1) Configuration Stream
+    pipe.readFrom(configurationSource)
+        .withNativeTimestamps(0)
+        .filter(entry -> entry.getKey() == Event.SENSOR_REGISTRY_CHANGED
+            || entry.getKey() == Event.SENSOR_REGISTRY_STATUS)
+        .map(data -> Util.entry(data.getKey(), SensorRegistry.fromJson(data.getValue())))
+        .flatMapStateful(HashMap::new, new ConfigFlatMap())
+        .writeTo(Sinks.mapWithUpdating(
+            SENSOR_PARENT_MAP_NAME, // The addressed IMAP
+            Entry::getKey, // The key to look for
+            (oldValue, newEntry) -> newEntry.getValue()));
+
+    //////////////////////////////////
+    // (1) Sensor Input Stream
+    final StreamStage<Entry<String, ActivePowerRecord>> inputStream = pipe
+        .readFrom(inputSource)
+        .withNativeTimestamps(0);
+
+    //////////////////////////////////
+    // (1) Aggregation Stream
+    final StreamStage<Entry<String, ActivePowerRecord>> aggregations = pipe
+        .readFrom(aggregationSource)
+        .withNativeTimestamps(0)
+        .map(entry -> { // Map Aggregated to ActivePowerRecord
+          final AggregatedActivePowerRecord agg = entry.getValue();
+          final ActivePowerRecord record = new ActivePowerRecord(
+              agg.getIdentifier(), agg.getTimestamp(), agg.getSumInW());
+          return Util.entry(entry.getKey(), record);
+        });
+
+    //////////////////////////////////
+    // (2) UC4 Merge Input with aggregation stream
+    final StreamStageWithKey<Entry<String, ActivePowerRecord>, String>
+        mergedInputAndAggregations = inputStream
+        .merge(aggregations)
+        .groupingKey(Entry::getKey);
+
+    //////////////////////////////////
+    // (3) UC4 Join Configuration and Merges Input/Aggregation Stream
+    // [sensorKey , (value,Set<Groups>)]
+    final StreamStage<Entry<String, ValueGroup>> joinedStage = mergedInputAndAggregations
+        .<Set<String>, Entry<String, ValueGroup>>mapUsingIMap(
+            SENSOR_PARENT_MAP_NAME,
+            (sensorEvent, sensorParentsSet) -> {
+              // Check whether a groupset exists for a key or not
+              if (sensorParentsSet == null) {
+                // No group set exists for this key: return valuegroup with default null group set.
+                final Set<String> nullSet = new HashSet<>();
+                nullSet.add("NULL-GROUPSET");
+                return Util.entry(sensorEvent.getKey(),
+                    new ValueGroup(sensorEvent.getValue(), nullSet));
+              } else {
+                // Group set exists for this key: return valuegroup with the groupset.
+                final ValueGroup valueParentsPair =
+                    new ValueGroup(sensorEvent.getValue(), sensorParentsSet);
+                // Return solution
+                return Util.entry(sensorEvent.getKey(), valueParentsPair);
+              }
+            });
+
+    //////////////////////////////////
+    // (4) UC4 Duplicate as flatmap joined Stream
+    // [(sensorKey, Group) , value]
+    final StreamStage<Entry<SensorGroupKey, ActivePowerRecord>> dupliAsFlatmappedStage = joinedStage
+        .flatMap(entry -> {
+
+          // Supplied data
+          final String keyGroupId = entry.getKey();
+          final ActivePowerRecord record = entry.getValue().getRecord();
+          final Set<String> groups = entry.getValue().getGroups();
+
+          // Transformed Data
+          final String[] groupList = groups.toArray(String[]::new);
+          final SensorGroupKey[] newKeyList = new SensorGroupKey[groupList.length];
+          final List<Entry<SensorGroupKey, ActivePowerRecord>> newEntryList = new ArrayList<>();
+          for (int i = 0; i < groupList.length; i++) {
+            newKeyList[i] = new SensorGroupKey(keyGroupId, groupList[i]);
+            newEntryList.add(Util.entry(newKeyList[i], record));
+          }
+
+          // Return traversable list of new entry elements
+          return Traversers.traverseIterable(newEntryList);
+        });
+
+    //////////////////////////////////
+    // (5) UC4 Last Value Map
+    // Table with tumbling window differentiation [ (sensorKey,Group) , value ],Time
+    final StageWithWindow<Entry<SensorGroupKey, ActivePowerRecord>>
+        windowedLastValues = dupliAsFlatmappedStage
+        .window(WindowDefinition.tumbling(windowSize));
+
+    final AggregateOperation1<Entry<SensorGroupKey, ActivePowerRecord>,
+        AggregatedActivePowerRecordAccumulator, AggregatedActivePowerRecord> aggrOp =
+        AggregateOperation
+        .withCreate(AggregatedActivePowerRecordAccumulator::new)
+        .<Entry<SensorGroupKey, ActivePowerRecord>>andAccumulate((acc, rec) -> {
+          acc.setId(rec.getKey().getGroup());
+          acc.addInputs(rec.getValue());
+        })
+        .andCombine((acc, acc2) ->
+            acc.addInputs(acc2.getId(), acc2.getSumInW(), acc2.getCount(), acc.getTimestamp()))
+        .andDeduct((acc, acc2) -> acc.removeInputs(acc2.getSumInW(), acc2.getCount()))
+        .andExportFinish(acc ->
+            new AggregatedActivePowerRecord(acc.getId(),
+                acc.getTimestamp(),
+                acc.getCount(),
+                acc.getSumInW(),
+                acc.getAverageInW())
+        );
+
+    // write aggregation back to kafka
+
+    return windowedLastValues
+        .groupingKey(entry -> entry.getKey().getGroup())
+        .aggregate(aggrOp).map(agg -> Util.entry(agg.getKey(), agg.getValue()));
+  }
+
+
+
+  /**
+   * FlatMap function used to process the configuration input for UC4.
+   */
+  private static class ConfigFlatMap implements
+      BiFunctionEx<Map<String, Set<String>>, Entry<Event, SensorRegistry>, Traverser<Entry<String, Set<String>>>> { // NOCS
+
+    private static final long serialVersionUID = -6769931374907428699L;
+
+    @Override
+    public Traverser<Entry<String, Set<String>>> applyEx(
+        final Map<String, Set<String>> flatMapStage,
+        final Entry<Event, SensorRegistry> eventItem) {
+      // Transform new Input
+      final ChildParentsTransformer transformer = new ChildParentsTransformer("default-name");
+      final Map<String, Set<String>> mapFromRegistry =
+          transformer.constructChildParentsPairs(eventItem.getValue());
+
+      // Compare both tables
+      final Map<String, Set<String>> updates = new HashMap<>();
+      for (final String key : mapFromRegistry.keySet()) {
+        if (flatMapStage.containsKey(key)) {
+          if (!mapFromRegistry.get(key).equals(flatMapStage.get(key))) {
+            updates.put(key, mapFromRegistry.get(key));
+          }
+        } else {
+          updates.put(key, mapFromRegistry.get(key));
+        }
+      }
+
+      // Create an updates list to pass on to the next pipeline stage.
+      final List<Entry<String, Set<String>>> updatesList = new ArrayList<>(updates.entrySet());
+
+      // Return traverser with updates list.
+      return Traversers.traverseIterable(updatesList)
+          .map(e -> Util.entry(e.getKey(), e.getValue()));
+    }
+
+  }
+
+}
diff --git a/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/AggregatedActivePowerRecordAccumulator.java b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/AggregatedActivePowerRecordAccumulator.java
new file mode 100644
index 0000000000000000000000000000000000000000..3166f16cd31bf0e6d4dff6548468791e7a5e5c5c
--- /dev/null
+++ b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/AggregatedActivePowerRecordAccumulator.java
@@ -0,0 +1,100 @@
+package rocks.theodolite.benchmarks.uc4.hazelcastjet.uc4specifics;
+
+import titan.ccp.model.records.ActivePowerRecord;
+
+/**
+ * Mutable accumulator for incrementally building an aggregated active power record.
+ *
+ * <p>Tracks the aggregate id, a timestamp, the number of aggregated records, the
+ * sum of their power values in watts and the derived average.</p>
+ */
+public class AggregatedActivePowerRecordAccumulator {
+
+  // Identifier of the aggregate; may stay null until set explicitly or merged in.
+  private String id;
+  // Timestamp of the aggregation; see the NOTE on the mutator methods below.
+  private long timestamp;
+  // Number of records aggregated so far.
+  private long count;
+  // Sum of all aggregated power values in watts.
+  private double sumInW;
+  // sumInW / count, recomputed on every mutation.
+  private double averageInW;
+
+  /**
+   * Default constructor.
+   */
+  public AggregatedActivePowerRecordAccumulator() {
+    // This constructor is intentionally empty. Nothing special is needed here.
+  }
+
+
+  /**
+   * Creates an accumulator pre-initialized with the given aggregation state.
+   */
+  public AggregatedActivePowerRecordAccumulator(final String id,
+                                                final long timestamp,
+                                                final long count,
+                                                final double sumInW,
+                                                final double averageInW) {
+    this.id = id;
+    this.timestamp = timestamp;
+    this.count = count;
+    this.sumInW = sumInW;
+    this.averageInW = averageInW;
+  }
+
+  /**
+   * Sets the id.
+   */
+  public void setId(final String id) {
+    this.id = id;
+  }
+
+  /**
+   * Adds a single record to the aggregation.
+   *
+   * <p>NOTE(review): this overwrites the timestamp with the record's timestamp,
+   * while the merging overload below keeps the maximum of both timestamps —
+   * confirm this asymmetry is intended.</p>
+   */
+  public void addInputs(final ActivePowerRecord record) {
+    this.count += 1;
+    this.sumInW += record.getValueInW();
+    this.timestamp = record.getTimestamp();
+    this.averageInW = sumInW / count;
+  }
+
+  /**
+   * Merges the aggregation state of another aggregator into this one.
+   *
+   * <p>Keeps this accumulator's id if already set, sums up value and count, and
+   * keeps the larger of both timestamps.</p>
+   */
+  public void addInputs(final String id,
+                        final double sumInW,
+                        final long count,
+                        final long timestamp) {
+    this.id = this.id == null ? id : this.id;
+    this.sumInW += sumInW;
+    this.count += count;
+    this.timestamp = Math.max(this.timestamp, timestamp);
+    this.averageInW = this.sumInW / this.count;
+  }
+
+  /**
+   * Removes the values of another aggregator.
+   * Not a complete reset since the old timestamp is lost (it is set to -1).
+   */
+  public void removeInputs(final double sumInW, final long count) {
+    this.sumInW -= sumInW;
+    this.count -= count;
+    // Guard against division by zero when the last contribution is removed.
+    this.averageInW = this.count == 0 ? 0.0 : this.sumInW / this.count;
+    this.timestamp = -1L;
+  }
+
+  public long getCount() {
+    return count;
+  }
+
+  public double getSumInW() {
+    return sumInW;
+  }
+
+  public double getAverageInW() {
+    return averageInW;
+  }
+
+  public String getId() {
+    return id;
+  }
+
+  public long getTimestamp() {
+    return timestamp;
+  }
+}
diff --git a/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/ChildParentsTransformer.java b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/ChildParentsTransformer.java
new file mode 100644
index 0000000000000000000000000000000000000000..ad3b2294cb934ba04b07df2e2b2d3dbdd6e1a905
--- /dev/null
+++ b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/ChildParentsTransformer.java
@@ -0,0 +1,118 @@
+package rocks.theodolite.benchmarks.uc4.hazelcastjet.uc4specifics;
+
+import java.util.Map;
+import java.util.Optional;
+import java.util.Set;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+import org.apache.kafka.streams.KeyValue;
+import org.apache.kafka.streams.kstream.Transformer;
+import org.apache.kafka.streams.processor.ProcessorContext;
+import org.apache.kafka.streams.state.KeyValueIterator;
+import org.apache.kafka.streams.state.KeyValueStore;
+import titan.ccp.configuration.events.Event;
+import titan.ccp.model.sensorregistry.AggregatedSensor;
+import titan.ccp.model.sensorregistry.Sensor;
+import titan.ccp.model.sensorregistry.SensorRegistry;
+
+/**
+ * Transforms a {@link SensorRegistry} into key value pairs of Sensor identifiers and their parents'
+ * sensor identifiers. All pairs whose sensor's parents have changed since the last iteration are
+ * forwarded. A mapping of an identifier to <code>null</code> means that the corresponding sensor
+ * no longer exists in the sensor registry.
+ *
+ * <p>Implements the Kafka Streams {@link Transformer} interface. Note that
+ * {@link #constructChildParentsPairs(SensorRegistry)} is stateless and can be used on its own
+ * without calling {@link #init(ProcessorContext)} first.</p>
+ */
+public class ChildParentsTransformer implements
+    Transformer<Event, SensorRegistry, Iterable<KeyValue<String, Optional<Set<String>>>>> {
+
+  // Name of the state store holding the previously seen child -> parents pairs.
+  private final String stateStoreName;
+  // Backing store; only available after init() has been called.
+  private KeyValueStore<String, Set<String>> state;
+
+  public ChildParentsTransformer(final String stateStoreName) {
+    this.stateStoreName = stateStoreName;
+  }
+
+  @Override
+  @SuppressWarnings("unchecked")
+  public void init(final ProcessorContext context) {
+    this.state = (KeyValueStore<String, Set<String>>) context.getStateStore(this.stateStoreName);
+  }
+
+  @Override
+  public Iterable<KeyValue<String, Optional<Set<String>>>> transform(final Event event,
+      final SensorRegistry registry) {
+
+    // Values may later be null for deleting a sensor
+    final Map<String, Set<String>> childParentsPairs = this.constructChildParentsPairs(registry);
+
+    // Reduce the map to new/changed/deleted pairs by diffing against the state store.
+    this.updateChildParentsPairs(childParentsPairs);
+
+    // Persist the diff back into the state store for the next invocation.
+    this.updateState(childParentsPairs);
+
+    return childParentsPairs
+        .entrySet()
+        .stream()
+        .map(e -> KeyValue.pair(e.getKey(), Optional.ofNullable(e.getValue())))
+        .collect(Collectors.toList());
+  }
+
+  @Override
+  public void close() {
+    // Do nothing
+  }
+
+  /**
+   * Constructs a map of keys to their set of parents out of a SensorRegistry.
+   *
+   * @param registry The SensorRegistry to build the map out of.
+   * @return A map of keys to a set of their parents.
+   */
+  public Map<String, Set<String>> constructChildParentsPairs(final SensorRegistry registry) {
+    return this.streamAllChildren(registry.getTopLevelSensor())
+        .collect(Collectors.toMap(
+            Sensor::getIdentifier,
+            child -> child.getParent()
+                .map(p -> Set.of(p.getIdentifier()))
+                .orElseGet(Set::of)));
+  }
+
+  // Recursively streams all descendants of the given sensor (children, grandchildren, ...).
+  private Stream<Sensor> streamAllChildren(final AggregatedSensor sensor) {
+    return sensor.getChildren().stream()
+        .flatMap(s -> Stream.concat(
+            Stream.of(s),
+            s instanceof AggregatedSensor ? this.streamAllChildren((AggregatedSensor) s)
+                : Stream.empty()));
+  }
+
+  // Mutates childParentsPairs in place: marks deleted sensors with a null value
+  // and drops entries whose parent set is unchanged compared to the state store.
+  private void updateChildParentsPairs(final Map<String, Set<String>> childParentsPairs) {
+    final KeyValueIterator<String, Set<String>> oldChildParentsPairs = this.state.all();
+    while (oldChildParentsPairs.hasNext()) {
+      final KeyValue<String, Set<String>> oldChildParentPair = oldChildParentsPairs.next();
+      final String identifier = oldChildParentPair.key;
+      final Set<String> oldParents = oldChildParentPair.value;
+      final Set<String> newParents = childParentsPairs.get(identifier); // null if not exists
+      if (newParents == null) {
+        // Sensor was deleted
+        childParentsPairs.put(identifier, null);
+      } else if (newParents.equals(oldParents)) {
+        // No changes
+        childParentsPairs.remove(identifier);
+      }
+      // Else: Later Perhaps: Mark changed parents
+    }
+    oldChildParentsPairs.close();
+  }
+
+  // Writes the diff into the state store: null values delete, others overwrite.
+  private void updateState(final Map<String, Set<String>> childParentsPairs) {
+    for (final Map.Entry<String, Set<String>> childParentPair : childParentsPairs.entrySet()) {
+      if (childParentPair.getValue() == null) {
+        this.state.delete(childParentPair.getKey());
+      } else {
+        this.state.put(childParentPair.getKey(), childParentPair.getValue());
+      }
+    }
+  }
+
+}
diff --git a/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/EventDeserializer.java b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/EventDeserializer.java
new file mode 100644
index 0000000000000000000000000000000000000000..c8d06b497009944b9a9a0fda4ab224e5fe992e3d
--- /dev/null
+++ b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/EventDeserializer.java
@@ -0,0 +1,32 @@
+package rocks.theodolite.benchmarks.uc4.hazelcastjet.uc4specifics;
+
+import java.util.Map;
+import org.apache.kafka.common.serialization.Deserializer;
+import titan.ccp.configuration.events.Event;
+import titan.ccp.configuration.events.EventSerde;
+
+/**
+ * Kafka {@link Deserializer} for {@link Event} objects that delegates all calls
+ * to the deserializer provided by {@link EventSerde}.
+ */
+public class EventDeserializer implements Deserializer<Event> {
+
+  private final Deserializer<Event> delegate = EventSerde.serde().deserializer();
+
+  @Override
+  public void configure(final Map<String, ?> configs, final boolean isKey) {
+    this.delegate.configure(configs, isKey);
+  }
+
+  @Override
+  public Event deserialize(final String topic, final byte[] data) {
+    return this.delegate.deserialize(topic, data);
+  }
+
+  @Override
+  public void close() {
+    this.delegate.close();
+  }
+
+}
diff --git a/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/HashMapSupplier.java b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/HashMapSupplier.java
new file mode 100644
index 0000000000000000000000000000000000000000..ec240bf8cb925aa3a444b56457da5adc411212b2
--- /dev/null
+++ b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/HashMapSupplier.java
@@ -0,0 +1,26 @@
+package rocks.theodolite.benchmarks.uc4.hazelcastjet.uc4specifics;
+
+import com.hazelcast.function.SupplierEx;
+import java.util.HashMap;
+import java.util.Set;
+
+/**
+ * {@link SupplierEx} that returns a fresh, empty {@link HashMap} on every call.
+ */
+public class HashMapSupplier implements SupplierEx<HashMap<String, Set<String>>> {
+
+  private static final long serialVersionUID = -6247504592403610702L; // NOPMD
+
+  @Override
+  public HashMap<String, Set<String>> get() {
+    return new HashMap<>();
+  }
+
+  @Override
+  public HashMap<String, Set<String>> getEx() throws Exception {
+    return new HashMap<>();
+  }
+
+}
diff --git a/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/ImmutableSensorRegistryUc4Serializer.java b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/ImmutableSensorRegistryUc4Serializer.java
new file mode 100644
index 0000000000000000000000000000000000000000..53d22f7f156891cf11e5b8915eed17b74c3d57fb
--- /dev/null
+++ b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/ImmutableSensorRegistryUc4Serializer.java
@@ -0,0 +1,36 @@
+package rocks.theodolite.benchmarks.uc4.hazelcastjet.uc4specifics;
+
+import com.hazelcast.nio.ObjectDataInput;
+import com.hazelcast.nio.ObjectDataOutput;
+import com.hazelcast.nio.serialization.StreamSerializer;
+import java.io.IOException;
+import titan.ccp.model.sensorregistry.ImmutableSensorRegistry;
+
+/**
+ * Hazelcast {@link StreamSerializer} that writes an {@link ImmutableSensorRegistry}
+ * as its JSON representation and restores it from that JSON string.
+ */
+public class ImmutableSensorRegistryUc4Serializer
+    implements StreamSerializer<ImmutableSensorRegistry> {
+
+  // Unique serializer type id within the Hazelcast job configuration.
+  private static final int TYPE_ID = 3;
+
+  @Override
+  public int getTypeId() {
+    return TYPE_ID;
+  }
+
+  @Override
+  public void write(final ObjectDataOutput out, final ImmutableSensorRegistry object)
+      throws IOException {
+    out.writeString(object.toJson());
+  }
+
+  @Override
+  public ImmutableSensorRegistry read(final ObjectDataInput in) throws IOException {
+    return (ImmutableSensorRegistry) ImmutableSensorRegistry.fromJson(in.readString());
+  }
+
+}
diff --git a/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/SensorGroupKey.java b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/SensorGroupKey.java
new file mode 100644
index 0000000000000000000000000000000000000000..24114cc90a709c99e74495714559c12324e07788
--- /dev/null
+++ b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/SensorGroupKey.java
@@ -0,0 +1,50 @@
+package rocks.theodolite.benchmarks.uc4.hazelcastjet.uc4specifics;
+
+import java.util.Objects;
+
+/**
+ * Immutable composite key pairing a sensor identifier with the group it
+ * belongs to. Structure: (sensorId, group).
+ */
+public class SensorGroupKey {
+
+  private final String sensorId;
+  private final String group;
+
+  public SensorGroupKey(final String sensorId, final String group) {
+    this.sensorId = sensorId;
+    this.group = group;
+  }
+
+  public String getSensorId() {
+    return this.sensorId;
+  }
+
+  public String getGroup() {
+    return this.group;
+  }
+
+  @Override
+  public String toString() {
+    return "[SensorId: " + this.sensorId + "; Group: " + this.group + "]";
+  }
+
+  @Override
+  public int hashCode() {
+    return Objects.hash(this.sensorId, this.group);
+  }
+
+  @Override
+  public boolean equals(final Object obj) {
+    if (obj == this) {
+      return true;
+    }
+    if (!(obj instanceof SensorGroupKey)) {
+      return false;
+    }
+    final SensorGroupKey that = (SensorGroupKey) obj;
+    return Objects.equals(this.sensorId, that.sensorId)
+        && Objects.equals(this.group, that.group);
+  }
+
+}
diff --git a/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/SensorGroupKeySerializer.java b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/SensorGroupKeySerializer.java
new file mode 100644
index 0000000000000000000000000000000000000000..12a46b9d8f91ea145f614654a6ce9813b9014290
--- /dev/null
+++ b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/SensorGroupKeySerializer.java
@@ -0,0 +1,31 @@
+package rocks.theodolite.benchmarks.uc4.hazelcastjet.uc4specifics;
+
+import com.hazelcast.nio.ObjectDataInput;
+import com.hazelcast.nio.ObjectDataOutput;
+import com.hazelcast.nio.serialization.StreamSerializer;
+import java.io.IOException;
+
+/**
+ * Hazelcast {@link StreamSerializer} for {@link SensorGroupKey}: writes the
+ * sensor id followed by the group name and reads them back in the same order.
+ */
+public class SensorGroupKeySerializer implements StreamSerializer<SensorGroupKey> {
+
+  // Unique serializer type id within the Hazelcast job configuration.
+  private static final int TYPE_ID = 2;
+
+  @Override
+  public int getTypeId() {
+    return TYPE_ID;
+  }
+
+  @Override
+  public void write(final ObjectDataOutput out, final SensorGroupKey key) throws IOException {
+    out.writeString(key.getSensorId());
+    out.writeString(key.getGroup());
+  }
+
+  @Override
+  public SensorGroupKey read(final ObjectDataInput in) throws IOException {
+    final String sensorId = in.readString();
+    final String group = in.readString();
+    return new SensorGroupKey(sensorId, group);
+  }
+
+}
diff --git a/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/ValueGroup.java b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/ValueGroup.java
new file mode 100644
index 0000000000000000000000000000000000000000..893efcf74fe8a16202d795fca5cc43b63190dc50
--- /dev/null
+++ b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/ValueGroup.java
@@ -0,0 +1,59 @@
+package rocks.theodolite.benchmarks.uc4.hazelcastjet.uc4specifics;
+
+import java.util.Objects;
+import java.util.Set;
+import titan.ccp.model.records.ActivePowerRecord;
+
+/**
+ * Pairs an {@link ActivePowerRecord} with the set of groups it belongs to.
+ * Structure: (record, Set(groups)).
+ *
+ * <p>Equality is defined over the record's power value and the group set.</p>
+ */
+public class ValueGroup {
+
+  private final ActivePowerRecord record;
+  private final Set<String> groups;
+
+  public ValueGroup(final ActivePowerRecord record, final Set<String> groups) {
+    this.record = record;
+    this.groups = groups;
+  }
+
+  public ActivePowerRecord getRecord() {
+    return this.record;
+  }
+
+  public Double getValueInW() {
+    return this.record.getValueInW();
+  }
+
+  public Set<String> getGroups() {
+    return this.groups;
+  }
+
+  @Override
+  public String toString() {
+    // Builds e.g. "10.0;[groupA/groupB/]".
+    final StringBuilder groupString = new StringBuilder("[");
+    for (final String group : this.groups) {
+      groupString.append(group).append('/');
+    }
+    return this.record.getValueInW() + ";" + groupString + "]";
+  }
+
+  @Override
+  public int hashCode() {
+    // Consistent with equals(): both use the power value and the group set.
+    return Objects.hash(this.record.getValueInW(), this.groups);
+  }
+
+  @Override
+  public boolean equals(final Object obj) {
+    if (obj == this) {
+      return true;
+    }
+    if (obj instanceof ValueGroup) {
+      final ValueGroup other = (ValueGroup) obj;
+      // Compare the group sets symmetrically. The previous containsAll check
+      // violated the symmetry requirement of the equals() contract and was
+      // inconsistent with hashCode().
+      return Objects.equals(this.record.getValueInW(), other.getValueInW())
+          && Objects.equals(this.groups, other.groups);
+    }
+    return false;
+  }
+
+}
diff --git a/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/ValueGroupSerializer.java b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/ValueGroupSerializer.java
new file mode 100644
index 0000000000000000000000000000000000000000..e136d1da0cd8362fed4f76807e7f8725c2075b7f
--- /dev/null
+++ b/theodolite-benchmarks/uc4-hazelcastjet/src/main/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/uc4specifics/ValueGroupSerializer.java
@@ -0,0 +1,33 @@
+package rocks.theodolite.benchmarks.uc4.hazelcastjet.uc4specifics;
+
+import com.hazelcast.nio.ObjectDataInput;
+import com.hazelcast.nio.ObjectDataOutput;
+import com.hazelcast.nio.serialization.StreamSerializer;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.HashSet;
+
+/**
+ * Hazelcast {@link StreamSerializer} for {@link ValueGroup} to allow for parallelization.
+ * Writes the wrapped record followed by the comma-joined group names.
+ */
+public class ValueGroupSerializer implements StreamSerializer<ValueGroup> {
+
+  // Unique serializer type id within the Hazelcast job configuration.
+  private static final int TYPE_ID = 1;
+
+  @Override
+  public int getTypeId() {
+    return TYPE_ID;
+  }
+
+  @Override
+  public void write(final ObjectDataOutput out, final ValueGroup key) throws IOException {
+    // Serialize the wrapped record, NOT the ValueGroup itself: writing the whole
+    // key via writeObject would re-enter this serializer recursively.
+    out.writeObject(key.getRecord());
+    out.writeString(String.join(",", key.getGroups()));
+  }
+
+  @Override
+  public ValueGroup read(final ObjectDataInput in) throws IOException {
+    // readObject's type parameter is inferred as ActivePowerRecord from the
+    // ValueGroup constructor signature.
+    return new ValueGroup(in.readObject(),
+        new HashSet<>(Arrays.asList(in.readString().split(","))));
+  }
+
+}
+
diff --git a/theodolite-benchmarks/uc4-hazelcastjet/src/main/resources/META-INF/application.properties b/theodolite-benchmarks/uc4-hazelcastjet/src/main/resources/META-INF/application.properties
new file mode 100644
index 0000000000000000000000000000000000000000..e3371cc87e20e85e6e8c327955537e6e49dab86e
--- /dev/null
+++ b/theodolite-benchmarks/uc4-hazelcastjet/src/main/resources/META-INF/application.properties
@@ -0,0 +1,8 @@
+application.name=theodolite-uc4-application
+application.version=0.0.1
+
+kafka.bootstrap.servers=localhost:9092
+kafka.input.topic=input
+
+schema.registry.url=http://localhost:8081
+
diff --git a/theodolite-benchmarks/uc4-hazelcastjet/src/test/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/Uc4PipelineTest.java b/theodolite-benchmarks/uc4-hazelcastjet/src/test/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/Uc4PipelineTest.java
new file mode 100644
index 0000000000000000000000000000000000000000..b74c2874b92a51b138ffe8b44b1cf750dfce5880
--- /dev/null
+++ b/theodolite-benchmarks/uc4-hazelcastjet/src/test/java/rocks/theodolite/benchmarks/uc4/hazelcastjet/Uc4PipelineTest.java
@@ -0,0 +1,226 @@
+package rocks.theodolite.benchmarks.uc4.hazelcastjet;
+
+import com.hazelcast.jet.Jet;
+import com.hazelcast.jet.JetInstance;
+import com.hazelcast.jet.config.JetConfig;
+import com.hazelcast.jet.config.JobConfig;
+import com.hazelcast.jet.core.JetTestSupport;
+import com.hazelcast.jet.pipeline.Pipeline;
+import com.hazelcast.jet.pipeline.Sinks;
+import com.hazelcast.jet.pipeline.StreamSource;
+import com.hazelcast.jet.pipeline.StreamStage;
+import com.hazelcast.jet.pipeline.test.AssertionCompletedException;
+import com.hazelcast.jet.pipeline.test.Assertions;
+import com.hazelcast.jet.pipeline.test.TestSources;
+import com.hazelcast.jet.test.SerialTest;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Objects;
+import java.util.concurrent.CompletionException;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import rocks.theodolite.benchmarks.uc4.hazelcastjet.Uc4PipelineBuilder;
+import rocks.theodolite.benchmarks.uc4.hazelcastjet.uc4specifics.ImmutableSensorRegistryUc4Serializer;
+import rocks.theodolite.benchmarks.uc4.hazelcastjet.uc4specifics.SensorGroupKey;
+import rocks.theodolite.benchmarks.uc4.hazelcastjet.uc4specifics.SensorGroupKeySerializer;
+import rocks.theodolite.benchmarks.uc4.hazelcastjet.uc4specifics.ValueGroup;
+import rocks.theodolite.benchmarks.uc4.hazelcastjet.uc4specifics.ValueGroupSerializer;
+import titan.ccp.configuration.events.Event;
+import titan.ccp.model.records.ActivePowerRecord;
+import titan.ccp.model.records.AggregatedActivePowerRecord;
+import titan.ccp.model.sensorregistry.ImmutableSensorRegistry;
+import titan.ccp.model.sensorregistry.MachineSensor;
+import titan.ccp.model.sensorregistry.MutableAggregatedSensor;
+import titan.ccp.model.sensorregistry.MutableSensorRegistry;
+
+@Category(SerialTest.class)
+public class Uc4PipelineTest extends JetTestSupport {
+
+  // TEst Machinery
+  JetInstance testInstance = null;
+  Pipeline testPipeline = null;
+  StreamStage<Entry<String, AggregatedActivePowerRecord>> uc4Topology = null;
+
+  @Before
+  public void buildUc4Pipeline() {
+
+    // Setup Configuration
+    final int testItemsPerSecond = 2;
+    final String testSensorName = "TEST-SENSOR";
+    final String testLevel1GroupName = "TEST-LEVEL1-GROUP";
+    final String testLevel2GroupName = "TEST-LEVEL2-GROUP";
+    final Double testValueInW = 10.0;
+    final int testWindowSize = 5000; // As window size is bugged, not necessary.
+
+    // Create mock jet instance with configuration
+    final String testClusterName = randomName();
+    final JetConfig testJetConfig = new JetConfig();
+    testJetConfig.getHazelcastConfig().setClusterName(testClusterName);
+    this.testInstance = this.createJetMember(testJetConfig);
+
+    // Create test source 1 : Input Values
+    final StreamSource<Entry<String, ActivePowerRecord>> testInputSource =
+        TestSources.itemStream(testItemsPerSecond, (timestamp, item) -> {
+          final ActivePowerRecord testRecord =
+              new ActivePowerRecord(testSensorName, timestamp, testValueInW);
+          final Entry<String, ActivePowerRecord> testEntry =
+              Map.entry(testSensorName, testRecord);
+          return testEntry;
+        });
+
+    // Create test source 2 : Mock aggregation Values
+    final StreamSource<Entry<String, AggregatedActivePowerRecord>> testAggregationSource =
+        TestSources.itemStream(testItemsPerSecond, (timestamp, item) -> {
+
+          AggregatedActivePowerRecord test =
+              new AggregatedActivePowerRecord(testSensorName,
+                  System.currentTimeMillis(),
+                  1L,
+                  testValueInW,
+                  testValueInW);
+
+          final ActivePowerRecord testAggValue =
+              new ActivePowerRecord(testSensorName,
+                  System.currentTimeMillis(),
+                  testValueInW);
+
+          final Entry<String, AggregatedActivePowerRecord> testEntry =
+              Map.entry(testLevel1GroupName, test);
+          return testEntry;
+        });
+
+
+    // Create test source 3 : Mock Config Values
+    final StreamSource<Entry<Event, String>> testConfigSource =
+        TestSources.itemStream(testItemsPerSecond, (timestamp, item) -> {
+          final Event theEvent = Event.SENSOR_REGISTRY_CHANGED;
+
+          // Topology:
+          // level2Group -> level1Group -> testSensor
+          
+          // Create Registry
+          final MutableSensorRegistry testRegistry = new MutableSensorRegistry(testLevel2GroupName);
+          // Add Sensors
+          final MutableAggregatedSensor topLevelSensor = testRegistry.getTopLevelSensor();
+          final MutableAggregatedSensor level1GroupSensor =
+              topLevelSensor.addChildAggregatedSensor(testLevel1GroupName);
+          final MachineSensor inputSensor = level1GroupSensor.addChildMachineSensor(testSensorName);
+
+          final String stringRegistry = testRegistry.toJson();
+          final Entry<Event, String> testEntry =
+              Map.entry(theEvent, stringRegistry);
+          return testEntry;
+        });
+
+    // Create pipeline to test
+    final Uc4PipelineBuilder pipelineBuilder = new Uc4PipelineBuilder();
+    this.testPipeline = Pipeline.create();
+    this.uc4Topology = pipelineBuilder.extendUc4Topology(testPipeline,
+        testInputSource, testAggregationSource, testConfigSource, testWindowSize);
+
+    this.uc4Topology.writeTo(Sinks.logger());
+  }
+
+  /**
+   * Tests if no items reach the end before the first window ends.
+   */
+  @Test
+  public void testOutput() {
+
+//    System.out.println("DEBUG DEBUG DEBUG || ENTERED TEST 1");
+    
+    // Assertion Configuration
+    final int timeout = 20;
+    final String testSensorName = "TEST-SENSOR";
+    final String testLevel1GroupName = "TEST-LEVEL1-GROUP";
+    final String testLevel2GroupName = "TEST-LEVEL2-GROUP";
+    final double testValueInW = 10.0;
+
+
+    // Assertion
+    this.uc4Topology.apply(Assertions.assertCollectedEventually(timeout, 
+        collection -> {
+          System.out.println("DEBUG || ENTERED ASSERTION COLLECTED EVENTUALLY");
+
+          boolean allOkay = false;
+
+          boolean testLevel1contained = false;
+          boolean testLevel2contained = false;
+          boolean averageEqTest = true;
+          boolean avOk = true;
+
+
+          if (collection != null) {
+            System.out.println("Collection size: " + collection.size());
+
+
+            for (final Entry<String, AggregatedActivePowerRecord> entry : collection) {
+              System.out.println("DEBUG || " + entry.toString());
+
+              final String key = entry.getKey();
+              final AggregatedActivePowerRecord agg = entry.getValue();
+
+
+              if (Objects.equals(key, testLevel1GroupName)) {
+                testLevel1contained = true;
+              }
+
+              if(Objects.equals(key, testLevel2GroupName)){
+                testLevel2contained = true;
+              }
+
+              if (testValueInW != agg.getAverageInW()){
+                averageEqTest = false;
+              }
+
+              final double average = agg.getSumInW() / agg.getCount();
+              if (average != agg.getAverageInW()) {
+                avOk = false;
+              }
+
+            }
+            allOkay = testLevel1contained && testLevel2contained && averageEqTest && avOk;
+          }
+
+          System.out.println("testLevel1contained: " + testLevel1contained);
+          System.out.println("testLevel2contained: " + testLevel2contained);
+          System.out.println("averageEqTest: " + averageEqTest);
+          System.out.println("avOk: " + avOk);
+
+          Assert.assertTrue("Assertion did not complete!", allOkay);
+          
+        }));
+
+    try{
+
+      final JobConfig jobConfig = new JobConfig()
+          .registerSerializer(ValueGroup.class, ValueGroupSerializer.class)
+          .registerSerializer(SensorGroupKey.class, SensorGroupKeySerializer.class)
+          .registerSerializer(ImmutableSensorRegistry.class,
+              ImmutableSensorRegistryUc4Serializer.class);
+      this.testInstance.newJob(this.testPipeline, jobConfig).join();
+
+    } catch (final CompletionException e) {
+      final String errorMsg = e.getCause().getMessage();
+      Assert.assertTrue(
+          "Job was expected to complete with AssertionCompletedException, but completed with: "
+              + e.getCause(),
+          errorMsg.contains(AssertionCompletedException.class.getName()));
+    } catch (Exception e){
+      System.out.println("ERRORORORO TEST BROKEN !!!!");
+      System.out.println(e);
+    }
+  }
+
+
+  @After
+  public void after() {
+    System.out.println("Shutting down");
+    // Shuts down all running Jet Instances
+    Jet.shutdownAll();
+  }
+
+}
diff --git a/theodolite/README.md b/theodolite/README.md
index f662329f7eda3a39632581b7125a2f2f2feced8a..49019813c43e0b19e32e35703ca294b2b5c54cb0 100644
--- a/theodolite/README.md
+++ b/theodolite/README.md
@@ -51,7 +51,7 @@ Or, if you don't have GraalVM installed, you can run the native executable build
 ```
 
 You can then execute your native executable with:
-```./build/theodolite-0.7.0-SNAPSHOT-runner```
+```./build/theodolite-0.8.0-SNAPSHOT-runner```
 
 If you want to learn more about building native executables, please consult https://quarkus.io/guides/gradle-tooling.
 
diff --git a/theodolite/build.gradle b/theodolite/build.gradle
index 521137d7315de193f26fdf2307155e587c0dd921..7e10245b8605ba926ac171e260bc145378a0d8d8 100644
--- a/theodolite/build.gradle
+++ b/theodolite/build.gradle
@@ -37,7 +37,7 @@ dependencies {
 }
 
 group 'theodolite'
-version '0.7.0-SNAPSHOT'
+version '0.8.0-SNAPSHOT'
 
 java {
     sourceCompatibility = JavaVersion.VERSION_11