Compare revisions

Changes are shown as if the source revision was being merged into the target revision.

69 commits on source · 89 files changed · +959 −576
+60 −15
@@ -33,10 +33,16 @@ default:
   script:
     - mkdir -p /kaniko/.docker
     - echo "{\"auths\":{\"${CR_HOST}\":{\"auth\":\"$(printf "%s:%s" "${CR_USER}" "${CR_PW}" | base64 | tr -d '\n')\"}}}" > /kaniko/.docker/config.json
-    - DOCKER_TAG_NAME=$(echo $CI_COMMIT_REF_SLUG- | sed 's/^master-$//')
-    - "[ ! $CI_COMMIT_TAG ] && KANIKO_D=\"$KANIKO_D -d $CR_HOST/$CR_ORG/$IMAGE_NAME:${DOCKER_TAG_NAME}latest\""
-    - "[ ! $CI_COMMIT_TAG ] && KANIKO_D=\"$KANIKO_D -d $CR_HOST/$CR_ORG/$IMAGE_NAME:$DOCKER_TAG_NAME$CI_COMMIT_SHORT_SHA\""
-    - "[ $CI_COMMIT_TAG ] && KANIKO_D=\"$KANIKO_D -d $CR_HOST/$CR_ORG/$IMAGE_NAME:$CI_COMMIT_TAG\""
+    - >
+      if [ $IMAGE_TAG ]; then
+        KANIKO_D="$KANIKO_D -d $CR_HOST/$CR_ORG/$IMAGE_NAME:$IMAGE_TAG"
+      elif [ $CI_COMMIT_TAG ]; then
+        KANIKO_D="$KANIKO_D -d $CR_HOST/$CR_ORG/$IMAGE_NAME:$CI_COMMIT_TAG"
+      else
+        DOCKER_TAG_NAME=$(echo $CI_COMMIT_REF_SLUG- | sed 's/^master-$//')
+        KANIKO_D="$KANIKO_D -d $CR_HOST/$CR_ORG/$IMAGE_NAME:${DOCKER_TAG_NAME}latest"
+        KANIKO_D="$KANIKO_D -d $CR_HOST/$CR_ORG/$IMAGE_NAME:$DOCKER_TAG_NAME$CI_COMMIT_SHORT_SHA"
+      fi
     - "[ $DOCKERFILE ] && KANIKO_DOCKERFILE=\"--dockerfile $DOCKERFILE\""
     - /kaniko/executor --context `pwd`/$CONTEXT $KANIKO_DOCKERFILE $KANIKO_D

@@ -71,28 +77,39 @@ test-docs-links:
   extends: .docs
   needs:
     - build-docs
-  script: bundle exec htmlproofer --assume-extension --allow_hash_href --url-ignore "/favicon.ico" ./_site
+  script: bundle exec htmlproofer --assume-extension --allow_hash_href ./_site
+
+build-docs-crds:
+  stage: build
+  image:
+    name: ghcr.io/fybrik/crdoc:0.6.1
+    entrypoint: [""]
+  script: /crdoc --resources theodolite/crd/ --template docs/api-reference/crds.tmpl --output docs/api-reference/crds.ref.md
+  artifacts:
+    paths:
+      - docs/api-reference/crds.ref.md
+    expire_in: 1 week
+  rules:
+    - changes:
+      - docs/api-reference/crds.tmpl
+      - theodolite/crd/**/*
+    - when: manual
+      allow_failure: true
 
 test-docs-crds-regression:
   stage: test
-  image: golang
+  needs:
+    - build-docs-crds
+  image: alpine:3.15
   before_script:
     - cd docs
-    - go install fybrik.io/crdoc@latest
   script:
-    - crdoc --resources ../theodolite/crd/ --template api-reference/crds.tmpl  --output api-reference/crds.ref.md
     - cmp api-reference/crds.md api-reference/crds.ref.md
   artifacts:
     when: on_failure
     paths:
       - docs/api-reference/crds.ref.md
     expire_in: 1 week
+  rules:
+    - changes:
+      - docs/api-reference/crds.tmpl
+      - theodolite/crd/**/*
+    - when: manual
+      allow_failure: true


# Theodolite Helm Chart

@@ -104,6 +121,11 @@ lint-helm:
     name: alpine/helm:3.5.2
     entrypoint: [""]
   script: helm lint helm/
+  rules:
+  - changes:
+    - helm/*
+  - when: manual
+    allow_failure: true


# Theodolite Benchmarks

@@ -367,6 +389,11 @@ deploy-uc4-load-generator:
   before_script:
     - export GRADLE_USER_HOME=`pwd`/.gradle
     - cd theodolite
+  rules:
+    - changes:
+      - theodolite/**/*
+    - when: manual
+      allow_failure: true
 
 build-theodolite-jvm:
   stage: build

@@ -568,3 +595,21 @@ deploy-random-scheduler:
       when: manual
       allow_failure: true
+
+deploy-buildimage-docker-compose-jq:
+  stage: deploy
+  extends:
+    - .kaniko-push
+  needs: []
+  variables:
+    DOCKER_VERSION: 20.10.12
+    IMAGE_NAME: theodolite-build-docker-compose-jq
+    IMAGE_TAG: $DOCKER_VERSION
+  before_script:
+    - cd buildimages/docker-compose-jq
+  rules:
+    - changes:
+      - buildimages/docker-compose-jq/Dockerfile
+      if: "$CR_HOST && $CR_ORG && $CR_USER && $CR_PW"
+    - if: "$CR_HOST && $CR_ORG && $CR_USER && $CR_PW && $CI_PIPELINE_SOURCE == 'web'"
+      when: manual
+      allow_failure: true
+1 −1
@@ -8,7 +8,7 @@ authors:
     given-names: Wilhelm
     orcid: "https://orcid.org/0000-0001-6625-4335"
 title: Theodolite
-version: "0.6.1"
+version: "0.6.3"
 repository-code: "https://github.com/cau-se/theodolite"
 license: "Apache-2.0"
 doi: "10.1016/j.bdr.2021.100209"
FROM docker:${DOCKER_VERSION:-latest}

RUN apk update && \
    apk add jq && \
    apk add py-pip python3-dev libffi-dev openssl-dev gcc libc-dev rust cargo make && \
    pip install docker-compose
+2 −2
@@ -5,10 +5,10 @@
     "codeRepository": "https://github.com/cau-se/theodolite",
     "dateCreated": "2020-03-13",
     "datePublished": "2020-07-27",
-    "dateModified": "2022-01-17",
+    "dateModified": "2022-01-24",
     "downloadUrl": "https://github.com/cau-se/theodolite/releases",
     "name": "Theodolite",
-    "version": "0.6.1",
+    "version": "0.6.3",
     "description": "Theodolite is a framework for benchmarking the horizontal and vertical scalability of cloud-native applications.",
     "developmentStatus": "active",
     "relatedLink": [
+1 −1
@@ -39,5 +39,5 @@ crdoc --resources ../theodolite/crd/ --template api-reference/crds.tmpl  --outpu
 With the following command, crdoc is executed in Docker:
 
 ```sh
-docker run --rm -v "`pwd`/../theodolite/crd/":/crd -u $UID -v "`pwd`/api-reference":/api-reference ghcr.io/fybrik/crdoc:0.6.0 --resources /crd/ --template /api-reference/crds.tmpl --output /api-reference/crds.md
+docker run --rm -v "`pwd`/../theodolite/crd/":/crd -v "`pwd`/api-reference":/api-reference ghcr.io/fybrik/crdoc:0.6.1 --resources /crd/ --template /api-reference/crds.tmpl --output /api-reference/crds.md
 ```
@@ -94,13 +94,6 @@ Resource Types:
         </tr>
     </thead>
     <tbody><tr>
-        <td><b><a href="#benchmarkspeckafkaconfig">kafkaConfig</a></b></td>
-        <td>object</td>
-        <td>
-          Contains the Kafka configuration.<br/>
-        </td>
-        <td>true</td>
-      </tr><tr>
         <td><b><a href="#benchmarkspecloadgenerator">loadGenerator</a></b></td>
         <td>object</td>
         <td>
@@ -138,103 +131,20 @@ Resource Types:
         </td>
         <td>false</td>
       </tr><tr>
-        <td><b>name</b></td>
-        <td>string</td>
-        <td>
-          This field exists only for technical reasons and should not be set by the user. The value of the field will be overwritten.<br/>
-          <br/>
-            <i>Default</i>: <br/>
-        </td>
-        <td>false</td>
-      </tr></tbody>
-</table>
-
-
-### benchmark.spec.kafkaConfig
-<sup><sup>[↩ Parent](#benchmarkspec)</sup></sup>
-
-
-
-Contains the Kafka configuration.
-
-<table>
-    <thead>
-        <tr>
-            <th>Name</th>
-            <th>Type</th>
-            <th>Description</th>
-            <th>Required</th>
-        </tr>
-    </thead>
-    <tbody><tr>
-        <td><b>bootstrapServer</b></td>
-        <td>string</td>
-        <td>
-          The bootstrap servers connection string.<br/>
-        </td>
-        <td>true</td>
-      </tr><tr>
-        <td><b><a href="#benchmarkspeckafkaconfigtopicsindex">topics</a></b></td>
-        <td>[]object</td>
-        <td>
-          List of topics to be created for each experiment. Alternative theodolite offers the possibility to remove certain topics after each experiment.<br/>
-        </td>
-        <td>true</td>
-      </tr></tbody>
-</table>
-
-
-### benchmark.spec.kafkaConfig.topics[index]
-<sup><sup>[↩ Parent](#benchmarkspeckafkaconfig)</sup></sup>
-
-
-
-
-
-<table>
-    <thead>
-        <tr>
-            <th>Name</th>
-            <th>Type</th>
-            <th>Description</th>
-            <th>Required</th>
-        </tr>
-    </thead>
-    <tbody><tr>
-        <td><b>name</b></td>
-        <td>string</td>
-        <td>
-          The name of the topic.<br/>
-          <br/>
-            <i>Default</i>: <br/>
-        </td>
-        <td>true</td>
-      </tr><tr>
-        <td><b>numPartitions</b></td>
-        <td>integer</td>
-        <td>
-          The number of partitions of the topic.<br/>
-          <br/>
-            <i>Default</i>: 0<br/>
-        </td>
-        <td>false</td>
-      </tr><tr>
-        <td><b>removeOnly</b></td>
-        <td>boolean</td>
-        <td>
-          Determines if this topic should only be deleted after each experiement. For removeOnly topics the name can be a RegEx describing the topic.<br/>
-          <br/>
-            <i>Default</i>: false<br/>
-        </td>
-        <td>false</td>
-      </tr><tr>
-        <td><b>replicationFactor</b></td>
-        <td>integer</td>
-        <td>
-          The replication factor of the topic.<br/>
-          <br/>
-            <i>Default</i>: 0<br/>
-        </td>
-        <td>false</td>
-      </tr></tbody>
-</table>
+        <td><b><a href="#benchmarkspeckafkaconfig">kafkaConfig</a></b></td>
+        <td>object</td>
+        <td>
+          Contains the Kafka configuration.<br/>
+        </td>
+        <td>false</td>
+      </tr><tr>
+        <td><b>name</b></td>
+        <td>string</td>
+        <td>
+          This field exists only for technical reasons and should not be set by the user. The value of the field will be overwritten.<br/>
+          <br/>
+            <i>Default</i>: <br/>
+        </td>
+        <td>false</td>
+      </tr></tbody>
+</table>
@@ -1647,6 +1557,96 @@ The fileSystem resourceSet loads the Kubernetes manifests from the filesystem.
 </table>
 
 
+### benchmark.spec.kafkaConfig
+<sup><sup>[↩ Parent](#benchmarkspec)</sup></sup>
+
+
+
+Contains the Kafka configuration.
+
+<table>
+    <thead>
+        <tr>
+            <th>Name</th>
+            <th>Type</th>
+            <th>Description</th>
+            <th>Required</th>
+        </tr>
+    </thead>
+    <tbody><tr>
+        <td><b>bootstrapServer</b></td>
+        <td>string</td>
+        <td>
+          The bootstrap servers connection string.<br/>
+        </td>
+        <td>true</td>
+      </tr><tr>
+        <td><b><a href="#benchmarkspeckafkaconfigtopicsindex">topics</a></b></td>
+        <td>[]object</td>
+        <td>
+          List of topics to be created for each experiment. Alternative theodolite offers the possibility to remove certain topics after each experiment.<br/>
+        </td>
+        <td>true</td>
+      </tr></tbody>
+</table>
+
+
+### benchmark.spec.kafkaConfig.topics[index]
+<sup><sup>[↩ Parent](#benchmarkspeckafkaconfig)</sup></sup>
+
+
+
+
+
+<table>
+    <thead>
+        <tr>
+            <th>Name</th>
+            <th>Type</th>
+            <th>Description</th>
+            <th>Required</th>
+        </tr>
+    </thead>
+    <tbody><tr>
+        <td><b>name</b></td>
+        <td>string</td>
+        <td>
+          The name of the topic.<br/>
+          <br/>
+            <i>Default</i>: <br/>
+        </td>
+        <td>true</td>
+      </tr><tr>
+        <td><b>numPartitions</b></td>
+        <td>integer</td>
+        <td>
+          The number of partitions of the topic.<br/>
+          <br/>
+            <i>Default</i>: 0<br/>
+        </td>
+        <td>false</td>
+      </tr><tr>
+        <td><b>removeOnly</b></td>
+        <td>boolean</td>
+        <td>
+          Determines if this topic should only be deleted after each experiement. For removeOnly topics the name can be a RegEx describing the topic.<br/>
+          <br/>
+            <i>Default</i>: false<br/>
+        </td>
+        <td>false</td>
+      </tr><tr>
+        <td><b>replicationFactor</b></td>
+        <td>integer</td>
+        <td>
+          The replication factor of the topic.<br/>
+          <br/>
+            <i>Default</i>: 0<br/>
+        </td>
+        <td>false</td>
+      </tr></tbody>
+</table>
+
+
 ### benchmark.status
 <sup><sup>[↩ Parent](#benchmark)</sup></sup>
@@ -108,13 +108,20 @@ Suppose the resources needed by your benchmark are defined as YAML files, locate
 Benchmarks need to specify at least one supported load and resource type for which scalability can be benchmarked.
 
 Load and resource types are described by a name (used for reference from an Execution) and a list of patchers.
+Patchers can be seen as functions, which take a value as input and modify a Kubernetes resource in a patcher-specific way. Examples of patchers are the *ReplicaPatcher*, which modifies the replica specification of a deployment, or the *EnvVarPatcher*, which modifies an environment variable.
+See the [patcher API reference](api-reference/patchers) for an overview of available patchers.
+
 If a benchmark is [executed by an Execution](running-benchmarks), these patchers are used to configure SUT and load generator according to the [load and resource values](creating-an-execution) set in the Execution.
 
 ## Kafka Configuration
 
-Theodolite allows to automatically create and remove Kafka topics for each SLO experiment.
-Use the `removeOnly: True` property for topics which are created automatically by the SUT.
-For those topics, also wildcards are allowed in the topic name.
+Theodolite allows to automatically create and remove Kafka topics for each SLO experiment by setting a `kafkaConfig`.
+Its `bootstrapServer` needs to point to your Kafka cluster and `topics` configures the list of Kafka topics to be created/removed.
+For each topic, you configure its name, the number of partitions and the replication factor.
+
+With the `removeOnly: True` property, you can also instruct Theodolite to only remove topics and not create them.
+This is useful when benchmarking SUTs, which create topics on their own (e.g., Kafka Streams and Samza applications).
+For those topics, also wildcards are allowed in the topic name and, of course, no partition count or replication factor must be provided.
 
 
 <!-- Further information: API Reference -->
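For illustration (not part of the diff itself), a load or resource type with a patcher, as described in the changed documentation above, might be declared like this in a Benchmark; the type name and manifest file name are assumptions:

```yaml
resourceTypes:
  - typeName: "Instances"
    patchers:
      - type: "ReplicaPatcher"                   # scales the SUT Deployment
        resource: "uc1-kstreams-deployment.yaml" # illustrative manifest name
```

Likewise, a minimal `kafkaConfig` using the fields from the CRD reference above could look as follows; the bootstrap address and topic names are illustrative:

```yaml
kafkaConfig:
  bootstrapServer: "theodolite-kafka-kafka-bootstrap:9092"  # illustrative address
  topics:
    # Created before and removed after each SLO experiment.
    - name: input
      numPartitions: 40
      replicationFactor: 1
    # Only removed after each experiment; wildcards are allowed for
    # topics the SUT creates on its own.
    - name: "theodolite-.*"
      removeOnly: True
```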
+71 −1
 apiVersion: v1
 entries:
   theodolite:
+  - apiVersion: v2
+    appVersion: 0.6.3
+    created: "2022-01-24T13:40:40.07330713+01:00"
+    dependencies:
+    - condition: grafana.enabled
+      name: grafana
+      repository: https://grafana.github.io/helm-charts
+      version: 6.17.5
+    - condition: kube-prometheus-stack.enabled
+      name: kube-prometheus-stack
+      repository: https://prometheus-community.github.io/helm-charts
+      version: 20.0.1
+    - condition: cp-helm-charts.enabled
+      name: cp-helm-charts
+      repository: https://soerenhenning.github.io/cp-helm-charts
+      version: 0.6.0
+    - condition: kafka-lag-exporter.enabled
+      name: kafka-lag-exporter
+      repository: https://lightbend.github.io/kafka-lag-exporter/repo/
+      version: 0.6.7
+    description: Theodolite is a framework for benchmarking the horizontal and vertical
+      scalability of cloud-native applications.
+    digest: ebf08e3bf084fcd96eb2ee0588d495258d1741c74019257e55ba40f574874525
+    home: https://www.theodolite.rocks
+    maintainers:
+    - email: soeren.henning@email.uni-kiel.de
+      name: Sören Henning
+      url: https://www.se.informatik.uni-kiel.de/en/team/soeren-henning-m-sc
+    name: theodolite
+    sources:
+    - https://github.com/cau-se/theodolite
+    type: application
+    urls:
+    - https://github.com/cau-se/theodolite/releases/download/v0.6.3/theodolite-0.6.3.tgz
+    version: 0.6.3
+  - apiVersion: v2
+    appVersion: 0.6.2
+    created: "2022-01-23T22:31:04.773793557+01:00"
+    dependencies:
+    - condition: grafana.enabled
+      name: grafana
+      repository: https://grafana.github.io/helm-charts
+      version: 6.17.5
+    - condition: kube-prometheus-stack.enabled
+      name: kube-prometheus-stack
+      repository: https://prometheus-community.github.io/helm-charts
+      version: 20.0.1
+    - condition: cp-helm-charts.enabled
+      name: cp-helm-charts
+      repository: https://soerenhenning.github.io/cp-helm-charts
+      version: 0.6.0
+    - condition: kafka-lag-exporter.enabled
+      name: kafka-lag-exporter
+      repository: https://lightbend.github.io/kafka-lag-exporter/repo/
+      version: 0.6.7
+    description: Theodolite is a framework for benchmarking the horizontal and vertical
+      scalability of cloud-native applications.
+    digest: f6514038741051230dc9be0a6bde3fbc6f92136ecb36c276343e98e550f2c6d0
+    home: https://www.theodolite.rocks
+    maintainers:
+    - email: soeren.henning@email.uni-kiel.de
+      name: Sören Henning
+      url: https://www.se.informatik.uni-kiel.de/en/team/soeren-henning-m-sc
+    name: theodolite
+    sources:
+    - https://github.com/cau-se/theodolite
+    type: application
+    urls:
+    - https://github.com/cau-se/theodolite/releases/download/v0.6.2/theodolite-0.6.2.tgz
+    version: 0.6.2
   - apiVersion: v2
     appVersion: 0.6.1
     created: "2022-01-18T10:40:00.557347616+01:00"
@@ -176,4 +246,4 @@ entries:
     urls:
     - https://github.com/cau-se/theodolite/releases/download/v0.4.0/theodolite-0.4.0.tgz
     version: 0.4.0
-generated: "2022-01-18T10:40:00.486387187+01:00"
+generated: "2022-01-24T13:40:40.036786105+01:00"
@@ -143,7 +143,7 @@ The easiest way to use them is at MyBinder:
 
 Alternatively, you can also [run these notebook locally](https://github.com/cau-se/theodolite/tree/master/analysis), for example, with Docker or Visual Studio Code.
 
-The notebooks allow to compute a scalability function using its *demand* metric and to visualize multiple such functions in plots:
+The notebooks allow to compute a scalability function using Theodolite's *demand* metric and to visualize multiple such functions in plots:
 
 ### Computing the *demand* metric with `demand-metric.ipynb` (optional)
 ---
 title: Available Benchmarks
-has_children: false
+has_children: true
 nav_order: 7
 ---
 
 # Theodolite Benchmarks
 
-Theodolite comes with 4 application benchmarks, which are based on typical use cases for stream processing within microservices. For each benchmark, a corresponding load generator is provided. Currently, Theodolite provides benchmark implementations for Apache Kafka Streams and Apache Flink.
+Theodolite comes with 4 application benchmarks, which are based on typical use cases for stream processing within microservices. For each benchmark, a corresponding [load generator](load-generator) is provided. Currently, Theodolite provides benchmark implementations for Apache Kafka Streams and Apache Flink.
 
 Theodolite's benchmarks are based on typical use cases for stream processing within microservices. Specifically, all benchmarks represent some sort of microservice doing Industrial Internet of Things data analytics.
---
title: Load Generators
parent: Available Benchmarks
has_children: false
nav_order: 1
---

# Load Generator Framework

Theodolite's benchmarks come with a flexible load generator framework. It is used to create load on the [4 Theodolite benchmarks](#prebuilt-container-images), but can also be applied to create [custom load generators](#creating-a-custom-load-generator).
It is particularly designed for scalability: Just spin up multiple instances of the load generator and the instances automatically divide the load to be generated among themselves.

## Prebuilt container images

For each benchmark, we provide a [load generator as OCI container image](https://github.com/orgs/cau-se/packages?tab=packages&q=workload-generator). These load generators simulate smart power meters in an industrial facility, which generate measurement records at a fixed rate. Records are published to an Apache Kafka topic (default) or sent as POST requests to an HTTP endpoint.

You can simply run a load generator container, for example, for benchmark UC1 with:

```sh
docker run ghcr.io/cau-se/theodolite-uc1-workload-generator
```

### Message format

Messages generated by the load generators represent a single measurement of [active power](https://en.wikipedia.org/wiki/AC_power#Active,_reactive,_apparent,_and_complex_power_in_sinusoidal_steady-state). The corresponding message type is specified as [`ActivePowerRecords`](https://github.com/cau-se/titan-ccp-common/blob/master/src/main/avro/ActivePower.avdl)
defined with Avro. It consists of an identifier for the simulated power sensor, a timestamp in epoch milliseconds, and the actual measured (simulated) value in watts.

When sending generated records via Apache Kafka, these records are serialized with the [Confluent Schema Registry](https://docs.confluent.io/platform/current/schema-registry).
If the load generator is configured to send records as HTTP POST requests, records are serialized as JSON according to the following format:

```json
{
  "identifier": "sensor-id",
  "timestamp": 1645564942000,
  "valueInW": 1234.56
}
```

### Configuration

The prebuilt container images can be configured with the following environment variables:

| Environment Variable | Description | Default |
|:----|:----|:----|
| `BOOTSTRAP_SERVER` | Address (`hostname:port`) of another load generator instance to form a cluster with. Can also be this instance. | `localhost:5701` |
| `KUBERNETES_DNS_NAME` | Kubernetes service name to discover other load generators to form a cluster with. Must be a fully qualified domain name (FQDN), e.g., something like `<service>.<namespace>.svc.cluster.local`. Requires `BOOTSTRAP_SERVER` not to be set. | |
| `PORT` | Port used for coordination among load generator instances. | 5701 |
| `PORT_AUTO_INCREMENT` | If set to true and the specified PORT is already used, use the next higher one. Useful if multiple instances should run on the same host, without configuring each instance individually. | true |
| `CLUSTER_NAME_PREFIX` | Only required if unrelated load generators form a cluster. | theodolite-load-generation |
| `TARGET` | The target system the load generator sends messages to. Valid values are: `kafka`, `http`. | `kafka` |
| `KAFKA_BOOTSTRAP_SERVERS` | A list of host/port pairs to use for establishing the initial connection to the Kafka cluster. See [Kafka producer config: `bootstrap.servers`](https://kafka.apache.org/documentation/#producerconfigs_bootstrap.servers) for more information. Only used if Kafka is set as `TARGET`. | `localhost:9092` |
| `KAFKA_INPUT_TOPIC` | Name of the Kafka topic, which should receive the generated messages. Only used if Kafka is set as `TARGET`. | input |
| `SCHEMA_REGISTRY_URL` | URL of the [Confluent Schema Registry](https://docs.confluent.io/platform/current/schema-registry). | `http://localhost:8081` |
| `KAFKA_BATCH_SIZE` | Value for the Kafka producer configuration: [`batch.size`](https://kafka.apache.org/documentation/#producerconfigs_batch.size). Only used if Kafka is set as `TARGET`. | see Kafka producer config: [`batch.size`](https://kafka.apache.org/documentation/#producerconfigs_batch.size) |
| `KAFKA_LINGER_MS` | Value for the Kafka producer configuration: [`linger.ms`](https://kafka.apache.org/documentation/#producerconfigs_linger.ms). Only used if Kafka is set as `TARGET`. | see Kafka producer config: [`linger.ms`](https://kafka.apache.org/documentation/#producerconfigs_linger.ms) |
| `KAFKA_BUFFER_MEMORY` | Value for the Kafka producer configuration: [`buffer.memory`](https://kafka.apache.org/documentation/#producerconfigs_buffer.memory) Only used if Kafka is set as `TARGET`. | see Kafka producer config: [`buffer.memory`](https://kafka.apache.org/documentation/#producerconfigs_buffer.memory) |
| `HTTP_URL` | The URL the load generator should post messages to. Only used if HTTP is set as `TARGET`. | |
| `NUM_SENSORS` | The amount of simulated sensors. | 10 |
| `PERIOD_MS` | The time in milliseconds between generating two messages for the same sensor. With our Theodolite benchmarks, we apply an [open workload model](https://www.usenix.org/legacy/event/nsdi06/tech/full_papers/schroeder/schroeder.pdf) in which new messages are generated at a fixed rate, without considering the think time of the target server nor the time required for generating a message. | 1000 |
| `VALUE` | The constant `valueInW` of an `ActivePowerRecord`. | 10 |
| `THREADS` | Number of worker threads used to generate the load. | 4 |

Please note that there are some additional configuration options for benchmark [UC4's load generator](https://github.com/cau-se/theodolite/blob/master/theodolite-benchmarks/uc4-load-generator/src/main/java/theodolite/uc4/workloadgenerator/LoadGenerator.java).
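As a sketch of how these variables are typically wired up, the following hypothetical Kubernetes Deployment configures the UC1 load generator; names, counts, and addresses are illustrative, not taken from the repository:

```yaml
apiVersion: apps/v1
kind: Deployment
metadata:
  name: uc1-load-generator   # illustrative name
spec:
  replicas: 1                # increase to scale out load generation
  selector:
    matchLabels:
      app: uc1-load-generator
  template:
    metadata:
      labels:
        app: uc1-load-generator
    spec:
      containers:
        - name: workload-generator
          image: ghcr.io/cau-se/theodolite-uc1-workload-generator
          env:
            - name: NUM_SENSORS              # 100 simulated power meters
              value: "100"
            - name: PERIOD_MS                # one record per sensor per second
              value: "1000"
            - name: TARGET
              value: "kafka"
            - name: KAFKA_BOOTSTRAP_SERVERS
              value: "my-kafka:9092"         # illustrative cluster address
            - name: SCHEMA_REGISTRY_URL
              value: "http://my-schema-registry:8081"  # illustrative
```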

## Creating a custom load generator

To create a custom load generator, you need to import the [load-generator-commons](https://github.com/cau-se/theodolite/tree/master/theodolite-benchmarks/load-generator-commons) project. You can then create an instance of the `LoadGenerator` object and call its `run` method:

```java
LoadGenerator loadGenerator = new LoadGenerator()
    .setClusterConfig(clusterConfig)
    .setLoadDefinition(new WorkloadDefinition(
        new KeySpace(key_prefix, numSensors),
        duration))
    .setGeneratorConfig(new LoadGeneratorConfig(
        recordGenerator,
        recordSender))
    .withThreads(threads);
loadGenerator.run();
```

Alternatively, you can also start with a load generator populated with a default configuration or created from environment variables and then adjust the `LoadGenerator` as desired:

```java
LoadGenerator loadGeneratorFromDefaults = LoadGenerator.fromDefaults();
LoadGenerator loadGeneratorFromEnv = LoadGenerator.fromEnvironment();
```
@@ -21,17 +21,16 @@ spec:
               valueFrom:
                 fieldRef:
                   fieldPath: metadata.namespace
-
             # - name: MODE
             #   value: yaml-executor # Default is `yaml-executor`
             - name: THEODOLITE_EXECUTION
-              value: "execution/execution.yaml" # The name of this file must correspond to the filename of the execution, from which the config map is created.
+              value: "/deployments/execution/execution.yaml" # The name of this file must correspond to the filename of the execution, from which the config map is created.
             - name: THEODOLITE_BENCHMARK
-              value: "benchmark/benchmark.yaml" # The name of this file must correspond to the filename of the benchmark, from which the config map is created.
+              value: "/deployments/benchmark/benchmark.yaml" # The name of this file must correspond to the filename of the benchmark, from which the config map is created.
             - name: THEODOLITE_APP_RESOURCES
-              value: "benchmark-resources"
+              value: "/deployments/benchmark-resources"
             - name: RESULTS_FOLDER # Folder for saving results
-              value: results # Default is the pwd (/deployments)
+              value: /deployments/results # Default is the pwd (/deployments)
             # - name: CREATE_RESULTS_FOLDER # Specify whether the specified result folder should be created if it does not exist.
             #   value: "false" # Default is false.
           volumeMounts:
@@ -5,10 +5,7 @@ metadata:
   name: {{ template "theodolite.fullname" . }}-prometheus
 spec:
   serviceAccountName: {{ template "theodolite.fullname" . }}-prometheus
-  serviceMonitorSelector:
-    matchLabels:
-      #app: cp-kafka
-      appScope: titan-ccp
+  serviceMonitorSelector: {}
   resources:
     requests:
       memory: 400Mi
@@ -27,11 +27,18 @@ spec:
             - name: MODE
               value: operator
             - name: RESULTS_FOLDER
-              value: "./results"
+              value: "/deployments/results"
           volumeMounts:
             - name: theodolite-results-volume
               mountPath: "/deployments/results"
-        {{- if .Values.operator.sloChecker.droppedRecordsKStreams.enabled }}
+          resources:
+            requests:
+              memory: "512Mi"
+              cpu: "250m"
+            limits:
+              memory: "1024Mi"
+              cpu: "500m"
+        {{- if .Values.operator.sloChecker.generic.enabled }}
         - name: slo-checker-generic
           image: "{{ .Values.operator.sloChecker.generic.image }}:{{ .Values.operator.sloChecker.generic.imageTag }}"
           imagePullPolicy: "{{ .Values.operator.sloChecker.generic.imagePullPolicy }}"
@@ -43,6 +50,13 @@ spec:
             value: "8082"
           - name: LOG_LEVEL
             value: INFO
+          resources:
+            requests:
+              memory: "64Mi"
+              cpu: "50m"
+            limits:
+              memory: "128Mi"
+              cpu: "100m"
         {{- end }}
         {{- if .Values.operator.sloChecker.lagTrend.enabled }}
         - name: lag-trend-slo-checker
@@ -54,6 +68,13 @@ spec:
           env:
           - name: LOG_LEVEL
             value: INFO
+          resources:
+            requests:
+              memory: "64Mi"
+              cpu: "50m"
+            limits:
+              memory: "128Mi"
+              cpu: "100m"
         {{- end }}
         {{- if .Values.operator.sloChecker.droppedRecordsKStreams.enabled }}
         - name: slo-checker-dropped-records-kstreams
@@ -67,6 +88,13 @@ spec:
             value: "8081"
           - name: LOG_LEVEL
             value: INFO
+          resources:
+            requests:
+              memory: "64Mi"
+              cpu: "50m"
+            limits:
+              memory: "128Mi"
+              cpu: "100m"
         {{- end }}
         {{- if .Values.operator.resultsVolume.accessSidecar.enabled }}
         - name: results-access
-FROM tiangolo/uvicorn-gunicorn-fastapi:python3.7
+FROM python:3.8
 
-COPY requirements.txt requirements.txt
-RUN pip install -r requirements.txt
+WORKDIR /code
 
-COPY ./app /app
\ No newline at end of file
+COPY ./requirements.txt /code/requirements.txt
+RUN pip install --no-cache-dir --upgrade -r /code/requirements.txt
+
+COPY ./app /code/app
+
+WORKDIR /code/app
+
+ENV HOST 0.0.0.0
+ENV PORT 80
+
+CMD ["sh", "-c", "uvicorn main:app --host $HOST --port $PORT"]
-fastapi==0.65.2
-scikit-learn==0.20.3
-pandas==1.0.3
-uvicorn
 requests
+fastapi>=0.68.0,<0.69.0
+uvicorn>=0.15.0,<0.16.0
+#pydantic>=1.8.0,<2.0.0
+#scikit-learn==0.22.2
+pandas==1.0.3
-FROM tiangolo/uvicorn-gunicorn-fastapi:python3.7
+FROM python:3.8
 
-COPY requirements.txt requirements.txt
-RUN pip install -r requirements.txt
+WORKDIR /code
 
-COPY ./app /app
\ No newline at end of file
+COPY ./requirements.txt /code/requirements.txt
+RUN pip install --no-cache-dir --upgrade -r /code/requirements.txt
+
+COPY ./app /code/app
+
+WORKDIR /code/app
+
+ENV HOST 0.0.0.0
+ENV PORT 80
+
+CMD ["sh", "-c", "uvicorn main:app --host $HOST --port $PORT"]
-fastapi==0.65.2
-pandas==1.0.3
-uvicorn
 requests
+fastapi>=0.68.0,<0.69.0
+uvicorn>=0.15.0,<0.16.0
+#pydantic>=1.8.0,<2.0.0
+#scikit-learn==0.22.2
+pandas==1.0.3
-FROM tiangolo/uvicorn-gunicorn-fastapi:python3.7
+FROM python:3.8
 
-COPY requirements.txt requirements.txt
-RUN pip install -r requirements.txt
+WORKDIR /code
 
-COPY ./app /app
\ No newline at end of file
+COPY ./requirements.txt /code/requirements.txt
+RUN pip install --no-cache-dir --upgrade -r /code/requirements.txt
+
+COPY ./app /code/app
+
+WORKDIR /code/app
+
+ENV HOST 0.0.0.0
+ENV PORT 80
+
+CMD ["sh", "-c", "uvicorn main:app --host $HOST --port $PORT"]
-fastapi==0.65.2
-scikit-learn==0.20.3
-pandas==1.0.3
-uvicorn
 requests
+fastapi>=0.68.0,<0.69.0
+uvicorn>=0.15.0,<0.16.0
+#pydantic>=1.8.0,<2.0.0
+scikit-learn==0.22.2
+pandas==1.0.3
cleanup.add_default_serial_version_id=true
cleanup.add_generated_serial_version_id=false
cleanup.add_missing_annotations=true
cleanup.add_missing_deprecated_annotations=true
cleanup.add_missing_methods=false
cleanup.add_missing_nls_tags=false
cleanup.add_missing_override_annotations=true
cleanup.add_missing_override_annotations_interface_methods=true
cleanup.add_serial_version_id=false
cleanup.always_use_blocks=true
cleanup.always_use_parentheses_in_expressions=false
cleanup.always_use_this_for_non_static_field_access=true
cleanup.always_use_this_for_non_static_method_access=true
cleanup.convert_functional_interfaces=false
cleanup.convert_to_enhanced_for_loop=true
cleanup.correct_indentation=true
cleanup.format_source_code=true
cleanup.format_source_code_changes_only=false
cleanup.insert_inferred_type_arguments=false
cleanup.make_local_variable_final=true
cleanup.make_parameters_final=true
cleanup.make_private_fields_final=true
cleanup.make_type_abstract_if_missing_method=false
cleanup.make_variable_declarations_final=true
cleanup.never_use_blocks=false
cleanup.never_use_parentheses_in_expressions=true
cleanup.organize_imports=true
cleanup.qualify_static_field_accesses_with_declaring_class=false
cleanup.qualify_static_member_accesses_through_instances_with_declaring_class=true
cleanup.qualify_static_member_accesses_through_subtypes_with_declaring_class=true
cleanup.qualify_static_member_accesses_with_declaring_class=true
cleanup.qualify_static_method_accesses_with_declaring_class=false
cleanup.remove_private_constructors=true
cleanup.remove_redundant_modifiers=false
cleanup.remove_redundant_semicolons=true
cleanup.remove_redundant_type_arguments=true
cleanup.remove_trailing_whitespaces=true
cleanup.remove_trailing_whitespaces_all=true
cleanup.remove_trailing_whitespaces_ignore_empty=false
cleanup.remove_unnecessary_casts=true
cleanup.remove_unnecessary_nls_tags=true
cleanup.remove_unused_imports=true
cleanup.remove_unused_local_variables=false
cleanup.remove_unused_private_fields=true
cleanup.remove_unused_private_members=false
cleanup.remove_unused_private_methods=true
cleanup.remove_unused_private_types=true
cleanup.sort_members=false
cleanup.sort_members_all=false
cleanup.use_anonymous_class_creation=false
cleanup.use_blocks=true
cleanup.use_blocks_only_for_return_and_throw=false
cleanup.use_lambda=true
cleanup.use_parentheses_in_expressions=true
cleanup.use_this_for_non_static_field_access=true
cleanup.use_this_for_non_static_field_access_only_if_necessary=false
cleanup.use_this_for_non_static_method_access=true
cleanup.use_this_for_non_static_method_access_only_if_necessary=false
cleanup_profile=_CAU-SE-Style
cleanup_settings_version=2
eclipse.preferences.version=1
editor_save_participant_org.eclipse.jdt.ui.postsavelistener.cleanup=true
formatter_profile=_CAU-SE-Style
formatter_settings_version=21
org.eclipse.jdt.ui.ignorelowercasenames=true
org.eclipse.jdt.ui.importorder=;
org.eclipse.jdt.ui.ondemandthreshold=99
org.eclipse.jdt.ui.staticondemandthreshold=99
sp_cleanup.add_default_serial_version_id=true
sp_cleanup.add_generated_serial_version_id=false
sp_cleanup.add_missing_annotations=true
sp_cleanup.add_missing_deprecated_annotations=true
sp_cleanup.add_missing_methods=false
sp_cleanup.add_missing_nls_tags=false
sp_cleanup.add_missing_override_annotations=true
sp_cleanup.add_missing_override_annotations_interface_methods=true
sp_cleanup.add_serial_version_id=false
sp_cleanup.always_use_blocks=true
sp_cleanup.always_use_parentheses_in_expressions=false
sp_cleanup.always_use_this_for_non_static_field_access=true
sp_cleanup.always_use_this_for_non_static_method_access=true
sp_cleanup.convert_functional_interfaces=false
sp_cleanup.convert_to_enhanced_for_loop=true
sp_cleanup.correct_indentation=true
sp_cleanup.format_source_code=true
sp_cleanup.format_source_code_changes_only=false
sp_cleanup.insert_inferred_type_arguments=false
sp_cleanup.make_local_variable_final=true
sp_cleanup.make_parameters_final=true
sp_cleanup.make_private_fields_final=true
sp_cleanup.make_type_abstract_if_missing_method=false
sp_cleanup.make_variable_declarations_final=true
sp_cleanup.never_use_blocks=false
sp_cleanup.never_use_parentheses_in_expressions=true
sp_cleanup.on_save_use_additional_actions=true
sp_cleanup.organize_imports=true
sp_cleanup.qualify_static_field_accesses_with_declaring_class=false
sp_cleanup.qualify_static_member_accesses_through_instances_with_declaring_class=true
sp_cleanup.qualify_static_member_accesses_through_subtypes_with_declaring_class=true
sp_cleanup.qualify_static_member_accesses_with_declaring_class=true
sp_cleanup.qualify_static_method_accesses_with_declaring_class=false
sp_cleanup.remove_private_constructors=true
sp_cleanup.remove_redundant_modifiers=false
sp_cleanup.remove_redundant_semicolons=true
sp_cleanup.remove_redundant_type_arguments=true
sp_cleanup.remove_trailing_whitespaces=true
sp_cleanup.remove_trailing_whitespaces_all=true
sp_cleanup.remove_trailing_whitespaces_ignore_empty=false
sp_cleanup.remove_unnecessary_casts=true
sp_cleanup.remove_unnecessary_nls_tags=true
sp_cleanup.remove_unused_imports=true
sp_cleanup.remove_unused_local_variables=false
sp_cleanup.remove_unused_private_fields=true
sp_cleanup.remove_unused_private_members=false
sp_cleanup.remove_unused_private_methods=true
sp_cleanup.remove_unused_private_types=true
sp_cleanup.sort_members=false
sp_cleanup.sort_members_all=false
sp_cleanup.use_anonymous_class_creation=false
sp_cleanup.use_blocks=true
sp_cleanup.use_blocks_only_for_return_and_throw=false
sp_cleanup.use_lambda=true
sp_cleanup.use_parentheses_in_expressions=true
sp_cleanup.use_this_for_non_static_field_access=true
sp_cleanup.use_this_for_non_static_field_access_only_if_necessary=false
sp_cleanup.use_this_for_non_static_method_access=true
sp_cleanup.use_this_for_non_static_method_access_only_if_necessary=false
\ No newline at end of file
@@ -13,21 +13,19 @@ repositories {
 }
 
 dependencies {
-  // These dependencies are used internally, and not exposed to consumers on their own compile classpath.
   implementation('org.industrial-devops:titan-ccp-common:0.1.0-SNAPSHOT') { changing = true }
   implementation('org.industrial-devops:titan-ccp-common-kafka:0.1.0-SNAPSHOT') { changing = true }
-  implementation 'com.google.code.gson:gson:2.8.2'
-  implementation 'com.google.guava:guava:24.1-jre'
 
+  implementation group: 'org.apache.beam', name: 'beam-sdks-java-core', version: '2.22.0'
   implementation('org.apache.beam:beam-sdks-java-io-kafka:2.22.0'){
     exclude group: 'org.apache.kafka', module: 'kafka-clients'
   }
+  implementation ('io.confluent:kafka-streams-avro-serde:5.3.2')
+
   implementation group: 'org.slf4j', name: 'slf4j-simple', version: '1.7.30'
-  implementation group: 'org.apache.beam', name: 'beam-sdks-java-core', version: '2.22.0'
   
   runtimeOnly 'org.slf4j:slf4j-api:1.7.32'
   runtimeOnly 'org.slf4j:slf4j-jdk14:1.7.32'
 
-  // Use JUnit test framework
   testImplementation 'junit:junit:4.12'
 }
@@ -12,6 +12,9 @@ import org.apache.kafka.clients.consumer.ConsumerConfig;
  */
 public class AbstractPipeline extends Pipeline {
 
+  private static final String KAFKA_CONFIG_SPECIFIC_AVRO_READER = "specific.avro.reader"; // NOPMD
+  private static final String KAFKA_CONFIG_SCHEMA_REGISTRY_URL = "schema.registry.url"; // NOPMD
+
   protected final String inputTopic;
   protected final String bootstrapServer;
   // Application Configurations
@@ -21,8 +24,8 @@ public class AbstractPipeline extends Pipeline {
     super(options);
     this.config = config;
 
-    inputTopic = config.getString(ConfigurationKeys.KAFKA_INPUT_TOPIC);
-    bootstrapServer = config.getString(ConfigurationKeys.KAFKA_BOOTSTRAP_SERVERS);
+    this.inputTopic = config.getString(ConfigurationKeys.KAFKA_INPUT_TOPIC);
+    this.bootstrapServer = config.getString(ConfigurationKeys.KAFKA_BOOTSTRAP_SERVERS);
   }
 
   /**
@@ -32,19 +35,37 @@ public class AbstractPipeline extends Pipeline {
    */
   public Map<String, Object> buildConsumerConfig() {
     final Map<String, Object> consumerConfig = new HashMap<>();
-    consumerConfig.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG,
-        config.getString(ConfigurationKeys.ENABLE_AUTO_COMMIT_CONFIG));
-    consumerConfig.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG,
-        config
-            .getString(ConfigurationKeys.AUTO_OFFSET_RESET_CONFIG));
-    consumerConfig.put("schema.registry.url",
-        config.getString(ConfigurationKeys.SCHEMA_REGISTRY_URL));
-
-    consumerConfig.put("specific.avro.reader",
-        config.getString(ConfigurationKeys.SPECIFIC_AVRO_READER));
-
-    final String applicationName = config.getString(ConfigurationKeys.APPLICATION_NAME);
-    consumerConfig.put(ConsumerConfig.GROUP_ID_CONFIG, applicationName);
+    consumerConfig.put(
+        ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG,
+        this.config.getString(ConfigurationKeys.ENABLE_AUTO_COMMIT_CONFIG));
+    consumerConfig.put(
+        ConsumerConfig.AUTO_OFFSET_RESET_CONFIG,
+        this.config.getString(ConfigurationKeys.AUTO_OFFSET_RESET_CONFIG));
+    consumerConfig.put(
+        KAFKA_CONFIG_SCHEMA_REGISTRY_URL,
+        this.config.getString(ConfigurationKeys.SCHEMA_REGISTRY_URL));
+    consumerConfig.put(
+        KAFKA_CONFIG_SPECIFIC_AVRO_READER,
+        this.config.getString(ConfigurationKeys.SPECIFIC_AVRO_READER));
+    consumerConfig.put(
+        ConsumerConfig.GROUP_ID_CONFIG,
+        this.config.getString(ConfigurationKeys.APPLICATION_NAME));
     return consumerConfig;
   }
+
+  /**
+   * Builds a simple configuration for a Kafka producer transformation.
+   *
+   * @return the build configuration.
+   */
+  public Map<String, Object> buildProducerConfig() {
+    final Map<String, Object> config = new HashMap<>();
+    config.put(
+        KAFKA_CONFIG_SCHEMA_REGISTRY_URL,
+        this.config.getString(ConfigurationKeys.SCHEMA_REGISTRY_URL));
+    config.put(
+        KAFKA_CONFIG_SPECIFIC_AVRO_READER,
+        this.config.getString(ConfigurationKeys.SPECIFIC_AVRO_READER));
+    return config;
+  }
 }
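For orientation, a minimal sketch of how a concrete pipeline could use these config builders together with the Kafka reader changed later in this diff. The subclass, its constructor signature, and the `Configuration` type are assumptions for illustration, not part of the changeset:

```java
package theodolite.commons.beam;

import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.commons.configuration2.Configuration; // assumed configuration type
import theodolite.commons.beam.kafka.KafkaActivePowerTimestampReader;

// Hypothetical use-case pipeline; only illustrates the helpers above.
public class ExampleUcPipeline extends AbstractPipeline {

  protected ExampleUcPipeline(final PipelineOptions options, final Configuration config) {
    super(options, config);

    // Read ActivePowerRecords with event-time semantics; the consumer settings
    // (auto commit, offset reset, Schema Registry, group id) come from
    // buildConsumerConfig() as assembled above.
    this.apply(new KafkaActivePowerTimestampReader(
        this.bootstrapServer, this.inputTopic, this.buildConsumerConfig()));
  }
}
```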
package theodolite.commons.beam.kafka;

import io.confluent.kafka.streams.serdes.avro.SpecificAvroDeserializer;
import org.apache.kafka.common.serialization.Deserializer;
import titan.ccp.model.records.ActivePowerRecord;

/**
 * A Kafka {@link Deserializer} for typed Schema Registry {@link ActivePowerRecord}.
 */
public class ActivePowerRecordDeserializer extends SpecificAvroDeserializer<ActivePowerRecord> {
}
package theodolite.commons.beam.kafka;

import io.confluent.kafka.serializers.KafkaAvroDeserializer;
import java.util.Map;
import org.apache.beam.sdk.coders.AvroCoder;
import org.apache.beam.sdk.io.kafka.KafkaIO;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.values.KV;
import org.apache.beam.sdk.values.PBegin;
import org.apache.beam.sdk.values.PCollection;
import org.apache.kafka.common.serialization.StringDeserializer;
import titan.ccp.model.records.ActivePowerRecord;

/**
 * Simple {@link PTransform} that read from Kafka using {@link KafkaIO}.
 */
public class KafkaActivePowerRecordReader extends
    PTransform<PBegin, PCollection<KV<String, ActivePowerRecord>>> {

  private static final long serialVersionUID = 2603286150183186115L;
  private final PTransform<PBegin, PCollection<KV<String, ActivePowerRecord>>> reader;


  /**
   * Instantiates a {@link PTransform} that reads from Kafka with the given Configuration.
   */
  public KafkaActivePowerRecordReader(final String bootstrapServer, final String inputTopic,
                                      final Map<String, Object> consumerConfig) {
    super();

    if (bootstrapServer == null) {
      throw new IllegalArgumentException("bootstrapServer is null");
    }

    if (inputTopic == null) {
      throw new IllegalArgumentException("inputTopic is null");
    }

    // Check if boostrap server and inputTopic are defined
    if (bootstrapServer.isEmpty() || inputTopic.isEmpty()) {
      throw new IllegalArgumentException("bootstrapServer or inputTopic missing");
    }


    reader =
        KafkaIO.<String, ActivePowerRecord>read()
            .withBootstrapServers(bootstrapServer)
            .withTopic(inputTopic)
            .withKeyDeserializer(StringDeserializer.class)
            .withValueDeserializerAndCoder((Class) KafkaAvroDeserializer.class,
                AvroCoder.of(ActivePowerRecord.class))
            .withConsumerConfigUpdates(consumerConfig)
            .withoutMetadata();
  }

  @Override
  public PCollection<KV<String, ActivePowerRecord>> expand(final PBegin input) {
    return input.apply(this.reader);
  }

}
 package theodolite.commons.beam.kafka;

-import io.confluent.kafka.serializers.KafkaAvroDeserializer;
 import java.util.Map;
 import org.apache.beam.sdk.coders.AvroCoder;
 import org.apache.beam.sdk.io.kafka.KafkaIO;
@@ -12,39 +11,36 @@ import org.apache.kafka.common.serialization.StringDeserializer;
 import titan.ccp.model.records.ActivePowerRecord;

 /**
- * Simple {@link PTransform} that read from Kafka using {@link KafkaIO}.
- * Has additional a TimestampPolicy.
+ * Simple {@link PTransform} that reads from Kafka using {@link KafkaIO} with event time.
  */
-public class KafkaActivePowerTimestampReader extends
-    PTransform<PBegin, PCollection<KV<String, ActivePowerRecord>>> {
+public class KafkaActivePowerTimestampReader
+    extends PTransform<PBegin, PCollection<KV<String, ActivePowerRecord>>> {

   private static final long serialVersionUID = 2603286150183186115L;
   private final PTransform<PBegin, PCollection<KV<String, ActivePowerRecord>>> reader;

-
   /**
    * Instantiates a {@link PTransform} that reads from Kafka with the given Configuration.
    */
-  public KafkaActivePowerTimestampReader(final String bootstrapServer, final String inputTopic,
+  public KafkaActivePowerTimestampReader(
+      final String bootstrapServer,
+      final String inputTopic,
       final Map<String, Object> consumerConfig) {
     super();

-    // Check if boostrap server and inputTopic are defined
+    // Check if bootstrap server and inputTopic are defined
     if (bootstrapServer.isEmpty() || inputTopic.isEmpty()) {
       throw new IllegalArgumentException("bootstrapServer or inputTopic missing");
     }

-    reader =
-        KafkaIO.<String, ActivePowerRecord>read()
-            .withBootstrapServers(bootstrapServer)
-            .withTopic(inputTopic)
-            .withKeyDeserializer(StringDeserializer.class)
-            .withValueDeserializerAndCoder((Class) KafkaAvroDeserializer.class,
-                AvroCoder.of(ActivePowerRecord.class))
-            .withConsumerConfigUpdates(consumerConfig)
-            // Set TimeStampPolicy for event time
-            .withTimestampPolicyFactory(
-                (tp, previousWaterMark) -> new EventTimePolicy(previousWaterMark))
-            .withoutMetadata();
+    this.reader = KafkaIO.<String, ActivePowerRecord>read().withBootstrapServers(bootstrapServer)
+        .withTopic(inputTopic).withKeyDeserializer(StringDeserializer.class)
+        .withValueDeserializerAndCoder(
+            ActivePowerRecordDeserializer.class,
+            AvroCoder.of(ActivePowerRecord.class))
+        .withConsumerConfigUpdates(consumerConfig)
+        .withTimestampPolicyFactory(
+            (tp, previousWatermark) -> new EventTimePolicy(previousWatermark))
+        .withoutMetadata();
   }

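The EventTimePolicy referenced above is not shown in this compare. As a rough sketch of what such a policy can look like with Beam's TimestampPolicy API, assuming the watermark simply tracks the timestamp embedded in the last record (field names and watermark handling are assumptions, not the repository's actual implementation):

import java.util.Optional;
import org.apache.beam.sdk.io.kafka.KafkaRecord;
import org.apache.beam.sdk.io.kafka.TimestampPolicy;
import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
import org.joda.time.Instant;
import titan.ccp.model.records.ActivePowerRecord;

public class EventTimePolicy extends TimestampPolicy<String, ActivePowerRecord> {

  protected Instant currentWatermark;

  public EventTimePolicy(final Optional<Instant> previousWatermark) {
    super();
    this.currentWatermark = previousWatermark.orElse(BoundedWindow.TIMESTAMP_MIN_VALUE);
  }

  @Override
  public Instant getTimestampForRecord(final PartitionContext ctx,
      final KafkaRecord<String, ActivePowerRecord> record) {
    // Derive event time from the timestamp carried by the record itself.
    this.currentWatermark = new Instant(record.getKV().getValue().getTimestamp());
    return this.currentWatermark;
  }

  @Override
  public Instant getWatermark(final PartitionContext ctx) {
    return this.currentWatermark;
  }
}
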
 package theodolite.commons.beam.kafka;

+import java.util.Map;
 import org.apache.beam.sdk.io.kafka.KafkaIO;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.values.KV;
@@ -9,23 +10,35 @@ import org.apache.kafka.common.serialization.Serializer;
 import org.apache.kafka.common.serialization.StringSerializer;

 /**
- * Wrapper for a Kafka writing Transformation
- * where the value type can be generic.
+ * Wrapper for a Kafka writing Transformation where the value type can be generic.
  *
  * @param <T> type of the value.
  */
-public class KafkaWriterTransformation<T> extends
-    PTransform<PCollection<KV<String, T>>, PDone> {
+public class KafkaWriterTransformation<T> extends PTransform<PCollection<KV<String, T>>, PDone> {

   private static final long serialVersionUID = 3171423303843174723L;
   private final PTransform<PCollection<KV<String, T>>, PDone> writer;

   /**
-   * Creates a new kafka writer transformation.
+   * Creates a new Kafka writer transformation.
    */
-  public KafkaWriterTransformation(final String bootstrapServer, final String outputTopic,
+  public KafkaWriterTransformation(
+      final String bootstrapServer,
+      final String outputTopic,
       final Class<? extends Serializer<T>> valueSerializer) {
+    this(bootstrapServer, outputTopic, valueSerializer, Map.of());
+  }
+
+  /**
+   * Creates a new Kafka writer transformation.
+   */
+  public KafkaWriterTransformation(
+      final String bootstrapServer,
+      final String outputTopic,
+      final Class<? extends Serializer<T>> valueSerializer,
+      final Map<String, Object> producerConfig) {
     super();
-    // Check if boostrap server and outputTopic are defined
+    // Check if bootstrap server and outputTopic are defined
     if (bootstrapServer.isEmpty() || outputTopic.isEmpty()) {
       throw new IllegalArgumentException("bootstrapServer or outputTopic missing");
     }
@@ -34,7 +47,8 @@ public class KafkaWriterTransformation<T> extends
         .withBootstrapServers(bootstrapServer)
         .withTopic(outputTopic)
         .withKeySerializer(StringSerializer.class)
-        .withValueSerializer(valueSerializer);
+        .withValueSerializer(valueSerializer)
+        .withProducerConfigUpdates(producerConfig);

   }

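The new overload lets producer settings be tuned per benchmark while the old three-argument constructor keeps its behavior via Map.of(). A hedged example; the server, topic, and compression setting are placeholders chosen for illustration:

import java.util.Map;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringSerializer;

public final class WriterUsageSketch {

  private WriterUsageSketch() {}

  /** Builds a writer with an extra producer setting (illustrative values only). */
  public static KafkaWriterTransformation<String> compressedWriter() {
    return new KafkaWriterTransformation<>(
        "localhost:9092",
        "output",
        StringSerializer.class,
        Map.<String, Object>of(ProducerConfig.COMPRESSION_TYPE_CONFIG, "gzip"));
  }
}
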
@@ -19,7 +19,7 @@ services:
      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
      KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 30000
      KAFKA_AUTO_CREATE_TOPICS_ENABLE: "false"
-      KAFKA_CREATE_TOPICS: "input:3:1,output:3:1,configuration:3:1,aggregation-feedback:3:1"
+      KAFKA_CREATE_TOPICS: "input:3:1"
  schema-registry:
    image: confluentinc/cp-schema-registry:5.3.1
    depends_on:
@@ -33,7 +33,7 @@ services:
      SCHEMA_REGISTRY_HOST_NAME: schema-registry
      SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
  load-generator:
-    image: ghcr.io/cau-se/theodolite-uc1-workload-generator:latest
+    image: ghcr.io/cau-se/theodolite-uc1-workload-generator:${THEODOLITE_TAG:-latest}
    depends_on:
      - schema-registry
      - kafka
@@ -44,7 +44,7 @@ services:
      SCHEMA_REGISTRY_URL: http://schema-registry:8081
      NUM_SENSORS: 10
  benchmark-jobmanager:
-      image: ghcr.io/cau-se/theodolite-uc1-beam-flink:latest
+      image: ghcr.io/cau-se/theodolite-uc1-beam-flink:${THEODOLITE_TAG:-latest}
      #ports:
      #  - "8080:8081"
      command: >
@@ -62,7 +62,7 @@ services:
        - schema-registry
        - kafka
  benchmark-taskmanager:
-      image: ghcr.io/cau-se/theodolite-uc1-beam-flink:latest
+      image: ghcr.io/cau-se/theodolite-uc1-beam-flink:${THEODOLITE_TAG:-latest}
      scale: 1
      command: taskmanager
      environment:
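
Here and in the remaining Compose files, ${THEODOLITE_TAG:-latest} is standard Docker Compose variable substitution: the image tag is read from the THEODOLITE_TAG environment variable and falls back to latest when the variable is unset, so a pinned release can be benchmarked without editing the files.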
@@ -21,7 +21,7 @@ services:
      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
      KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 30000
      KAFKA_AUTO_CREATE_TOPICS_ENABLE: "false"
-      KAFKA_CREATE_TOPICS: "input:3:1,output:3:1,configuration:3:1,aggregation-feedback:3:1"
+      KAFKA_CREATE_TOPICS: "input:3:1"
  schema-registry:
    image: confluentinc/cp-schema-registry:5.3.1
    depends_on:
@@ -35,7 +35,7 @@ services:
      SCHEMA_REGISTRY_HOST_NAME: schema-registry
      SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
  benchmark:
-    image: ghcr.io/cau-se/theodolite-uc1-beam-samza:latest
+    image: ghcr.io/cau-se/theodolite-uc1-beam-samza:${THEODOLITE_TAG:-latest}
    scale: 1
    depends_on:
      - schema-registry
@@ -47,7 +47,7 @@ services:
      KAFKA_BOOTSTRAP_SERVERS: kafka:9092
      SCHEMA_REGISTRY_URL: http://schema-registry:8081
  load-generator:
-    image: ghcr.io/cau-se/theodolite-uc1-workload-generator:latest
+    image: ghcr.io/cau-se/theodolite-uc1-workload-generator:${THEODOLITE_TAG:-latest}
    depends_on:
      - schema-registry
      - kafka
@@ -19,7 +19,7 @@ services:
      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
      KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 30000
      KAFKA_AUTO_CREATE_TOPICS_ENABLE: "false"
-      KAFKA_CREATE_TOPICS: "input:3:1,output:3:1,configuration:3:1,aggregation-feedback:3:1"
+      KAFKA_CREATE_TOPICS: "input:3:1"
  schema-registry:
    image: confluentinc/cp-schema-registry:5.3.1
    depends_on:
@@ -33,7 +33,7 @@ services:
      SCHEMA_REGISTRY_HOST_NAME: schema-registry
      SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
  load-generator:
-    image: ghcr.io/cau-se/theodolite-uc1-workload-generator:latest
+    image: ghcr.io/cau-se/theodolite-uc1-workload-generator:${THEODOLITE_TAG:-latest}
    depends_on:
      - schema-registry
      - kafka
@@ -44,7 +44,7 @@ services:
      SCHEMA_REGISTRY_URL: http://schema-registry:8081
      NUM_SENSORS: 10
  benchmark-jobmanager:
-    image: ghcr.io/cau-se/theodolite-uc1-flink:latest
+    image: ghcr.io/cau-se/theodolite-uc1-flink:${THEODOLITE_TAG:-latest}
    #ports:
    #  - "8080:8081"
    command: standalone-job --job-classname theodolite.uc1.application.HistoryServiceFlinkJob
@@ -59,7 +59,7 @@ services:
      - schema-registry
      - kafka
  benchmark-taskmanager:
-    image: ghcr.io/cau-se/theodolite-uc1-flink:latest
+    image: ghcr.io/cau-se/theodolite-uc1-flink:${THEODOLITE_TAG:-latest}
    command: taskmanager
    environment:
      - |
@@ -19,7 +19,7 @@ services:
      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
      KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 30000
      KAFKA_AUTO_CREATE_TOPICS_ENABLE: "false"
-      KAFKA_CREATE_TOPICS: "input:3:1,output:3:1,configuration:3:1,aggregation-feedback:3:1"
+      KAFKA_CREATE_TOPICS: "input:3:1"
  schema-registry:
    image: confluentinc/cp-schema-registry:5.3.1
    depends_on:
@@ -33,7 +33,7 @@ services:
      SCHEMA_REGISTRY_HOST_NAME: schema-registry
      SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
  benchmark:
-    image: ghcr.io/cau-se/theodolite-uc1-kstreams-app:latest
+    image: ghcr.io/cau-se/theodolite-uc1-kstreams-app:${THEODOLITE_TAG:-latest}
    depends_on:
      - schema-registry
      - kafka
@@ -41,7 +41,7 @@ services:
      KAFKA_BOOTSTRAP_SERVERS: kafka:9092
      SCHEMA_REGISTRY_URL: http://schema-registry:8081
  load-generator:
-    image: ghcr.io/cau-se/theodolite-uc1-workload-generator:latest
+    image: ghcr.io/cau-se/theodolite-uc1-workload-generator:${THEODOLITE_TAG:-latest}
    depends_on:
      - schema-registry
      - kafka
@@ -19,7 +19,7 @@ services:
      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
      KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 30000
      KAFKA_AUTO_CREATE_TOPICS_ENABLE: "false"
-      KAFKA_CREATE_TOPICS: "input:3:1,output:3:1,configuration:3:1,aggregation-feedback:3:1"
+      KAFKA_CREATE_TOPICS: "input:3:1,output:3:1"
  schema-registry:
    image: confluentinc/cp-schema-registry:5.3.1
    depends_on:
@@ -33,7 +33,7 @@ services:
      SCHEMA_REGISTRY_HOST_NAME: schema-registry
      SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
  load-generator:
-    image: ghcr.io/cau-se/theodolite-uc2-workload-generator:latest
+    image: ghcr.io/cau-se/theodolite-uc2-workload-generator:${THEODOLITE_TAG:-latest}
    depends_on:
      - schema-registry
      - kafka
@@ -44,7 +44,7 @@ services:
      SCHEMA_REGISTRY_URL: http://schema-registry:8081
      NUM_SENSORS: 10
  benchmark-jobmanager:
-      image: ghcr.io/cau-se/theodolite-uc2-beam-flink:latest
+      image: ghcr.io/cau-se/theodolite-uc2-beam-flink:${THEODOLITE_TAG:-latest}
      #ports:
      #  - "8080:8081"
      command: >
@@ -62,7 +62,7 @@ services:
        - schema-registry
        - kafka
  benchmark-taskmanager:
-      image: ghcr.io/cau-se/theodolite-uc2-beam-flink:latest
+      image: ghcr.io/cau-se/theodolite-uc2-beam-flink:${THEODOLITE_TAG:-latest}
      scale: 1
      command: taskmanager
      environment:
@@ -21,7 +21,7 @@ services:
      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
      KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 30000
      KAFKA_AUTO_CREATE_TOPICS_ENABLE: "false"
-      KAFKA_CREATE_TOPICS: "input:3:1,output:3:1,configuration:3:1,aggregation-feedback:3:1"
+      KAFKA_CREATE_TOPICS: "input:3:1,output:3:1"
  schema-registry:
    image: confluentinc/cp-schema-registry:5.3.1
    depends_on:
@@ -35,7 +35,7 @@ services:
      SCHEMA_REGISTRY_HOST_NAME: schema-registry
      SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
  benchmark:
-    image: ghcr.io/cau-se/theodolite-uc2-beam-samza:latest
+    image: ghcr.io/cau-se/theodolite-uc2-beam-samza:${THEODOLITE_TAG:-latest}
    scale: 1
    depends_on:
      - schema-registry
@@ -47,7 +47,7 @@ services:
      KAFKA_BOOTSTRAP_SERVERS: kafka:9092
      SCHEMA_REGISTRY_URL: http://schema-registry:8081
  load-generator:
-    image: ghcr.io/cau-se/theodolite-uc2-workload-generator:latest
+    image: ghcr.io/cau-se/theodolite-uc2-workload-generator:${THEODOLITE_TAG:-latest}
    depends_on:
      - schema-registry
      - kafka
@@ -19,7 +19,7 @@ services:
      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
      KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 30000
      KAFKA_AUTO_CREATE_TOPICS_ENABLE: "false"
-      KAFKA_CREATE_TOPICS: "input:3:1,output:3:1,configuration:3:1,aggregation-feedback:3:1"
+      KAFKA_CREATE_TOPICS: "input:3:1,output:3:1"
  schema-registry:
    image: confluentinc/cp-schema-registry:5.3.1
    depends_on:
@@ -33,7 +33,7 @@ services:
      SCHEMA_REGISTRY_HOST_NAME: schema-registry
      SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
  load-generator:
-    image: ghcr.io/cau-se/theodolite-uc2-workload-generator:latest
+    image: ghcr.io/cau-se/theodolite-uc2-workload-generator:${THEODOLITE_TAG:-latest}
    depends_on:
      - schema-registry
      - kafka
@@ -44,7 +44,7 @@ services:
      SCHEMA_REGISTRY_URL: http://schema-registry:8081
      NUM_SENSORS: 10
  benchmark-jobmanager:
-    image: ghcr.io/cau-se/theodolite-uc2-flink:latest
+    image: ghcr.io/cau-se/theodolite-uc2-flink:${THEODOLITE_TAG:-latest}
    #ports:
    #  - "8080:8081"
    command: standalone-job --job-classname theodolite.uc2.application.HistoryServiceFlinkJob
@@ -59,7 +59,7 @@ services:
      - schema-registry
      - kafka
  benchmark-taskmanager:
-    image: ghcr.io/cau-se/theodolite-uc2-flink:latest
+    image: ghcr.io/cau-se/theodolite-uc2-flink:${THEODOLITE_TAG:-latest}
    command: taskmanager
    environment:
      - |
@@ -19,7 +19,7 @@ services:
      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
      KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 30000
      KAFKA_AUTO_CREATE_TOPICS_ENABLE: "false"
-      KAFKA_CREATE_TOPICS: "input:3:1,output:3:1,configuration:3:1,aggregation-feedback:3:1"
+      KAFKA_CREATE_TOPICS: "input:3:1,output:3:1"
  schema-registry:
    image: confluentinc/cp-schema-registry:5.3.1
    depends_on:
@@ -33,7 +33,7 @@ services:
      SCHEMA_REGISTRY_HOST_NAME: schema-registry
      SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
  benchmark:
-    image: ghcr.io/cau-se/theodolite-uc2-kstreams-app:latest
+    image: ghcr.io/cau-se/theodolite-uc2-kstreams-app:${THEODOLITE_TAG:-latest}
    depends_on:
      - schema-registry
      - kafka
@@ -42,7 +42,7 @@ services:
      SCHEMA_REGISTRY_URL: http://schema-registry:8081
      KAFKA_WINDOW_DURATION_MINUTES: 60
  load-generator:
-    image: ghcr.io/cau-se/theodolite-uc2-workload-generator:latest
+    image: ghcr.io/cau-se/theodolite-uc2-workload-generator:${THEODOLITE_TAG:-latest}
    depends_on:
      - schema-registry
      - kafka
@@ -19,7 +19,7 @@ services:
      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
      KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 30000
      KAFKA_AUTO_CREATE_TOPICS_ENABLE: "false"
-      KAFKA_CREATE_TOPICS: "input:3:1,output:3:1,configuration:3:1,aggregation-feedback:3:1"
+      KAFKA_CREATE_TOPICS: "input:3:1,output:3:1"
  schema-registry:
    image: confluentinc/cp-schema-registry:5.3.1
    depends_on:
@@ -33,7 +33,7 @@ services:
      SCHEMA_REGISTRY_HOST_NAME: schema-registry
      SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
  load-generator:
-    image: ghcr.io/cau-se/theodolite-uc3-workload-generator:latest
+    image: ghcr.io/cau-se/theodolite-uc3-workload-generator:${THEODOLITE_TAG:-latest}
    depends_on:
      - schema-registry
      - kafka
@@ -44,7 +44,7 @@ services:
      SCHEMA_REGISTRY_URL: http://schema-registry:8081
      NUM_SENSORS: 10
  benchmark-jobmanager:
-      image: ghcr.io/cau-se/theodolite-uc3-beam-flink:latest
+      image: ghcr.io/cau-se/theodolite-uc3-beam-flink:${THEODOLITE_TAG:-latest}
      #ports:
      #  - "8080:8081"
      command: >
@@ -64,7 +64,7 @@ services:
        - schema-registry
        - kafka
  benchmark-taskmanager:
-      image: ghcr.io/cau-se/theodolite-uc3-beam-flink:latest
+      image: ghcr.io/cau-se/theodolite-uc3-beam-flink:${THEODOLITE_TAG:-latest}
      scale: 1
      command: taskmanager
      environment:
@@ -21,7 +21,7 @@ services:
      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
      KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 30000
      KAFKA_AUTO_CREATE_TOPICS_ENABLE: "false"
-      KAFKA_CREATE_TOPICS: "input:3:1,output:3:1,configuration:3:1,aggregation-feedback:3:1"
+      KAFKA_CREATE_TOPICS: "input:3:1,output:3:1"
  schema-registry:
    image: confluentinc/cp-schema-registry:5.3.1
    depends_on:
@@ -35,7 +35,7 @@ services:
      SCHEMA_REGISTRY_HOST_NAME: schema-registry
      SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
  benchmark:
-    image: ghcr.io/cau-se/theodolite-uc3-beam-samza:latest
+    image: ghcr.io/cau-se/theodolite-uc3-beam-samza:${THEODOLITE_TAG:-latest}
    scale: 1
    depends_on:
      - schema-registry
@@ -47,7 +47,7 @@ services:
      KAFKA_BOOTSTRAP_SERVERS: kafka:9092
      SCHEMA_REGISTRY_URL: http://schema-registry:8081
  load-generator:
-    image: ghcr.io/cau-se/theodolite-uc3-workload-generator:latest
+    image: ghcr.io/cau-se/theodolite-uc3-workload-generator:${THEODOLITE_TAG:-latest}
    depends_on:
      - schema-registry
      - kafka
@@ -19,7 +19,7 @@ services:
      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
      KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 30000
      KAFKA_AUTO_CREATE_TOPICS_ENABLE: "false"
-      KAFKA_CREATE_TOPICS: "input:3:1,output:3:1,configuration:3:1,aggregation-feedback:3:1"
+      KAFKA_CREATE_TOPICS: "input:3:1,output:3:1"
  schema-registry:
    image: confluentinc/cp-schema-registry:5.3.1
    depends_on:
@@ -33,7 +33,7 @@ services:
      SCHEMA_REGISTRY_HOST_NAME: schema-registry
      SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
  load-generator:
-    image: ghcr.io/cau-se/theodolite-uc3-workload-generator:latest
+    image: ghcr.io/cau-se/theodolite-uc3-workload-generator:${THEODOLITE_TAG:-latest}
    depends_on:
      - schema-registry
      - kafka
@@ -44,7 +44,7 @@ services:
      SCHEMA_REGISTRY_URL: http://schema-registry:8081
      NUM_SENSORS: 10
  benchmark-jobmanager:
-    image: ghcr.io/cau-se/theodolite-uc3-flink:latest
+    image: ghcr.io/cau-se/theodolite-uc3-flink:${THEODOLITE_TAG:-latest}
    #ports:
    #  - "8080:8081"
    command: standalone-job --job-classname theodolite.uc3.application.HistoryServiceFlinkJob
@@ -59,7 +59,7 @@ services:
      - schema-registry
      - kafka
  benchmark-taskmanager:
-    image: ghcr.io/cau-se/theodolite-uc3-flink:latest
+    image: ghcr.io/cau-se/theodolite-uc3-flink:${THEODOLITE_TAG:-latest}
    command: taskmanager
    environment:
      - |
@@ -19,7 +19,7 @@ services:
      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
      KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 30000
      KAFKA_AUTO_CREATE_TOPICS_ENABLE: "false"
-      KAFKA_CREATE_TOPICS: "input:3:1,output:3:1,configuration:3:1,aggregation-feedback:3:1"
+      KAFKA_CREATE_TOPICS: "input:3:1,output:3:1"
  schema-registry:
    image: confluentinc/cp-schema-registry:5.3.1
    depends_on:
@@ -33,7 +33,7 @@ services:
      SCHEMA_REGISTRY_HOST_NAME: schema-registry
      SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
  benchmark:
-    image: ghcr.io/cau-se/theodolite-uc3-kstreams-app:latest
+    image: ghcr.io/cau-se/theodolite-uc3-kstreams-app:${THEODOLITE_TAG:-latest}
    depends_on:
      - schema-registry
      - kafka
@@ -41,7 +41,7 @@ services:
      KAFKA_BOOTSTRAP_SERVERS: kafka:9092
      SCHEMA_REGISTRY_URL: http://schema-registry:8081
  load-generator:
-    image: ghcr.io/cau-se/theodolite-uc3-workload-generator:latest
+    image: ghcr.io/cau-se/theodolite-uc3-workload-generator:${THEODOLITE_TAG:-latest}
    depends_on:
      - schema-registry
      - kafka
@@ -33,7 +33,7 @@ services:
      SCHEMA_REGISTRY_HOST_NAME: schema-registry
      SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
  load-generator:
-    image: ghcr.io/cau-se/theodolite-uc4-workload-generator:latest
+    image: ghcr.io/cau-se/theodolite-uc4-workload-generator:${THEODOLITE_TAG:-latest}
    depends_on:
      - schema-registry
      - kafka
@@ -45,7 +45,7 @@ services:
      NUM_SENSORS: 4
      NUM_NESTED_GROUPS: 4
  benchmark-jobmanager:
-      image: ghcr.io/cau-se/theodolite-uc4-beam-flink:latest
+      image: ghcr.io/cau-se/theodolite-uc4-beam-flink:${THEODOLITE_TAG:-latest}
      #ports:
      #  - "8080:8081"
      command: >
@@ -66,7 +66,7 @@ services:
        - schema-registry
        - kafka
  benchmark-taskmanager:
-      image: ghcr.io/cau-se/theodolite-uc4-beam-flink:latest
+      image: ghcr.io/cau-se/theodolite-uc4-beam-flink:${THEODOLITE_TAG:-latest}
      scale: 1
      command: taskmanager
      environment:
@@ -35,7 +35,7 @@ services:
      SCHEMA_REGISTRY_HOST_NAME: schema-registry
      SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
  benchmark:
-    image: ghcr.io/cau-se/theodolite-uc4-beam-samza:latest
+    image: ghcr.io/cau-se/theodolite-uc4-beam-samza:${THEODOLITE_TAG:-latest}
    scale: 1
    depends_on:
      - schema-registry
@@ -47,7 +47,7 @@ services:
      KAFKA_BOOTSTRAP_SERVERS: kafka:9092
      SCHEMA_REGISTRY_URL: http://schema-registry:8081
  load-generator:
-    image: ghcr.io/cau-se/theodolite-uc4-workload-generator:latest
+    image: ghcr.io/cau-se/theodolite-uc4-workload-generator:${THEODOLITE_TAG:-latest}
    depends_on:
      - schema-registry
      - kafka
@@ -33,7 +33,7 @@ services:
      SCHEMA_REGISTRY_HOST_NAME: schema-registry
      SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
  load-generator:
-    image: ghcr.io/cau-se/theodolite-uc4-workload-generator:latest
+    image: ghcr.io/cau-se/theodolite-uc4-workload-generator:${THEODOLITE_TAG:-latest}
    depends_on:
      - schema-registry
      - kafka
@@ -45,7 +45,7 @@ services:
      NUM_SENSORS: 4
      NUM_NESTED_GROUPS: 4
  benchmark-jobmanager:
-    image: ghcr.io/cau-se/theodolite-uc4-flink:latest
+    image: ghcr.io/cau-se/theodolite-uc4-flink:${THEODOLITE_TAG:-latest}
    #ports:
    #  - "8080:8081"
    command: standalone-job --job-classname theodolite.uc4.application.AggregationServiceFlinkJob
@@ -60,7 +60,7 @@ services:
      - schema-registry
      - kafka
  benchmark-taskmanager:
-    image: ghcr.io/cau-se/theodolite-uc4-flink:latest
+    image: ghcr.io/cau-se/theodolite-uc4-flink:${THEODOLITE_TAG:-latest}
    command: taskmanager
    environment:
      - |
@@ -33,7 +33,7 @@ services:
      SCHEMA_REGISTRY_HOST_NAME: schema-registry
      SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
  benchmark:
-    image: ghcr.io/cau-se/theodolite-uc4-kstreams-app:latest
+    image: ghcr.io/cau-se/theodolite-uc4-kstreams-app:${THEODOLITE_TAG:-latest}
    depends_on:
      - schema-registry
      - kafka
@@ -41,7 +41,7 @@ services:
      KAFKA_BOOTSTRAP_SERVERS: kafka:9092
      SCHEMA_REGISTRY_URL: http://schema-registry:8081
  load-generator:
-    image: ghcr.io/cau-se/theodolite-uc4-workload-generator:latest
+    image: ghcr.io/cau-se/theodolite-uc4-workload-generator:${THEODOLITE_TAG:-latest}
    depends_on:
      - schema-registry
      - kafka
The following one-line change, bumping formatter_settings_version from 15 to 21, is applied identically to the Eclipse JDT settings file of three modules:

@@ -61,7 +61,7 @@ cleanup_settings_version=2
 eclipse.preferences.version=1
 editor_save_participant_org.eclipse.jdt.ui.postsavelistener.cleanup=true
 formatter_profile=_CAU-SE-Style
-formatter_settings_version=15
+formatter_settings_version=21
 org.eclipse.jdt.ui.ignorelowercasenames=true
 org.eclipse.jdt.ui.importorder=;
 org.eclipse.jdt.ui.ondemandthreshold=99
@@ -37,7 +37,7 @@ public final class ConfigurationKeys {

   public static final String KAFKA_BUFFER_MEMORY = "KAFKA_BUFFER_MEMORY";

-  public static final String HTTP_URI = "HTTP_URI";
+  public static final String HTTP_URL = "HTTP_URL";

   private ConfigurationKeys() {}

@@ -95,7 +95,7 @@ public final class LoadGenerator {
            new KeySpace(SENSOR_PREFIX_DEFAULT, NUMBER_OF_KEYS_DEFAULT),
            Duration.ofMillis(PERIOD_MS_DEFAULT)))
        .setGeneratorConfig(new LoadGeneratorConfig(
-            TitanRecordGeneratorFactory.forConstantValue(VALUE_DEFAULT),
+            TitanRecordGenerator.forConstantValue(VALUE_DEFAULT),
            TitanKafkaSenderFactory.forKafkaConfig(
                KAFKA_BOOTSTRAP_SERVERS_DEFAULT,
                KAFKA_TOPIC_DEFAULT,
@@ -164,12 +164,16 @@ public final class LoadGenerator {
          kafkaBootstrapServers,
          kafkaInputTopic,
          schemaRegistryUrl);
+      LOGGER.info(
+          "Use Kafka as target with bootstrap server '{}', schema registry url '{}' and topic '{}'.", // NOCS
+          kafkaBootstrapServers, schemaRegistryUrl, kafkaInputTopic);
    } else if (target == LoadGeneratorTarget.HTTP) {
-      final URI uri = URI.create(
+      final URI url = URI.create(
          Objects.requireNonNullElse(
-              System.getenv(ConfigurationKeys.HTTP_URI),
+              System.getenv(ConfigurationKeys.HTTP_URL),
              HTTP_URI_DEFAULT));
-      recordSender = new HttpRecordSender<>(uri);
+      recordSender = new HttpRecordSender<>(url);
+      LOGGER.info("Use HTTP server as target with url '{}'.", url);
    } else {
      // Should never happen
      throw new IllegalStateException("Target " + target + " is not handled yet.");
@@ -194,7 +198,7 @@ public final class LoadGenerator {
            new KeySpace(SENSOR_PREFIX_DEFAULT, numSensors),
            Duration.ofMillis(periodMs)))
        .setGeneratorConfig(new LoadGeneratorConfig(
-            TitanRecordGeneratorFactory.forConstantValue(value),
+            TitanRecordGenerator.forConstantValue(value),
            recordSender))
        .withThreads(threads);
  }
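
Note that while the ConfigurationKeys entry and the environment lookup now use HTTP_URL, the fallback constant keeps its old name, HTTP_URI_DEFAULT.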
 package theodolite.commons.workloadgeneration;

+import java.time.Clock;
 import titan.ccp.model.records.ActivePowerRecord;

 /**
  * A factory for creating {@link RecordGenerator}s that creates Titan {@link ActivePowerRecord}s.
  */
-public final class TitanRecordGeneratorFactory {
+public final class TitanRecordGenerator implements RecordGenerator<ActivePowerRecord> {

-  private TitanRecordGeneratorFactory() {}
+  private final Clock clock;
+
+  private final double constantValue;
+
+  private TitanRecordGenerator(final double constantValue) {
+    this.constantValue = constantValue;
+    this.clock = Clock.systemUTC();
+  }
+
+  /* default */ TitanRecordGenerator(final double constantValue, final Clock clock) {
+    this.constantValue = constantValue;
+    this.clock = clock;
+  }

   /**
    * Create a {@link RecordGenerator} that generates Titan {@link ActivePowerRecord}s with a
    * constant value.
    */
   public static RecordGenerator<ActivePowerRecord> forConstantValue(final double value) {
-    return sensor -> new ActivePowerRecord(sensor, System.currentTimeMillis(), value);
+    return new TitanRecordGenerator(value);
+  }
+
+  @Override
+  public ActivePowerRecord generate(final String key) {
+    return new ActivePowerRecord(key, this.clock.millis(), this.constantValue);
   }

 }
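
Turning the factory into a TitanRecordGenerator class with an injectable Clock, instead of a lambda that calls System.currentTimeMillis(), is what enables the deterministic timestamp assertions in the new TitanRecordGeneratorTest further below.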
@@ -10,7 +10,6 @@ import static com.github.tomakehurst.wiremock.client.WireMock.urlPathEqualTo;
 import static com.github.tomakehurst.wiremock.client.WireMock.verify;
 import static com.github.tomakehurst.wiremock.core.WireMockConfiguration.options;
 import com.github.tomakehurst.wiremock.junit.WireMockRule;
-import com.google.gson.Gson;
 import java.net.URI;
 import org.junit.Before;
 import org.junit.Rule;
@@ -21,8 +20,6 @@ public class HttpRecordSenderTest {

   private HttpRecordSender<ActivePowerRecord> httpRecordSender;

-  private Gson gson;
-
   @Rule
   public WireMockRule wireMockRule = new WireMockRule(options().dynamicPort());

@@ -30,7 +27,6 @@ public class HttpRecordSenderTest {
   public void setup() {
     this.httpRecordSender =
         new HttpRecordSender<>(URI.create("http://localhost:" + this.wireMockRule.port()));
-    this.gson = new Gson();
   }

   @Test
@@ -45,8 +41,9 @@ public class HttpRecordSenderTest {
     final ActivePowerRecord record = new ActivePowerRecord("my-id", 12345L, 12.34);
     this.httpRecordSender.send(record);

+    final String expectedJson = "{\"identifier\":\"my-id\",\"timestamp\":12345,\"valueInW\":12.34}";
     verify(exactly(1), postRequestedFor(urlEqualTo("/"))
-        .withRequestBody(equalTo(this.gson.toJson(record)))); // toJson
+        .withRequestBody(equalTo(expectedJson))); // toJson
   }

 }
package theodolite.commons.workloadgeneration;

import java.time.Clock;
import java.time.Instant;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.ZoneOffset;
import org.junit.Assert;
import org.junit.Test;
import titan.ccp.model.records.ActivePowerRecord;

public class TitanRecordGeneratorTest {

  @Test
  public void testGenerate() {
    final ZoneId zoneId = ZoneOffset.UTC;
    final LocalDateTime dateTime = LocalDateTime.of(2022, 1, 17, 14, 2, 42);
    final Instant instant = dateTime.atZone(zoneId).toInstant();
    final TitanRecordGenerator generator =
        new TitanRecordGenerator(42.0, Clock.fixed(instant, zoneId));

    final ActivePowerRecord activePowerRecord = generator.generate("my-identifier");
    Assert.assertEquals("my-identifier", activePowerRecord.getIdentifier());
    Assert.assertEquals(instant.toEpochMilli(), activePowerRecord.getTimestamp());
    Assert.assertEquals(42.0, activePowerRecord.getValueInW(), 0.001);
  }

  @Test
  public void testTimestampForArbitraryClockTimeZone() {
    final LocalDateTime dateTime = LocalDateTime.of(2022, 1, 17, 14, 2, 42);
    final Instant instant = dateTime.atZone(ZoneId.of("Europe/Paris")).toInstant();
    // Setting of ZoneId should have no impact on result as we request epoch millis
    final Clock clock = Clock.fixed(instant, ZoneId.of("America/Sao_Paulo"));
    final TitanRecordGenerator generator = new TitanRecordGenerator(42.0, clock);

    final ActivePowerRecord activePowerRecord = generator.generate("my-identifier");
    Assert.assertEquals(instant.toEpochMilli(), activePowerRecord.getTimestamp());
  }

}
@@ -8,7 +8,6 @@ import org.slf4j.LoggerFactory;
 /**
  * Logs all Key Value pairs.
  */
-@SuppressWarnings({"unused"})
 public class LogKeyValue extends DoFn<KV<String, String>, KV<String, String>> {
   private static final long serialVersionUID = 4328743;
   private static final Logger LOGGER = LoggerFactory.getLogger(LogKeyValue.class);
@@ -19,9 +18,7 @@ public class LogKeyValue extends DoFn<KV<String, String>, KV<String, String>> {
   @ProcessElement
   public void processElement(@Element final KV<String, String> kv,
       final OutputReceiver<KV<String, String>> out) {
-    if (LOGGER.isInfoEnabled()) {
-      LOGGER.info("Key: {}, Value: {}", kv.getKey(), kv.getValue());
-    }
+    LOGGER.info("Key: {}, Value: {}", kv.getKey(), kv.getValue());
     out.output(kv);
   }
 }
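
Dropping the isInfoEnabled() guard is safe here because SLF4J's parameterized logging defers message formatting until the level check inside info() has passed. The contrast, reusing LOGGER and kv from the class above:

// A guard is only needed when the message is built eagerly:
if (LOGGER.isInfoEnabled()) {
  LOGGER.info("Key: " + kv.getKey() + ", Value: " + kv.getValue());
}

// Parameterized logging performs no string concatenation when INFO is
// disabled, so the guard adds nothing:
LOGGER.info("Key: {}, Value: {}", kv.getKey(), kv.getValue());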
The identical formatter_settings_version bump from 15 to 21 is also applied to the Eclipse JDT settings files of nine further modules; each of those diffs is the same as the settings diff shown earlier.
@@ -2,4 +2,6 @@ plugins {
   id 'theodolite.beam'
 }

+dependencies {
+  implementation ('io.confluent:kafka-streams-avro-serde:5.3.2')
+}
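
A hedged note: this dependency plausibly supplies the Confluent Avro serde base classes that the reworked deserializers in this module, such as ActivePowerRecordDeserializer, build on; the compare itself does not show where it is consumed.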
@@ -14,13 +14,18 @@ import titan.ccp.model.records.ActivePowerRecord;


 /**
- * Duplicates the Kv containing the (Children,Parents) pair as a flat map.
+ * Duplicates the {@link KV} containing the (children,parents) pairs as flatMap.
  */
-public class DuplicateAsFlatMap extends DoFn
-    <KV<String, ActivePowerRecord>, KV<SensorParentKey, ActivePowerRecord>> {
+public class DuplicateAsFlatMap
+    extends DoFn<KV<String, ActivePowerRecord>, KV<SensorParentKey, ActivePowerRecord>> {
+
   private static final long serialVersionUID = -5132355515723961647L;
-  @StateId("parents")
+
+  private static final String STATE_STORE_NAME = "DuplicateParents";
+
+  @StateId(STATE_STORE_NAME)
   private final StateSpec<ValueState<Set<String>>> parents = StateSpecs.value(); // NOPMD
+
   private final PCollectionView<Map<String, Set<String>>> childParentPairMap;

   public DuplicateAsFlatMap(final PCollectionView<Map<String, Set<String>>> childParentPairMap) {
@@ -28,21 +33,21 @@ public class DuplicateAsFlatMap extends DoFn
     this.childParentPairMap = childParentPairMap;
   }

-
   /**
    * Generate a KV-pair for every child-parent match.
    */
   @ProcessElement
-  public void processElement(@Element final KV<String, ActivePowerRecord> kv,
+  public void processElement(
+      @Element final KV<String, ActivePowerRecord> kv,
       final OutputReceiver<KV<SensorParentKey, ActivePowerRecord>> out,
-                             @StateId("parents") final ValueState<Set<String>> state,
+      @StateId(STATE_STORE_NAME) final ValueState<Set<String>> state,
       final ProcessContext c) {

     final ActivePowerRecord record = kv.getValue() == null ? null : kv.getValue();
     final Set<String> newParents =
-        c.sideInput(childParentPairMap).get(kv.getKey()) == null
+        c.sideInput(this.childParentPairMap).get(kv.getKey()) == null
             ? Collections.emptySet()
-            : c.sideInput(childParentPairMap).get(kv.getKey());
+            : c.sideInput(this.childParentPairMap).get(kv.getKey());
     final Set<String> oldParents =
         MoreObjects.firstNonNull(state.read(), Collections.emptySet());
     // Forward new Pairs if they exist
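
Using the STATE_STORE_NAME constant for both the StateSpec declaration and the @StateId parameter in processElement keeps the two references to Beam's state cell in sync; previously they had to match as two independent "parents" string literals.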
Original line number Original line Diff line number Diff line
@@ -66,8 +66,8 @@ public final class Uc4BeamPipeline extends AbstractPipeline {
    final Duration gracePeriod =
    final Duration gracePeriod =
        Duration.standardSeconds(config.getInt(ConfigurationKeys.GRACE_PERIOD_MS));
        Duration.standardSeconds(config.getInt(ConfigurationKeys.GRACE_PERIOD_MS));


    // Build kafka configuration
    // Build Kafka configuration
    final Map<String, Object> consumerConfig = this.buildConsumerConfig();
    final Map<String, Object> consumerConfig = super.buildConsumerConfig();
    final Map<String, Object> configurationConfig = this.configurationConfig(config);
    final Map<String, Object> configurationConfig = this.configurationConfig(config);


    // Set Coders for Classes that will be distributed
    // Set Coders for Classes that will be distributed
@@ -77,25 +77,34 @@ public final class Uc4BeamPipeline extends AbstractPipeline {
    // Read from Kafka
    // ActivePowerRecords
    final KafkaActivePowerTimestampReader kafkaActivePowerRecordReader =
-        new KafkaActivePowerTimestampReader(this.bootstrapServer, this.inputTopic, consumerConfig);
+        new KafkaActivePowerTimestampReader(
+            this.bootstrapServer,
+            this.inputTopic,
+            consumerConfig);

    // Configuration Events
    final KafkaGenericReader<Event, String> kafkaConfigurationReader =
        new KafkaGenericReader<>(
-            this.bootstrapServer, configurationTopic, configurationConfig,
-            EventDeserializer.class, StringDeserializer.class);
+            this.bootstrapServer,
+            configurationTopic,
+            configurationConfig,
+            EventDeserializer.class,
+            StringDeserializer.class);

-    // Transform into AggregatedActivePowerRecords into ActivePowerRecords
-    final AggregatedToActive aggregatedToActive = new AggregatedToActive();
-
    // Write to Kafka
    final KafkaWriterTransformation<AggregatedActivePowerRecord> kafkaOutput =
        new KafkaWriterTransformation<>(
-            this.bootstrapServer, outputTopic, AggregatedActivePowerRecordSerializer.class);
+            this.bootstrapServer,
+            outputTopic,
+            AggregatedActivePowerRecordSerializer.class,
+            super.buildProducerConfig());

    final KafkaWriterTransformation<AggregatedActivePowerRecord> kafkaFeedback =
        new KafkaWriterTransformation<>(
-            this.bootstrapServer, feedbackTopic, AggregatedActivePowerRecordSerializer.class);
+            this.bootstrapServer,
+            feedbackTopic,
+            AggregatedActivePowerRecordSerializer.class,
+            super.buildProducerConfig());

    // Apply pipeline transformations
    final PCollection<KV<String, ActivePowerRecord>> values = this
@@ -115,7 +124,10 @@ public final class Uc4BeamPipeline extends AbstractPipeline {
            .withBootstrapServers(this.bootstrapServer)
            .withTopic(feedbackTopic)
            .withKeyDeserializer(StringDeserializer.class)
-            .withValueDeserializer(AggregatedActivePowerRecordDeserializer.class)
+            .withValueDeserializerAndCoder(
+                AggregatedActivePowerRecordDeserializer.class,
+                AvroCoder.of(AggregatedActivePowerRecord.class))
+            .withConsumerConfigUpdates(consumerConfig)
            .withTimestampPolicyFactory(
                (tp, previousWaterMark) -> new AggregatedActivePowerRecordEventTimePolicy(
                    previousWaterMark))
@@ -123,11 +135,12 @@ public final class Uc4BeamPipeline extends AbstractPipeline {
        .apply("Apply Windows", Window.into(FixedWindows.of(duration)))
        .apply("Apply Windows", Window.into(FixedWindows.of(duration)))
        // Convert into the correct data format
        // Convert into the correct data format
        .apply("Convert AggregatedActivePowerRecord to ActivePowerRecord",
        .apply("Convert AggregatedActivePowerRecord to ActivePowerRecord",
            MapElements.via(aggregatedToActive))
            MapElements.via(new AggregatedToActive()))
        .apply("Set trigger for feedback", Window
        .apply("Set trigger for feedback", Window
            .<KV<String, ActivePowerRecord>>configure()
            .<KV<String, ActivePowerRecord>>configure()
            .triggering(Repeatedly.forever(
            .triggering(Repeatedly.forever(
                AfterProcessingTime.pastFirstElementInPane()
                AfterProcessingTime
                    .pastFirstElementInPane()
                    .plusDelayOf(triggerDelay)))
                    .plusDelayOf(triggerDelay)))
            .withAllowedLateness(gracePeriod)
            .withAllowedLateness(gracePeriod)
            .discardingFiredPanes());
            .discardingFiredPanes());
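
For context, the trigger configured above re-fires a pane repeatedly, each time a fixed processing-time delay after the first element of the pane arrives, and discards already-fired panes. A minimal, self-contained sketch of the same pattern (window size and delay durations are hypothetical values, not the benchmark's configuration):

```java
import org.apache.beam.sdk.transforms.windowing.AfterProcessingTime;
import org.apache.beam.sdk.transforms.windowing.FixedWindows;
import org.apache.beam.sdk.transforms.windowing.Repeatedly;
import org.apache.beam.sdk.transforms.windowing.Window;
import org.joda.time.Duration;

// Fires a pane whenever 15 s of processing time have passed since the first
// element in the pane, forever; fired panes are discarded, not accumulated.
final Window<String> feedbackWindow =
    Window.<String>into(FixedWindows.of(Duration.standardMinutes(1)))
        .triggering(Repeatedly.forever(
            AfterProcessingTime.pastFirstElementInPane()
                .plusDelayOf(Duration.standardSeconds(15))))
        .withAllowedLateness(Duration.standardSeconds(5))
        .discardingFiredPanes();
```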
@@ -170,17 +183,13 @@ public final class Uc4BeamPipeline extends AbstractPipeline {
                .accumulatingFiredPanes())
            .apply(View.asMap());

-    final FilterNullValues filterNullValues = new FilterNullValues();
-
    // Build pairs of every sensor reading and parent
    final PCollection<KV<SensorParentKey, ActivePowerRecord>> flatMappedValues =
        inputCollection.apply(
            "Duplicate as flatMap",
-            ParDo.of(new DuplicateAsFlatMap(childParentPairMap))
-                .withSideInputs(childParentPairMap))
+            ParDo.of(new DuplicateAsFlatMap(childParentPairMap)).withSideInputs(childParentPairMap))
            .apply("Filter only latest changes", Latest.perKey())
-            .apply("Filter out null values",
-                Filter.by(filterNullValues));
+            .apply("Filter out null values", Filter.by(new FilterNullValues()));

    final SetIdForAggregated setIdForAggregated = new SetIdForAggregated();
    final SetKeyToGroup setKeyToGroup = new SetKeyToGroup();
@@ -204,8 +213,7 @@ public final class Uc4BeamPipeline extends AbstractPipeline {


    aggregations.apply("Write to aggregation results", kafkaOutput);
    aggregations.apply("Write to aggregation results", kafkaOutput);


    aggregations
    aggregations.apply("Write to feedback topic", kafkaFeedback);
        .apply("Write to feedback topic", kafkaFeedback);


  }
  }


@@ -217,13 +225,14 @@ public final class Uc4BeamPipeline extends AbstractPipeline {
   */
  public Map<String, Object> configurationConfig(final Configuration config) {
    final Map<String, Object> consumerConfig = new HashMap<>();
-    consumerConfig.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG,
+    consumerConfig.put(
+        ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG,
        config.getString(ConfigurationKeys.ENABLE_AUTO_COMMIT_CONFIG));
-    consumerConfig.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG,
-        config
-            .getString(ConfigurationKeys.AUTO_OFFSET_RESET_CONFIG));
+    consumerConfig.put(
+        ConsumerConfig.AUTO_OFFSET_RESET_CONFIG,
+        config.getString(ConfigurationKeys.AUTO_OFFSET_RESET_CONFIG));

-    consumerConfig.put(ConsumerConfig.GROUP_ID_CONFIG, config
+    consumerConfig.put(
+        ConsumerConfig.GROUP_ID_CONFIG, config
            .getString(ConfigurationKeys.APPLICATION_NAME) + "-configuration");
    return consumerConfig;
  }
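
The Kafka writers in this pipeline now also receive an explicit producer configuration via `super.buildProducerConfig()` (see the hunks above). This diff does not show what that helper returns; as a hedged illustration only, a producer config map of this shape uses the standard Kafka client keys:

```java
import java.util.HashMap;
import java.util.Map;
import org.apache.kafka.clients.producer.ProducerConfig;

// Illustrative only: AbstractPipeline's buildProducerConfig() may set
// different or additional keys; these are standard Kafka producer settings.
final Map<String, Object> producerConfig = new HashMap<>();
producerConfig.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
producerConfig.put(ProducerConfig.ACKS_CONFIG, "all");
producerConfig.put(ProducerConfig.LINGER_MS_CONFIG, 100);
```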
@@ -12,11 +12,12 @@ import org.apache.beam.sdk.values.KV;
 */
public class UpdateChildParentPairs extends DoFn<KV<String, Set<String>>, KV<String, Set<String>>> {

+  private static final String STATE_STORE_NAME = "UpdateParents";
+
  private static final long serialVersionUID = 1L;

-  @StateId("parents")
-  private final StateSpec<ValueState<Set<String>>> parents = // NOPMD
-      StateSpecs.value();
+  @StateId(STATE_STORE_NAME)
+  private final StateSpec<ValueState<Set<String>>> parents = StateSpecs.value(); // NOPMD

  /**
   * Match the changes accordingly.
@@ -24,9 +25,10 @@ public class UpdateChildParentPairs extends DoFn<KV<String, Set<String>>, KV<Str
   * @param kv the sensor parents set that contains the changes.
   */
  @ProcessElement
-  public void processElement(@Element final KV<String, Set<String>> kv,
+  public void processElement(
+      @Element final KV<String, Set<String>> kv,
      final OutputReceiver<KV<String, Set<String>>> out,
-      @StateId("parents") final ValueState<Set<String>> state) {
+      @StateId(STATE_STORE_NAME) final ValueState<Set<String>> state) {
    if (kv.getValue() == null || !kv.getValue().equals(state.read())) {
      out.output(kv);
      state.write(kv.getValue());
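
This DoFn is stateful: Beam keeps one `ValueState` cell per key and window, so only changed child-parent sets are forwarded downstream. As a sketch of how such a transform might be applied (the input collection name is hypothetical, standing in for the keyed configuration stream built earlier in the pipeline):

```java
import java.util.Set;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.values.KV;
import org.apache.beam.sdk.values.PCollection;

// Hypothetical wiring: configurationEvents is assumed to be a
// PCollection<KV<String, Set<String>>> of sensor-to-parents updates.
final PCollection<KV<String, Set<String>>> changedPairs =
    configurationEvents.apply(
        "Update child-parent pairs",
        ParDo.of(new UpdateChildParentPairs()));
```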
@@ -11,8 +11,7 @@ import org.apache.beam.sdk.coders.CoderException;
import titan.ccp.model.records.AggregatedActivePowerRecord;

/**
- * Wrapper Class that encapsulates a AggregatedActivePowerRecord Serde in a
- * org.apache.beam.sdk.coders.Coder.
+ * {@link Coder} for an {@link AggregatedActivePowerRecord}.
 */
@SuppressWarnings("serial")
public class AggregatedActivePowerRecordCoder extends Coder<AggregatedActivePowerRecord>
@@ -45,13 +44,13 @@ public class AggregatedActivePowerRecordCoder extends Coder<AggregatedActivePowe

  @Override
  public List<? extends Coder<?>> getCoderArguments() {
-    return null;
+    return List.of();
  }

  @Override
  public void verifyDeterministic() throws NonDeterministicException {
    if (!DETERMINISTIC) {
-      throw new NonDeterministicException(this, "This class should be deterministic!");
+      throw new NonDeterministicException(this, "This class should be deterministic.");
    }
  }
}
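
Returning an empty list from `getCoderArguments` matches the Beam contract for coders without component coders. As a usage sketch, a custom coder like this one would typically be registered with the pipeline's coder registry (the `pipeline` variable is assumed to exist):

```java
import org.apache.beam.sdk.Pipeline;
import titan.ccp.model.records.AggregatedActivePowerRecord;

// Register the custom coder so Beam can encode the record type whenever
// it is shuffled between workers; `pipeline` is assumed to exist.
pipeline.getCoderRegistry().registerCoderForClass(
    AggregatedActivePowerRecord.class,
    new AggregatedActivePowerRecordCoder());
```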
package serialization;

-import java.io.ByteArrayInputStream;
-import java.io.IOException;
-import org.apache.beam.sdk.coders.AvroCoder;
+import io.confluent.kafka.streams.serdes.avro.SpecificAvroDeserializer;
import org.apache.kafka.common.serialization.Deserializer;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
import titan.ccp.model.records.AggregatedActivePowerRecord;

/**
- * Wrapper Class that encapsulates a IMonitoringRecordSerde.serializer in a Deserializer
+ * {@link Deserializer} for an {@link AggregatedActivePowerRecord}.
 */
public class AggregatedActivePowerRecordDeserializer
-    implements Deserializer<AggregatedActivePowerRecord> {
-
-  private static final Logger LOGGER =
-      LoggerFactory.getLogger(AggregatedActivePowerRecordDeserializer.class);
-
-  private final transient AvroCoder<AggregatedActivePowerRecord> avroEnCoder =
-      AvroCoder.of(AggregatedActivePowerRecord.class);
-
-  @Override
-  public AggregatedActivePowerRecord deserialize(final String topic, final byte[] data) {
-    AggregatedActivePowerRecord value = null;
-    try {
-      value = this.avroEnCoder.decode(new ByteArrayInputStream(data));
-    } catch (final IOException e) {
-      LOGGER.error("Could not deserialize AggregatedActivePowerRecord", e);
-    }
-    return value;
-  }
-
+    extends SpecificAvroDeserializer<AggregatedActivePowerRecord> {
}
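
Since the class now extends Confluent's `SpecificAvroDeserializer`, it no longer carries Beam coder information; this is why the pipeline change above pairs it with an explicit `AvroCoder` via `withValueDeserializerAndCoder`. A condensed sketch of that pairing (broker address and topic name are hypothetical):

```java
import org.apache.beam.sdk.coders.AvroCoder;
import org.apache.beam.sdk.io.kafka.KafkaIO;
import org.apache.kafka.common.serialization.StringDeserializer;
import titan.ccp.model.records.AggregatedActivePowerRecord;

// The Confluent deserializer handles the wire format; the AvroCoder tells
// Beam how to (de)serialize the record when it crosses worker boundaries.
KafkaIO.<String, AggregatedActivePowerRecord>read()
    .withBootstrapServers("localhost:9092")
    .withTopic("feedback")
    .withKeyDeserializer(StringDeserializer.class)
    .withValueDeserializerAndCoder(
        AggregatedActivePowerRecordDeserializer.class,
        AvroCoder.of(AggregatedActivePowerRecord.class));
```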
package serialization;

-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-import org.apache.beam.sdk.coders.AvroCoder;
+import io.confluent.kafka.streams.serdes.avro.SpecificAvroSerializer;
import org.apache.kafka.common.serialization.Serializer;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
import titan.ccp.model.records.AggregatedActivePowerRecord;

/**
- * Wrapper Class that encapsulates a IMonitoringRecordSerde.serializer in a Serializer
+ * {@link Serializer} for an {@link AggregatedActivePowerRecord}.
 */
public class AggregatedActivePowerRecordSerializer
-    implements Serializer<AggregatedActivePowerRecord> {
-
-  private static final Logger LOGGER =
-      LoggerFactory.getLogger(AggregatedActivePowerRecordSerializer.class);
-
-  private final transient AvroCoder<AggregatedActivePowerRecord> avroEnCoder =
-      AvroCoder.of(AggregatedActivePowerRecord.class);
-
-  @Override
-  public byte[] serialize(final String topic, final AggregatedActivePowerRecord data) {
-    final ByteArrayOutputStream out = new ByteArrayOutputStream();
-    try {
-      this.avroEnCoder.encode(data, out);
-    } catch (final IOException e) {
-      LOGGER.error("Could not serialize AggregatedActivePowerRecord", e);
-    }
-    final byte[] result = out.toByteArray();
-    try {
-      out.close();
-    } catch (final IOException e) {
-      LOGGER.error(
-          "Could not close output stream after serialization of AggregatedActivePowerRecord", e);
-    }
-    return result;
-  }
-
-  @Override
-  public void close() {
-    Serializer.super.close();
-  }
+    extends SpecificAvroSerializer<AggregatedActivePowerRecord> {
}
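
Confluent's `SpecificAvroSerializer` and `SpecificAvroDeserializer` talk to a schema registry, so both ends must be configured with its URL before use. A minimal sketch (the registry URL is a hypothetical value):

```java
import java.util.Map;

// Configure the serde with the schema registry endpoint; the boolean
// argument states whether the serde is used for record keys.
final AggregatedActivePowerRecordSerializer serializer =
    new AggregatedActivePowerRecordSerializer();
serializer.configure(
    Map.of("schema.registry.url", "http://localhost:8081"),
    false);
```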
@@ -61,7 +61,7 @@ cleanup_settings_version=2
eclipse.preferences.version=1
editor_save_participant_org.eclipse.jdt.ui.postsavelistener.cleanup=true
formatter_profile=_CAU-SE-Style
-formatter_settings_version=15
+formatter_settings_version=21
org.eclipse.jdt.ui.ignorelowercasenames=true
org.eclipse.jdt.ui.importorder=;
org.eclipse.jdt.ui.ondemandthreshold=99
@@ -61,7 +61,7 @@ cleanup_settings_version=2
eclipse.preferences.version=1
editor_save_participant_org.eclipse.jdt.ui.postsavelistener.cleanup=true
formatter_profile=_CAU-SE-Style
-formatter_settings_version=15
+formatter_settings_version=21
org.eclipse.jdt.ui.ignorelowercasenames=true
org.eclipse.jdt.ui.importorder=;
org.eclipse.jdt.ui.ondemandthreshold=99
@@ -61,7 +61,7 @@ cleanup_settings_version=2
eclipse.preferences.version=1
editor_save_participant_org.eclipse.jdt.ui.postsavelistener.cleanup=true
formatter_profile=_CAU-SE-Style
-formatter_settings_version=15
+formatter_settings_version=21
org.eclipse.jdt.ui.ignorelowercasenames=true
org.eclipse.jdt.ui.importorder=;
org.eclipse.jdt.ui.ondemandthreshold=99
-# Theodolite project
+# Theodolite

This project uses Quarkus, the Supersonic Subatomic Java Framework.

-If you want to learn more about Quarkus, please visit its website: <https://quarkus.io/> .
+If you want to learn more about Quarkus, please visit its website: https://quarkus.io/.

## Running the application in dev mode

-You can run your application in dev mode using:
+You can run your application in dev mode that enables live coding using:

```sh
./gradlew quarkusDev
@@ -23,8 +23,10 @@ The application can be packaged using:
./gradlew build
```

-It produces the `theodolite-0.7.0-SNAPSHOT-runner.jar` file in the `/build` directory. Be aware that it’s not
-an _über-jar_ as the dependencies are copied into the `build/lib` directory.
+It produces the `quarkus-run.jar` file in the `build/quarkus-app/` directory.
+Be aware that it’s not an _über-jar_ as the dependencies are copied into the `build/quarkus-app/lib/` directory.
+
+The application is now runnable using `java -jar build/quarkus-app/quarkus-run.jar`.

If you want to build an _über-jar_, execute the following command:

@@ -32,12 +34,10 @@ If you want to build an _über-jar_, execute the following command:
./gradlew build -Dquarkus.package.type=uber-jar
```

-The application is now runnable using `java -jar build/theodolite-0.7.0-SNAPSHOT-runner.jar`.
+The application, packaged as an _über-jar_, is now runnable using `java -jar build/*-runner.jar`.

## Creating a native executable

-It is recommended to use the native GraalVM images to create executable jars from Theodolite. For more information please visit the [Native Image guide](https://www.graalvm.org/reference-manual/native-image/).
-
You can create a native executable using:

```sh
@@ -55,15 +55,21 @@ You can then execute your native executable with:

If you want to learn more about building native executables, please consult https://quarkus.io/guides/gradle-tooling.

-## Build docker images
+## Building container images

-For the jvm version use:
+For the JVM version use:

```sh
./gradlew build
docker build -f src/main/docker/Dockerfile.jvm -t theodolite-jvm .
```

+Alternatively, you can also use Kaniko to build the image:
+
+```sh
+docker run -it --rm --name kaniko -v "`pwd`":/theodolite --entrypoint "" gcr.io/kaniko-project/executor:debug /kaniko/executor --context /theodolite --dockerfile src/main/docker/Dockerfile.jvm --no-push
+```
+
For the native image version use:

```sh
@@ -71,7 +77,7 @@ For the native image version use:
docker build -f src/main/docker/Dockerfile.native -t theodolite-native .
```

-## Execute docker images
+## Run a container

Remember to set the environment variables first.

plugins {
-    id 'org.jetbrains.kotlin.jvm' version "1.5.31"
-    id "org.jetbrains.kotlin.plugin.allopen" version "1.5.31"
+    id 'org.jetbrains.kotlin.jvm' version "1.6.10"
+    id "org.jetbrains.kotlin.plugin.allopen" version "1.6.10"
    id 'io.quarkus'
    id "io.gitlab.arturbosch.detekt" version "1.15.0"
    id "org.jlleitschuh.gradle.ktlint" version "10.0.0"
}

repositories {
-    mavenLocal()
    mavenCentral()
+    mavenLocal()
    jcenter()
}

@@ -26,19 +26,13 @@ dependencies {
    implementation 'com.google.code.gson:gson:2.8.9'
    implementation 'org.slf4j:slf4j-simple:1.7.32'
    implementation 'io.github.microutils:kotlin-logging:2.1.16'
-    //implementation('io.fabric8:kubernetes-client:5.4.1'){force = true}
-    //implementation('io.fabric8:kubernetes-model-core:5.4.1'){force = true}
-    //implementation('io.fabric8:kubernetes-model-common:5.4.1'){force = true}
    implementation 'org.apache.kafka:kafka-clients:2.8.0'
    implementation 'khttp:khttp:1.0.0'

-    // compile 'junit:junit:4.12'
-
    testImplementation 'io.quarkus:quarkus-junit5'
    testImplementation 'io.quarkus:quarkus-test-kubernetes-client'
    testImplementation 'io.rest-assured:rest-assured'
    testImplementation 'org.junit-pioneer:junit-pioneer:1.5.0'
-    //testImplementation 'io.fabric8:kubernetes-server-mock:5.10.1'
    testImplementation "org.mockito.kotlin:mockito-kotlin:4.0.0"
}

theodolite/build_jvm.sh
deleted 100755 → 0 (+0 −6)

-./gradlew build -x test
-
-docker build -f src/main/docker/Dockerfile.jvm -t quarkus/theodolite-jvm .
-
-docker run -i --rm -p 8080:8080 quarkus/theodolite-jvm

theodolite/build_native.sh
deleted 100755 → 0 (+0 −6)

-./gradlew build -Dquarkus.package.type=native -x test
-
-docker build -f src/main/docker/Dockerfile.native -t quarkus/theodolite .
-
-docker run -i --rm -p 8080:8080 quarkus/theodolite
@@ -20,7 +20,7 @@ spec:
        properties:
          spec:
            type: object
-            required: ["sut", "loadGenerator", "resourceTypes", "loadTypes", "kafkaConfig"]
+            required: ["sut", "loadGenerator", "resourceTypes", "loadTypes"]
            properties:
              name:
                description: This field exists only for technical reasons and should not be set by the user. The value of the field will be overwritten.
#Gradle properties
-quarkusPluginVersion=2.5.2.Final
-quarkusPlatformArtifactId=quarkus-bom
quarkusPluginId=io.quarkus
+quarkusPluginVersion=2.6.3.Final
quarkusPlatformGroupId=io.quarkus.platform
-quarkusPlatformVersion=2.5.2.Final
+quarkusPlatformArtifactId=quarkus-bom
+quarkusPlatformVersion=2.6.3.Final

#org.gradle.logging.level=INFO
\ No newline at end of file
@@ -18,38 +18,24 @@
#
# Then run the container using :
#
-# docker run -i --rm -p 8080:8080 -p 5005:5005 -e JAVA_ENABLE_DEBUG="true" quarkus/theodolite-jvm
+# docker run -i --rm -p 8080:8080 quarkus/theodolite-jvm
#
###
-FROM registry.access.redhat.com/ubi8/ubi-minimal:8.4
+FROM registry.access.redhat.com/ubi8/openjdk-11-runtime:1.10

-ARG JAVA_PACKAGE=java-11-openjdk-headless
-ARG RUN_JAVA_VERSION=1.3.8
ENV LANG='en_US.UTF-8' LANGUAGE='en_US:en'
-# Install java and the run-java script
-# Also set up permissions for user `1001`
-RUN microdnf install curl ca-certificates ${JAVA_PACKAGE} \
-    && microdnf update \
-    && microdnf clean all \
-    && mkdir /deployments \
-    && chown 1001 /deployments \
-    && chmod "g+rwX" /deployments \
-    && chown 1001:root /deployments \
-    && curl https://repo1.maven.org/maven2/io/fabric8/run-java-sh/${RUN_JAVA_VERSION}/run-java-sh-${RUN_JAVA_VERSION}-sh.sh -o /deployments/run-java.sh \
-    && chown 1001 /deployments/run-java.sh \
-    && chmod 540 /deployments/run-java.sh \
-    && echo "securerandom.source=file:/dev/urandom" >> /etc/alternatives/jre/conf/security/java.security

# Configure the JAVA_OPTIONS, you can add -XshowSettings:vm to also display the heap size.
ENV JAVA_OPTIONS="-Dquarkus.http.host=0.0.0.0 -Djava.util.logging.manager=org.jboss.logmanager.LogManager"

# We make four distinct layers so if there are application changes the library layers can be re-used
-COPY --chown=1001 build/quarkus-app/lib/ /deployments/lib/
-COPY --chown=1001 build/quarkus-app/*.jar /deployments/
-COPY --chown=1001 build/quarkus-app/app/ /deployments/app/
-COPY --chown=1001 build/quarkus-app/quarkus/ /deployments/quarkus/
+COPY --chown=185 build/quarkus-app/lib/ /deployments/lib/
+COPY --chown=185 build/quarkus-app/*.jar /deployments/
+COPY --chown=185 build/quarkus-app/app/ /deployments/app/
+COPY --chown=185 build/quarkus-app/quarkus/ /deployments/quarkus/

EXPOSE 8080
-USER 1001
+USER 185

-ENTRYPOINT [ "/deployments/run-java.sh" ]
+ENTRYPOINT [ "java", "-jar", "/deployments/quarkus-run.jar" ]
@@ -18,34 +18,20 @@
#
# Then run the container using :
#
-# docker run -i --rm -p 8080:8080 -p 5005:5005 -e JAVA_ENABLE_DEBUG="true" quarkus/theodolite-legacy-jar
+# docker run -i --rm -p 8080:8080 quarkus/theodolite-legacy-jar
#
###
-FROM registry.access.redhat.com/ubi8/ubi-minimal:8.4
+FROM registry.access.redhat.com/ubi8/openjdk-11-runtime:1.10

-ARG JAVA_PACKAGE=java-11-openjdk-headless
-ARG RUN_JAVA_VERSION=1.3.8
ENV LANG='en_US.UTF-8' LANGUAGE='en_US:en'
-# Install java and the run-java script
-# Also set up permissions for user `1001`
-RUN microdnf install curl ca-certificates ${JAVA_PACKAGE} \
-    && microdnf update \
-    && microdnf clean all \
-    && mkdir /deployments \
-    && chown 1001 /deployments \
-    && chmod "g+rwX" /deployments \
-    && chown 1001:root /deployments \
-    && curl https://repo1.maven.org/maven2/io/fabric8/run-java-sh/${RUN_JAVA_VERSION}/run-java-sh-${RUN_JAVA_VERSION}-sh.sh -o /deployments/run-java.sh \
-    && chown 1001 /deployments/run-java.sh \
-    && chmod 540 /deployments/run-java.sh \
-    && echo "securerandom.source=file:/dev/urandom" >> /etc/alternatives/jre/conf/security/java.security

# Configure the JAVA_OPTIONS, you can add -XshowSettings:vm to also display the heap size.
ENV JAVA_OPTIONS="-Dquarkus.http.host=0.0.0.0 -Djava.util.logging.manager=org.jboss.logmanager.LogManager"

COPY build/lib/* /deployments/lib/
-COPY build/*-runner.jar /deployments/app.jar
+COPY build/*-runner.jar /deployments/quarkus-run.jar

EXPOSE 8080
-USER 1001
+USER 185

-ENTRYPOINT [ "/deployments/run-java.sh" ]
+ENTRYPOINT [ "java", "-jar", "/deployments/quarkus-run.jar" ]
@@ -14,12 +14,12 @@
# docker run -i --rm -p 8080:8080 quarkus/theodolite
#
###
-FROM registry.access.redhat.com/ubi8/ubi-minimal:8.4
-WORKDIR /deployments/
-RUN chown 1001 /deployments \
-    && chmod "g+rwX" /deployments \
-    && chown 1001:root /deployments
-COPY --chown=1001:root build/*-runner /deployments/application
+FROM quay.io/quarkus/quarkus-micro-image:1.0
+WORKDIR /work/
+RUN chown 1001 /work \
+    && chmod "g+rwX" /work \
+    && chown 1001:root /work
+COPY --chown=1001:root build/*-runner /work/application

EXPOSE 8080
USER 1001
@@ -15,8 +15,7 @@
#
###
FROM quay.io/quarkus/quarkus-distroless-image:1.0
-WORKDIR /deployments/
-COPY build/*-runner /deployments/application
+COPY build/*-runner /application

EXPOSE 8080
USER nonroot
@@ -26,30 +26,35 @@ class ConfigMapResourceSet: ResourceSet, KubernetesResource {
                .withName(name)
                .get() ?: throw DeploymentFailedException("Cannot find ConfigMap with name '$name'."))
                .data
-                .filter { it.key.endsWith(".yaml") }
+                .filter { it.key.endsWith(".yaml") || it.key.endsWith(".yml")}
        } catch (e: KubernetesClientException) {
            throw DeploymentFailedException("Cannot find or read ConfigMap with name '$name'.", e)
        }

        if (::files.isInitialized) {
-            resources = resources.filter { files.contains(it.key) }
-
-            if (resources.size != files.size) {
+            val filteredResources = resources.filter { files.contains(it.key) }
+
+            if (filteredResources.size != files.size) {
                throw DeploymentFailedException("Could not find all specified Kubernetes manifests files")
            }
+            resources = filteredResources
        }

        return try {
            resources
-                .map { Pair(
-                    getKind(resource = it.value),
-                    it) }
+                .map {
+                    Pair(
+                        getKind(resource = it.value),
+                        it
+                    )
+                }
                .map {
                    Pair(
                        it.second.key,
-                        loader.loadK8sResource(it.first, it.second.value)) }
+                        loader.loadK8sResource(it.first, it.second.value)
+                    )
+                }
        } catch (e: IllegalArgumentException) {
-            throw DeploymentFailedException("Can not create resource set from specified configmap", e)
+            throw DeploymentFailedException("Cannot create resource set from specified ConfigMap", e)
        }

    }
@@ -58,10 +63,7 @@ class ConfigMapResourceSet: ResourceSet, KubernetesResource {
        val parser = YamlParserFromString()
        val resourceAsMap = parser.parse(resource, HashMap<String, String>()::class.java)

-        return try {
-            resourceAsMap?.get("kind") !!
-        } catch (e: NullPointerException) {
-            throw DeploymentFailedException( "Could not find field kind of Kubernetes resource: ${resourceAsMap?.get("name")}", e)
-        }
+        return resourceAsMap?.get("kind")
+            ?: throw DeploymentFailedException("Could not find field kind of Kubernetes resource: ${resourceAsMap?.get("name")}")
    }
}
\ No newline at end of file
@@ -28,7 +28,7 @@ class FileSystemResourceSet: ResourceSet, KubernetesResource {
        return try {
            File(path)
                .list() !!
-                .filter { it.endsWith(".yaml") } // consider only yaml files, e.g. ignore readme files
+                .filter { it.endsWith(".yaml") || it.endsWith(".yml") }
                .map {
                    loadSingleResource(resourceURL = it, client = client)
                }
@@ -39,7 +39,7 @@ class KubernetesBenchmark : KubernetesResource, Benchmark {
    lateinit var name: String
    lateinit var resourceTypes: List<TypeName>
    lateinit var loadTypes: List<TypeName>
-    lateinit var kafkaConfig: KafkaConfig
+    var kafkaConfig: KafkaConfig? = null
    lateinit var infrastructure: Resources
    lateinit var sut: Resources
    lateinit var loadGenerator: Resources
@@ -110,6 +110,9 @@ class KubernetesBenchmark : KubernetesResource, Benchmark {
                patcherFactory.createPatcher(it.patcher, appResources + loadGenResources).patch(override.value)
            }
        }

+        val kafkaConfig = this.kafkaConfig
+
        return KubernetesBenchmarkDeployment(
            sutBeforeActions = sut.beforeActions,
            sutAfterActions = sut.afterActions,
@@ -119,8 +122,8 @@ class KubernetesBenchmark : KubernetesResource, Benchmark {
            loadGenResources = loadGenResources.map { it.second },
            loadGenerationDelay = loadGenerationDelay,
            afterTeardownDelay = afterTeardownDelay,
-            kafkaConfig = hashMapOf("bootstrap.servers" to kafkaConfig.bootstrapServer),
-            topics = kafkaConfig.topics,
+            kafkaConfig = if (kafkaConfig != null) hashMapOf("bootstrap.servers" to kafkaConfig.bootstrapServer) else mapOf(),
+            topics = kafkaConfig?.topics ?: listOf(),
            client = this.client
        )
    }
@@ -31,7 +31,7 @@ class KubernetesBenchmarkDeployment(
    val loadGenResources: List<KubernetesResource>,
    private val loadGenerationDelay: Long,
    private val afterTeardownDelay: Long,
-    private val kafkaConfig: HashMap<String, Any>,
+    private val kafkaConfig: Map<String, Any>,
    private val topics: List<KafkaConfig.TopicWrapper>,
    private val client: NamespacedKubernetesClient
) : BenchmarkDeployment {
@@ -46,9 +46,12 @@ class KubernetesBenchmarkDeployment(
     *  - Deploy the needed resources.
     */
    override fun setup() {
-        val kafkaTopics = this.topics.filter { !it.removeOnly }
-            .map { NewTopic(it.name, it.numPartitions, it.replicationFactor) }
-        kafkaController.createTopics(kafkaTopics)
+        if (this.topics.isNotEmpty()) {
+            val kafkaTopics = this.topics
+                .filter { !it.removeOnly }
+                .map { NewTopic(it.name, it.numPartitions, it.replicationFactor) }
+            kafkaController.createTopics(kafkaTopics)
+        }
        sutBeforeActions.forEach { it.exec(client = client) }
        appResources.forEach { kubernetesManager.deploy(it) }
        logger.info { "Wait ${this.loadGenerationDelay} seconds before starting the load generator." }
@@ -69,7 +72,9 @@ class KubernetesBenchmarkDeployment(
        loadGenAfterActions.forEach { it.exec(client = client) }
        appResources.forEach { kubernetesManager.remove(it) }
        sutAfterActions.forEach { it.exec(client = client) }
-        kafkaController.removeTopics(this.topics.map { topic -> topic.name })
+        if (this.topics.isNotEmpty()) {
+            kafkaController.removeTopics(this.topics.map { topic -> topic.name })
+        }
        ResourceByLabelHandler(client).removePods(
            labelName = LAG_EXPORTER_POD_LABEL_NAME,
            labelValue = LAG_EXPORTER_POD_LABEL_VALUE
@@ -37,9 +37,9 @@ class TheodoliteController(
     */
    fun run() {
        sleep(5000) // wait until all states are correctly set
+        benchmarkStateChecker.start(true)
        while (true) {
            reconcile()
-            benchmarkStateChecker.start(true)
            sleep(2000)
        }
    }
@@ -98,7 +98,7 @@ class TheodoliteController(
                    }
                else -> {
                    executionStateHandler.setExecutionState(execution.name, ExecutionState.FAILURE)
-                    logger.warn { "Unexpected execution state, set state to ${ExecutionState.FAILURE.value}" }
+                    logger.warn { "Unexpected execution state, set state to ${ExecutionState.FAILURE.value}." }
                }
            }
        } catch (e: Exception) {