diff --git a/.gitignore b/.gitignore index 71305e60a1056e58f281da4c2ab397539b63ba52..e898f39a2d997d01680ff63d1fe375b4b83b00b7 100644 --- a/.gitignore +++ b/.gitignore @@ -33,3 +33,6 @@ tmp/ # Python cache files *.pyc + +# Helm +Chart.lock diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 5743d9f732630259ad5401b53e39db64536d35d2..9b10ffeabbc08a1f25a88d2b351f3e8dd6309443 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -15,6 +15,17 @@ stages: variables: DOCKER_TLS_CERTDIR: "/certs" +# Theodolite Helm Chart + +lint-helm: + stage: check + image: + name: alpine/helm:3.5.2 + entrypoint: [""] + tags: + - exec-docker + script: helm lint execution/helm/ + # Theodolite Benchmarks @@ -28,7 +39,7 @@ stages: paths: - .gradle before_script: - - cd benchmarks + - cd theodolite-benchmarks - export GRADLE_USER_HOME=`pwd`/.gradle build-benchmarks: @@ -37,8 +48,9 @@ build-benchmarks: script: ./gradlew --build-cache assemble artifacts: paths: - - "benchmarks/build/libs/*.jar" - - "benchmarks/*/build/distributions/*.tar" + - "theodolite-benchmarks/build/libs/*.jar" + - "theodolite-benchmarks/*/build/libs/*.jar" + - "theodolite-benchmarks/*/build/distributions/*.tar" expire_in: 1 day test-benchmarks: @@ -50,7 +62,7 @@ test-benchmarks: artifacts: reports: junit: - - "benchmarks/**/build/test-results/test/TEST-*.xml" + - "theodolite-benchmarks/**/build/test-results/test/TEST-*.xml" checkstyle-benchmarks: stage: check @@ -61,7 +73,7 @@ checkstyle-benchmarks: script: ./gradlew checkstyle --continue artifacts: paths: - - "benchmarks/*/build/reports/checkstyle/main.html" + - "theodolite-benchmarks/*/build/reports/checkstyle/main.html" when: on_failure expire_in: 1 day @@ -74,7 +86,7 @@ pmd-benchmarks: script: ./gradlew pmd --continue artifacts: paths: - - "benchmarks/*/build/reports/pmd/*.html" + - "theodolite-benchmarks/*/build/reports/pmd/*.html" when: on_failure expire_in: 1 day @@ -87,7 +99,7 @@ spotbugs-benchmarks: script: ./gradlew spotbugs --continue artifacts: paths: - - "benchmarks/*/build/reports/spotbugs/*.html" + - "theodolite-benchmarks/*/build/reports/spotbugs/*.html" when: on_failure expire_in: 1 day @@ -114,69 +126,94 @@ spotbugs-benchmarks: - if: "$CR_HOST && $CR_ORG && $CR_USER && $CR_PW && $IMAGE_NAME && $JAVA_PROJECT_NAME && $CI_COMMIT_TAG" when: always - changes: - - benchmarks/* - - benchmarks/$JAVA_PROJECT_NAME/**/* - - benchmarks/application-kafkastreams-commons/**/* - - benchmarks/workload-generator-commons/**/* + - theodolite-benchmarks/* + - theodolite-benchmarks/$JAVA_PROJECT_NAME/**/* + - theodolite-benchmarks/kstreams-commons/**/* + - theodolite-benchmarks/flink-commons/**/* + - theodolite-benchmarks/load-generator-commons/**/* if: "$CR_HOST && $CR_ORG && $CR_USER && $CR_PW && $IMAGE_NAME && $JAVA_PROJECT_NAME" when: always - if: "$CR_HOST && $CR_ORG && $CR_USER && $CR_PW && $IMAGE_NAME && $JAVA_PROJECT_NAME" when: manual allow_failure: true -deploy-uc1-kstreams-app: +deploy-uc1-kstreams: extends: .deploy-benchmarks variables: IMAGE_NAME: "theodolite-uc1-kstreams-app" - JAVA_PROJECT_NAME: "uc1-application" + JAVA_PROJECT_NAME: "uc1-kstreams" -deploy-uc2-kstreams-app: +deploy-uc2-kstreams: extends: .deploy-benchmarks variables: IMAGE_NAME: "theodolite-uc2-kstreams-app" - JAVA_PROJECT_NAME: "uc2-application" + JAVA_PROJECT_NAME: "uc2-kstreams" -deploy-uc3-kstreams-app: +deploy-uc3-kstreams: extends: .deploy-benchmarks variables: IMAGE_NAME: "theodolite-uc3-kstreams-app" - JAVA_PROJECT_NAME: "uc3-application" + JAVA_PROJECT_NAME: "uc3-kstreams" -deploy-uc4-kstreams-app: 
+deploy-uc4-kstreams: extends: .deploy-benchmarks variables: IMAGE_NAME: "theodolite-uc4-kstreams-app" - JAVA_PROJECT_NAME: "uc4-application" + JAVA_PROJECT_NAME: "uc4-kstreams" + +deploy-uc1-flink: + extends: .deploy-benchmarks + variables: + IMAGE_NAME: "theodolite-uc1-flink" + JAVA_PROJECT_NAME: "uc1-flink" + +deploy-uc2-flink: + extends: .deploy-benchmarks + variables: + IMAGE_NAME: "theodolite-uc2-flink" + JAVA_PROJECT_NAME: "uc2-flink" + +deploy-uc3-flink: + extends: .deploy-benchmarks + variables: + IMAGE_NAME: "theodolite-uc3-flink" + JAVA_PROJECT_NAME: "uc3-flink" + +deploy-uc4-flink: + extends: .deploy-benchmarks + variables: + IMAGE_NAME: "theodolite-uc4-flink" + JAVA_PROJECT_NAME: "uc4-flink" deploy-uc1-load-generator: extends: .deploy-benchmarks variables: IMAGE_NAME: "theodolite-uc1-workload-generator" - JAVA_PROJECT_NAME: "uc1-workload-generator" + JAVA_PROJECT_NAME: "uc1-load-generator" deploy-uc2-load-generator: extends: .deploy-benchmarks variables: IMAGE_NAME: "theodolite-uc2-workload-generator" - JAVA_PROJECT_NAME: "uc2-workload-generator" + JAVA_PROJECT_NAME: "uc2-load-generator" deploy-uc3-load-generator: extends: .deploy-benchmarks variables: IMAGE_NAME: "theodolite-uc3-workload-generator" - JAVA_PROJECT_NAME: "uc3-workload-generator" + JAVA_PROJECT_NAME: "uc3-load-generator" deploy-uc4-load-generator: extends: .deploy-benchmarks variables: IMAGE_NAME: "theodolite-uc4-workload-generator" - JAVA_PROJECT_NAME: "uc4-workload-generator" + JAVA_PROJECT_NAME: "uc4-load-generator" # Theodolite Framework .theodolite: - image: openjdk:11-jdk + image: ghcr.io/graalvm/graalvm-ce:java11-21.0.0.2 tags: - exec-docker variables: @@ -189,23 +226,33 @@ deploy-uc4-load-generator: - cd theodolite-quarkus - export GRADLE_USER_HOME=`pwd`/.gradle -build-theodolite: +build-theodolite-jvm: stage: build extends: .theodolite - # script: ./gradlew --build-cache assemble -Dquarkus.package.type=native script: ./gradlew --build-cache assemble artifacts: paths: - "theodolite-quarkus/build/lib/*" - "theodolite-quarkus/build/*-runner.jar" - # - "theodolite-quarkus/build/*-runner" # For native image + expire_in: 1 day + +build-theodolite-native: + stage: build + extends: .theodolite + script: + - gu install native-image # TODO move to image + - ./gradlew --build-cache assemble -Dquarkus.package.type=native + artifacts: + paths: + - "theodolite-quarkus/build/*-runner" expire_in: 1 day test-theodolite: stage: test extends: .theodolite needs: - - build-theodolite + - build-theodolite-jvm + - build-theodolite-native script: ./gradlew test --stacktrace # Disabled for now @@ -213,7 +260,7 @@ test-theodolite: stage: check extends: .theodolite needs: - - build-theodolite + - build-theodolite-jvm - test-theodolite script: ./gradlew ktlintCheck --continue @@ -222,7 +269,7 @@ test-theodolite: stage: check extends: .theodolite needs: - - build-theodolite + - build-theodolite-jvm - test-theodolite script: ./gradlew detekt --continue @@ -232,12 +279,12 @@ deploy-theodolite: - .theodolite - .dind needs: - - build-theodolite + - build-theodolite-native - test-theodolite script: - DOCKER_TAG_NAME=$(echo $CI_COMMIT_REF_SLUG- | sed 's/^master-$//') - #- docker build -f src/main/docker/Dockerfile.native -t theodolite . - - docker build -f src/main/docker/Dockerfile.jvm -t theodolite . + - docker build -f src/main/docker/Dockerfile.native -t theodolite . + #- docker build -f src/main/docker/Dockerfile.jvm -t theodolite . - "[ ! 
$CI_COMMIT_TAG ] && docker tag theodolite $CR_HOST/$CR_ORG/theodolite:${DOCKER_TAG_NAME}latest" - "[ ! $CI_COMMIT_TAG ] && docker tag theodolite $CR_HOST/$CR_ORG/theodolite:$DOCKER_TAG_NAME$CI_COMMIT_SHORT_SHA" - "[ $CI_COMMIT_TAG ] && docker tag theodolite $CR_HOST/$CR_ORG/theodolite:$CI_COMMIT_TAG" @@ -254,3 +301,30 @@ deploy-theodolite: - if: "$CR_HOST && $CR_ORG && $CR_USER && $CR_PW" when: manual allow_failure: true + + +# Theodolite Random Scheduler + +deploy-random-scheduler: + stage: deploy + extends: + - .dind + script: + - DOCKER_TAG_NAME=$(echo $CI_COMMIT_REF_SLUG- | sed 's/^master-$//') + - docker build --pull -t theodolite-random-scheduler execution/infrastructure/random-scheduler + - "[ ! $CI_COMMIT_TAG ] && docker tag theodolite-random-scheduler $CR_HOST/$CR_ORG/theodolite-random-scheduler:${DOCKER_TAG_NAME}latest" + - "[ $CI_COMMIT_TAG ] && docker tag theodolite-random-scheduler $CR_HOST/$CR_ORG/theodolite-random-scheduler:$CI_COMMIT_TAG" + - echo $CR_PW | docker login $CR_HOST -u $CR_USER --password-stdin + - docker push $CR_HOST/$CR_ORG/theodolite-random-scheduler + - docker logout + rules: + - if: "$CR_HOST && $CR_ORG && $CR_USER && $CR_PW && $CI_COMMIT_TAG" + when: always + - changes: + - execution/infrastructure/random-scheduler/**/* + if: "$CR_HOST && $CR_ORG && $CR_USER && $CR_PW" + when: always + - if: "$CR_HOST && $CR_ORG && $CR_USER && $CR_PW" + when: manual + allow_failure: true + \ No newline at end of file diff --git a/CITATION.cff b/CITATION.cff index ae409536b477586aaabde687b0bfbaef1ae422d3..b6afbc8d75810b530e3a15538301c03323e7c6e2 100644 --- a/CITATION.cff +++ b/CITATION.cff @@ -10,7 +10,7 @@ authors: given-names: Wilhelm orcid: "https://orcid.org/0000-0001-6625-4335" title: Theodolite -version: "0.3.0" +version: "0.4.0" repository-code: "https://github.com/cau-se/theodolite" license: "Apache-2.0" doi: "10.1016/j.bdr.2021.100209" diff --git a/README.md b/README.md index 9dcceb9e65a8a50d96e579a1d14c9861eb22cc82..f2673f4b9ed0c46987963f8b455e19def802db79 100644 --- a/README.md +++ b/README.md @@ -6,7 +6,7 @@ Theodolite is a framework for benchmarking the horizontal and vertical scalabili ## Theodolite Benchmarks -Theodolite contains 4 application benchmarks, which are based on typical use cases for stream processing within microservices. For each benchmark, a corresponding workload generator is provided. Currently, this repository provides benchmark implementations for Apache Kafka Streams. Benchmark implementation for Apache Flink are currently under development and can be found in the *apache-flink* branch of this repository. The benchmark sources can be found in [Thedolite benchmarks](benchmarks). +Theodolite contains 4 application benchmarks, which are based on typical use cases for stream processing within microservices. For each benchmark, a corresponding workload generator is provided. Currently, this repository provides benchmark implementations for Apache Kafka Streams and Apache Flink. The benchmark sources can be found in [Thedolite benchmarks](benchmarks). 
## Theodolite Execution Framework diff --git a/benchmarks/build.gradle b/benchmarks/build.gradle deleted file mode 100644 index ea8fb80bb2c2bac6121dbaaf72f742aa0e9c62bb..0000000000000000000000000000000000000000 --- a/benchmarks/build.gradle +++ /dev/null @@ -1,178 +0,0 @@ -// Inherited to all subprojects -buildscript { - repositories { - maven { - url "https://plugins.gradle.org/m2/" - } - } - dependencies { - classpath "gradle.plugin.com.github.spotbugs.snom:spotbugs-gradle-plugin:4.6.0" - } -} - -// Variables used to distinct different subprojects -def useCaseProjects = subprojects.findAll {it -> it.name.matches('uc(.)*')} -def useCaseApplications = subprojects.findAll {it -> it.name.matches('uc[0-9]+-application')} -def useCaseGenerators = subprojects.findAll {it -> it.name.matches('uc[0-9]+-workload-generator*')} -def commonProjects = subprojects.findAll {it -> it.name.matches('(.)*commons(.)*')} - -// Plugins -allprojects { - apply plugin: 'eclipse' -} - -subprojects { - apply plugin: 'checkstyle' - apply plugin: 'pmd' - apply plugin: 'com.github.spotbugs' - apply plugin: 'java-library' -} - -configure(useCaseProjects){ - apply plugin: 'application' -} - -// Java version for all subprojects -subprojects { - java { - sourceCompatibility = JavaVersion.VERSION_11 - targetCompatibility = JavaVersion.VERSION_11 - } -} - -// Check for updates every build -configurations.all { - resolutionStrategy.cacheChangingModulesFor 0, 'seconds' -} - -// Repositories for all projects -allprojects { - repositories { - jcenter() - maven { - url "https://oss.sonatype.org/content/repositories/snapshots/" - } - maven { - url 'https://packages.confluent.io/maven/' - } - } -} - -// Dependencies for all use case applications -configure(useCaseApplications) { - dependencies { - // These dependencies are used internally, and not exposed to consumers on their own compile classpath. - implementation('org.industrial-devops:titan-ccp-common:0.1.0-SNAPSHOT') { changing = true } - implementation('org.industrial-devops:titan-ccp-common-kafka:0.1.0-SNAPSHOT') { changing = true } - implementation 'org.apache.kafka:kafka-streams:2.6.0' // enable TransformerSuppliers - implementation 'com.google.code.gson:gson:2.8.2' - implementation 'com.google.guava:guava:24.1-jre' - implementation 'org.slf4j:slf4j-simple:1.7.25' - implementation project(':application-kafkastreams-commons') - - // Use JUnit test framework - testImplementation 'junit:junit:4.12' - } -} - -// Dependencies for all use case generators -configure(useCaseGenerators) { - dependencies { - // These dependencies are used internally, and not exposed to consumers on their own compile classpath. - implementation('org.industrial-devops:titan-ccp-common:0.1.0-SNAPSHOT') { changing = true } - implementation('org.industrial-devops:titan-ccp-common-kafka:0.1.0-SNAPSHOT') { changing = true } - implementation 'org.slf4j:slf4j-simple:1.7.25' - - // These dependencies are used for the workload-generator-commmon - implementation project(':workload-generator-commons') - - // Use JUnit test framework - testImplementation 'junit:junit:4.12' - } -} - -// Dependencies for all commons -configure(commonProjects) { - dependencies { - // These dependencies are used internally, and not exposed to consumers on their own compile classpath. 
- implementation 'org.slf4j:slf4j-simple:1.7.25' - implementation('org.industrial-devops:titan-ccp-common:0.1.0-SNAPSHOT') { changing = true } - implementation('org.industrial-devops:titan-ccp-common-kafka:0.1.0-SNAPSHOT') { changing = true } - implementation 'org.apache.kafka:kafka-streams:2.6.0' - - // Use JUnit test framework - testImplementation 'junit:junit:4.12' - } -} - -// Per default XML reports for SpotBugs are generated -// Include this to generate HTML reports -tasks.withType(com.github.spotbugs.snom.SpotBugsTask) { - reports { - // Either HTML or XML reports can be activated - html.enabled true - xml.enabled false - } -} - -// Subprojects quality tools tasks -subprojects { - task pmd { - group 'Quality Assurance' - description 'Run PMD' - - dependsOn 'pmdMain' - dependsOn 'pmdTest' - } - - task checkstyle { - group 'Quality Assurance' - description 'Run Checkstyle' - - dependsOn 'checkstyleMain' - dependsOn 'checkstyleTest' - } - - task spotbugs { - group 'Quality Assurance' - description 'Run SpotBugs' - - dependsOn 'spotbugsMain' - dependsOn 'spotbugsTest' - } -} - -// Subprojects quality tools configuration -subprojects { - pmd { - ruleSets = [] // Gradle requires to clean the rule sets first - ruleSetFiles = files("$rootProject.projectDir/config/pmd.xml") - ignoreFailures = false - toolVersion = "6.7.0" - } - - checkstyle { - configDirectory = file("$rootProject.projectDir/config") - configFile = file("$rootProject.projectDir/config/checkstyle.xml") - maxWarnings = 0 - ignoreFailures = false - toolVersion = "8.12" - } - - spotbugs { - excludeFilter = file("$rootProject.projectDir/config/spotbugs-exclude-filter.xml") - reportLevel = "low" - effort = "max" - ignoreFailures = false - toolVersion = '4.1.4' - } -} - -allprojects { - eclipse { - classpath { - downloadSources=true - downloadJavadoc=true - } - } -} diff --git a/benchmarks/gradle/wrapper/gradle-wrapper.jar b/benchmarks/gradle/wrapper/gradle-wrapper.jar deleted file mode 100644 index 457aad0d98108420a977756b7145c93c8910b076..0000000000000000000000000000000000000000 Binary files a/benchmarks/gradle/wrapper/gradle-wrapper.jar and /dev/null differ diff --git a/benchmarks/settings.gradle b/benchmarks/settings.gradle deleted file mode 100644 index 9104525ce160a25957f9731f820a723b4f36f7d5..0000000000000000000000000000000000000000 --- a/benchmarks/settings.gradle +++ /dev/null @@ -1,16 +0,0 @@ -rootProject.name = 'scalability-benchmarking' - -include 'workload-generator-commons' -include 'application-kafkastreams-commons' - -include 'uc1-workload-generator' -include 'uc1-application' - -include 'uc2-workload-generator' -include 'uc2-application' - -include 'uc3-workload-generator' -include 'uc3-application' - -include 'uc4-workload-generator' -include 'uc4-application' diff --git a/benchmarks/uc1-workload-generator/Dockerfile b/benchmarks/uc1-workload-generator/Dockerfile deleted file mode 100644 index 91f18d740fa87d7b03480a3352a1fa0eccc845db..0000000000000000000000000000000000000000 --- a/benchmarks/uc1-workload-generator/Dockerfile +++ /dev/null @@ -1,6 +0,0 @@ -FROM openjdk:11-slim - -ADD build/distributions/uc1-workload-generator.tar / - -CMD JAVA_OPTS="$JAVA_OPTS -Dorg.slf4j.simpleLogger.defaultLogLevel=$LOG_LEVEL" \ - /uc1-workload-generator/bin/uc1-workload-generator \ No newline at end of file diff --git a/benchmarks/uc2-workload-generator/Dockerfile b/benchmarks/uc2-workload-generator/Dockerfile deleted file mode 100644 index 55593e0295efb0c4f7d4c484b1b104c256f9b958..0000000000000000000000000000000000000000 --- 
a/benchmarks/uc2-workload-generator/Dockerfile +++ /dev/null @@ -1,6 +0,0 @@ -FROM openjdk:11-slim - -ADD build/distributions/uc2-workload-generator.tar / - -CMD JAVA_OPTS="$JAVA_OPTS -Dorg.slf4j.simpleLogger.defaultLogLevel=$LOG_LEVEL" \ - /uc2-workload-generator/bin/uc2-workload-generator \ No newline at end of file diff --git a/benchmarks/uc3-workload-generator/Dockerfile b/benchmarks/uc3-workload-generator/Dockerfile deleted file mode 100644 index 8422c9d5371b86ced0a38c141c461aef452133ac..0000000000000000000000000000000000000000 --- a/benchmarks/uc3-workload-generator/Dockerfile +++ /dev/null @@ -1,6 +0,0 @@ -FROM openjdk:11-slim - -ADD build/distributions/uc3-workload-generator.tar / - -CMD JAVA_OPTS="$JAVA_OPTS -Dorg.slf4j.simpleLogger.defaultLogLevel=$LOG_LEVEL" \ - /uc3-workload-generator/bin/uc3-workload-generator diff --git a/benchmarks/uc4-workload-generator/Dockerfile b/benchmarks/uc4-workload-generator/Dockerfile deleted file mode 100644 index f39923e59d3079d3b163ffc5d2e4906599de026d..0000000000000000000000000000000000000000 --- a/benchmarks/uc4-workload-generator/Dockerfile +++ /dev/null @@ -1,6 +0,0 @@ -FROM openjdk:11-slim - -ADD build/distributions/uc4-workload-generator.tar / - -CMD JAVA_OPTS="$JAVA_OPTS -Dorg.slf4j.simpleLogger.defaultLogLevel=$LOG_LEVEL" \ - /uc4-workload-generator/bin/uc4-workload-generator diff --git a/benchmarks/workload-generator-commons/build.gradle b/benchmarks/workload-generator-commons/build.gradle deleted file mode 100644 index 98d820b480ba0b357b74f82ebce5a647ee392461..0000000000000000000000000000000000000000 --- a/benchmarks/workload-generator-commons/build.gradle +++ /dev/null @@ -1,5 +0,0 @@ -dependencies { - implementation 'com.google.guava:guava:30.1-jre' - implementation 'com.hazelcast:hazelcast:4.1.1' - implementation 'com.hazelcast:hazelcast-kubernetes:2.2.1' -} \ No newline at end of file diff --git a/codemeta.json b/codemeta.json index eff1f1ba4f3c9a70a46c3cf83c47c279e1838cf9..5696996592f63bf8ece23239d8204e0f25b9cce1 100644 --- a/codemeta.json +++ b/codemeta.json @@ -5,10 +5,10 @@ "codeRepository": "https://github.com/cau-se/theodolite", "dateCreated": "2020-03-13", "datePublished": "2020-07-27", - "dateModified": "2021-02-11", + "dateModified": "2021-03-18", "downloadUrl": "https://github.com/cau-se/theodolite/releases", "name": "Theodolite", - "version": "0.3.0", + "version": "0.4.0", "description": "Theodolite is a framework for benchmarking the horizontal and vertical scalability of stream processing engines.", "developmentStatus": "active", "referencePublication": "https://doi.org/10.1016/j.bdr.2021.100209", diff --git a/docs/index.yaml b/docs/index.yaml new file mode 100644 index 0000000000000000000000000000000000000000..087124d158794e1b48dfc880e26da2c91d78808f --- /dev/null +++ b/docs/index.yaml @@ -0,0 +1,39 @@ +apiVersion: v1 +entries: + theodolite: + - apiVersion: v2 + appVersion: 0.4.0 + created: "2021-03-18T15:50:50.930902088+01:00" + dependencies: + - condition: grafana.enabled + name: grafana + repository: https://grafana.github.io/helm-charts + version: 6.0.0 + - condition: kube-prometheus-stack.enabled + name: kube-prometheus-stack + repository: https://prometheus-community.github.io/helm-charts + version: 12.0.0 + - condition: cp-helm-charts.enabled + name: cp-helm-charts + repository: https://soerenhenning.github.io/cp-helm-charts + version: 0.6.0 + - condition: kafka-lag-exporter.enabled + name: kafka-lag-exporter + repository: https://lightbend.github.io/kafka-lag-exporter/repo/ + version: 0.6.6 + description: 
Theodolite is a framework for benchmarking the scalability stream + processing engines. + digest: 45975b61b79547b152241cfc6dcf5e640090ff2c08ff9120275c77c9d9054155 + home: https://cau-se.github.io/theodolite + maintainers: + - email: soeren.henning@email.uni-kiel.de + name: Sören Henning + url: https://www.se.informatik.uni-kiel.de/en/team/soeren-henning-m-sc + name: theodolite + sources: + - https://github.com/cau-se/theodolite + type: application + urls: + - https://github.com/cau-se/theodolite/releases/download/v0.4.0/theodolite-0.4.0.tgz + version: 0.4.0 +generated: "2021-03-18T15:50:50.897801281+01:00" diff --git a/docs/release-process.md b/docs/release-process.md index 961106247fd0967a2dd6ffdd980e35235ceed168..981306b0762e43eacb29a434cc1e505593548fce 100644 --- a/docs/release-process.md +++ b/docs/release-process.md @@ -11,20 +11,42 @@ This document describes how to perform a new Theodolite release. We assume that we are creating the release `v0.3.1`. Please make sure to adjust the following steps according to the release, you are actually performing. -1. Update `codemeta.json` to match the new version. In particular, make sure that `version` points to the version you are releasing and `dateModified` points to the date you are relasing this version. [CodeMeata generator](https://codemeta.github.io/codemeta-generator/) may help you in updating the file. +1. Create a new branch `v0.3` if it does not already exist. This branch will never +again be merged into master. -2. Update `CITATION.cff` to match the new version. At least update the `version` field. +2. Checkout the `v0.3` branch. -3. Create a new branch `v0.3` if it does not already exists. This branch will never -again be merged into master. +3. Update all references to artifacts which are versioned. This includes: + + 1. Update all references to Theodolite Docker images to tag `v0.3.1`. These are the Kubernetes resource definitions in +`execution`, the references to *latest* in `run_uc.py`, the Docker Compose files in `theodolite-benchmarks/docker-test` and the example `theodolite.yaml` job. + + 2. Update both, the `version` and the `appVersion` fields, in the Helm `Charts.yaml` file to `0.3.1`. + + 3. Update `codemeta.json` to match the new version. In particular, make sure that `version` points to the version you are releasing and `dateModified` points to the date you are relasing this version. [CodeMeata generator](https://codemeta.github.io/codemeta-generator/) may help you in updating the file. + + 4. Update `CITATION.cff` to match the new version. At least update the `version` field. + +4. Create a Helm package by running `./build-package.sh` from the chart directory. + +5. Update the Helm repository index of located at `/docs` by running `./update-index.sh v0.3.1`. + +6. Commit these changes to the `v0.3` branch. + +7. Tag this commit `v0.3.1` (can be done via GitLab). The corresponding Docker images will be uploaded. + +8. Create *releases* on GitLab and GitHub. Upload the generated Helm package to these releases. + +9. Switch to the `master` branch. + +10. Re-run `./update-index.sh v0.3.1` to include the latest release in the *upstream* Helm repository. You can now delete the packaged Helm chart. -4. Checkout the `v0.3` branch. +11. If this release increments Theodolite's *latest* version number, -5. Update all references to Theodolite Docker images to tag `v0.3.1`. 
These are the Kubernetes resource definitions in -`execution`, the references to *latest* in `run_uc.py`, the Docker Compose files in `docker-test` and the example `theodolite.yaml` job. + 1. Update the Helm `Charts.yaml` file to `0.4.0-SNAPSHOT` (see Step 3). -6. Commit these changes. + 2. Update the `codemeta.json` file according to Step 3. -7. Tag this commit with `v0.3.1`. The corresponding Docker images will be uploaded. + 3. Update the `CITATION.cff` file according to Step 3. -8. Create *releases* for this tag in both, GitLab and GitHub. +12. Commit these changes to the `master` branch. diff --git a/execution/README.md b/execution/README.md index 442f1c71929f9c7367909ce6609c9122faf3e814..ff94dfed2fa887c300e8449b7c41dc3f65c7a1e2 100644 --- a/execution/README.md +++ b/execution/README.md @@ -6,6 +6,8 @@ in realistic execution environments, some third-party components are [required]( After everything is installed and configured, you can move on the [execution of benchmarks](#execution). +*Note: The currently released version of Theodolite only allows running Kafka Streams benchmarks. With the upcoming release `0.5`, we plan to support arbitrary stream processing engines, in particular, our already available implementations for Apache Flink. To already run them now, please contact us.* + ## Installation For executing benchmarks, access to a Kubernetes cluster is required. If you already run other applications inside your diff --git a/execution/helm/.helmignore b/execution/helm/.helmignore new file mode 100644 index 0000000000000000000000000000000000000000..0e8a0eb36f4ca2c939201c0d54b5d82a1ea34778 --- /dev/null +++ b/execution/helm/.helmignore @@ -0,0 +1,23 @@ +# Patterns to ignore when building packages. +# This supports shell glob matching, relative path matching, and +# negation (prefixed with !). Only one pattern per line. +.DS_Store +# Common VCS dirs +.git/ +.gitignore +.bzr/ +.bzrignore +.hg/ +.hgignore +.svn/ +# Common backup files +*.swp +*.bak +*.tmp +*.orig +*~ +# Various IDEs +.project +.idea/ +*.tmproj +.vscode/ diff --git a/execution/helm/Chart.yaml b/execution/helm/Chart.yaml new file mode 100644 index 0000000000000000000000000000000000000000..b09b4022d6727029311815b3b2b1bfcf4b4d2bd1 --- /dev/null +++ b/execution/helm/Chart.yaml @@ -0,0 +1,34 @@ +apiVersion: v2 +name: theodolite +description: Theodolite is a framework for benchmarking the scalability stream processing engines. 
+home: https://cau-se.github.io/theodolite +sources: + - https://github.com/cau-se/theodolite +maintainers: +- name: Sören Henning + email: soeren.henning@email.uni-kiel.de + url: https://www.se.informatik.uni-kiel.de/en/team/soeren-henning-m-sc + +type: application + +dependencies: + - name: grafana + version: 6.0.0 + repository: https://grafana.github.io/helm-charts + condition: grafana.enabled + - name: kube-prometheus-stack + version: 12.0.0 + repository: https://prometheus-community.github.io/helm-charts + condition: kube-prometheus-stack.enabled + - name: cp-helm-charts + version: 0.6.0 + repository: https://soerenhenning.github.io/cp-helm-charts + condition: cp-helm-charts.enabled + - name: kafka-lag-exporter + version: 0.6.6 + repository: https://lightbend.github.io/kafka-lag-exporter/repo/ + condition: kafka-lag-exporter.enabled + +version: 0.5.0-SNAPSHOT + +appVersion: 0.5.0-SNAPSHOT diff --git a/execution/helm/README.md b/execution/helm/README.md new file mode 100644 index 0000000000000000000000000000000000000000..4cacd06c8181970e78cb4f62e93b77fa169fcdfa --- /dev/null +++ b/execution/helm/README.md @@ -0,0 +1,54 @@ +# Theodolite Helm Chart + +## Installation + +Install the chart via: + +```sh +helm dependencies update . +helm install my-confluent . +``` + +**Please note: Theodolite currently uses hard-coded URLs, to connect to Kafka and Zookeeper. For that reason, the name of this chart must be `my-confluent`.** We will change this behavior soon. + +This chart installs requirements to execute benchmarks with Theodolite. + +Dependencies and subcharts: + +- Prometheus Operator +- Prometheus +- Grafana (incl. dashboard and data source configuration) +- Kafka +- Zookeeper +- A Kafka client pod + +## Test + +Test the installation: + +```sh +helm test <release-name> +``` + +Our test files are located [here](templates/../../theodolite-chart/templates/tests). Many subcharts have their own tests, these are also executed and are placed in the respective /templates folders. + +Please note: If a test fails, Helm will stop testing. + +It is possible that the tests are not running successfully at the moment. This is because the Helm tests of the subchart cp-confluent receive a timeout exception. There is an [issue](https://github.com/confluentinc/cp-helm-charts/issues/318) for this problem on GitHub. + +## Configuration + +In development environments Kubernetes resources are often low. To reduce resource consumption, we provide an `one-broker-value.yaml` file. This file can be used with: + +```sh +helm install theodolite . -f preconfigs/one-broker-values.yaml +``` + +## Development + +**Hints**: + +- Grafana configuration: Grafana ConfigMaps contains expressions like {{ topic }}. Helm uses the same syntax for template function. More information [here](https://github.com/helm/helm/issues/2798) + - Escape braces: {{ "{{" topic }} + - Let Helm render the template as raw string: {{ `{{ <config>}}` }} + \ No newline at end of file diff --git a/execution/helm/build-package.sh b/execution/helm/build-package.sh new file mode 100755 index 0000000000000000000000000000000000000000..e79d0497d883a8e1e0fab56ddeeb8d4ee1053648 --- /dev/null +++ b/execution/helm/build-package.sh @@ -0,0 +1,3 @@ +#!/usr/bin/env sh + +helm package . 
--dependency-update && rm -r charts # We don't want to include dependencies in our index diff --git a/execution/helm/preconfigs/one-broker-values.yaml b/execution/helm/preconfigs/one-broker-values.yaml new file mode 100644 index 0000000000000000000000000000000000000000..fdbc3207ee37f49cf176645851d91e62ba354d28 --- /dev/null +++ b/execution/helm/preconfigs/one-broker-values.yaml @@ -0,0 +1,15 @@ +cp-helm-charts: + ## ------------------------------------------------------ + ## Zookeeper + ## ------------------------------------------------------ + cp-zookeeper: + servers: 1 # default: 3 + + ## ------------------------------------------------------ + ## Kafka + ## ------------------------------------------------------ + cp-kafka: + brokers: 1 # deauflt: 10 + + configurationOverrides: + offsets.topic.replication.factor: "1" \ No newline at end of file diff --git a/execution/helm/templates/NOTES.txt b/execution/helm/templates/NOTES.txt new file mode 100644 index 0000000000000000000000000000000000000000..ef1eea71080f55d08e193b9741327189865fa3dd --- /dev/null +++ b/execution/helm/templates/NOTES.txt @@ -0,0 +1,3 @@ +Welcome to Theodolite! + +Visit https://cau-se.github.io/theodolite for getting started and more information. diff --git a/execution/helm/templates/_helpers.tpl b/execution/helm/templates/_helpers.tpl new file mode 100644 index 0000000000000000000000000000000000000000..f59f74d369b64ec89a44cbf2048fda9e844df92b --- /dev/null +++ b/execution/helm/templates/_helpers.tpl @@ -0,0 +1,62 @@ +{{/* +Expand the name of the chart. +*/}} +{{- define "theodolite.name" -}} +{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" }} +{{- end }} + +{{/* +Create a default fully qualified app name. +We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec). +If release name contains chart name it will be used as a full name. +*/}} +{{- define "theodolite.fullname" -}} +{{- if .Values.fullnameOverride }} +{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" }} +{{- else }} +{{- $name := default .Chart.Name .Values.nameOverride }} +{{- if contains $name .Release.Name }} +{{- .Release.Name | trunc 63 | trimSuffix "-" }} +{{- else }} +{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" }} +{{- end }} +{{- end }} +{{- end }} + +{{/* +Create chart name and version as used by the chart label. +*/}} +{{- define "theodolite.chart" -}} +{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }} +{{- end }} + +{{/* +Common labels +*/}} +{{- define "theodolite.labels" -}} +helm.sh/chart: {{ include "theodolite.chart" . }} +{{ include "theodolite.selectorLabels" . }} +{{- if .Chart.AppVersion }} +app.kubernetes.io/version: {{ .Chart.AppVersion | quote }} +{{- end }} +app.kubernetes.io/managed-by: {{ .Release.Service }} +{{- end }} + +{{/* +Selector labels +*/}} +{{- define "theodolite.selectorLabels" -}} +app.kubernetes.io/name: {{ include "theodolite.name" . }} +app.kubernetes.io/instance: {{ .Release.Name }} +{{- end }} + +{{/* +Create the name of the service account to use +*/}} +{{- define "theodolite.serviceAccountName" -}} +{{- if .Values.serviceAccount.create }} +{{- default (include "theodolite.fullname" .) 
.Values.serviceAccount.name }} +{{- else }} +{{- default "default" .Values.serviceAccount.name }} +{{- end }} +{{- end }} diff --git a/execution/helm/templates/cluster-role-binding.yaml b/execution/helm/templates/cluster-role-binding.yaml new file mode 100644 index 0000000000000000000000000000000000000000..400a972cdb73dca181b621f49e7a3e79c926e65b --- /dev/null +++ b/execution/helm/templates/cluster-role-binding.yaml @@ -0,0 +1,14 @@ +{{- if .Values.prometheus.clusterRoleBinding.enabled -}} +apiVersion: rbac.authorization.k8s.io/v1 +kind: ClusterRoleBinding +metadata: + name: prometheus +roleRef: + apiGroup: rbac.authorization.k8s.io + kind: ClusterRole + name: prometheus +subjects: +- kind: ServiceAccount + name: prometheus + namespace: {{ .Release.Namespace }} +{{- end}} \ No newline at end of file diff --git a/execution/helm/templates/cluster-role.yaml b/execution/helm/templates/cluster-role.yaml new file mode 100644 index 0000000000000000000000000000000000000000..2a272718da1413a466d6afed51b3bca1f37a1fe0 --- /dev/null +++ b/execution/helm/templates/cluster-role.yaml @@ -0,0 +1,20 @@ +{{- if .Values.prometheus.clusterRole.enabled -}} +apiVersion: rbac.authorization.k8s.io/v1 +kind: ClusterRole +metadata: + name: prometheus +rules: +- apiGroups: [""] + resources: + - nodes + - services + - endpoints + - pods + verbs: ["get", "list", "watch"] +- apiGroups: [""] + resources: + - configmaps + verbs: ["get"] +- nonResourceURLs: ["/metrics"] + verbs: ["get"] +{{- end }} \ No newline at end of file diff --git a/execution/helm/templates/dashboard-config-map.yaml b/execution/helm/templates/dashboard-config-map.yaml new file mode 100644 index 0000000000000000000000000000000000000000..87e588f29df7446d0b12000eb53487a9bb88ea6c --- /dev/null +++ b/execution/helm/templates/dashboard-config-map.yaml @@ -0,0 +1,1007 @@ +{{- if .Values.grafana.enabled -}} +apiVersion: v1 +kind: ConfigMap +metadata: + name: scalability + labels: + grafana_dashboard: "1" +data: + k8s-dashboard.json: |- + {{`{ + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": "-- Grafana --", + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "type": "dashboard" + } + ] + }, + "editable": true, + "gnetId": null, + "graphTooltip": 0, + "id": 2, + "iteration": 1589140028684, + "links": [], + "panels": [ + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": null, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 7, + "w": 12, + "x": 0, + "y": 0 + }, + "hiddenSeries": false, + "id": 2, + "legend": { + "alignAsTable": false, + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "options": { + "dataLinks": [] + }, + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "sum(cp_kafka_server_brokertopicmetrics_messagesinpersec_topic_input)", + "format": "time_series", + "intervalFactor": 1, + "legendFormat": "{{Messages In Per Second}}", + "refId": "D" + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "Messages In Per Second", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, 
+ "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": "0", + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": null, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 7, + "w": 12, + "x": 12, + "y": 0 + }, + "hiddenSeries": false, + "id": 3, + "legend": { + "alignAsTable": false, + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "options": { + "dataLinks": [] + }, + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "sum(cp_kafka_server_brokertopicmetrics_messagesinpersec_topic_output)", + "format": "time_series", + "intervalFactor": 1, + "legendFormat": "{{Messages Out Per Second}}", + "refId": "D" + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "Messages Out Per Second", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": "0", + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": null, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 7 + }, + "hiddenSeries": false, + "id": 9, + "legend": { + "alignAsTable": false, + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "options": { + "dataLinks": [] + }, + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": true, + "steppedLine": false, + "targets": [ + { + "expr": "sum by(group, topic) (kafka_consumergroup_group_lag > 0)", + "format": "time_series", + "intervalFactor": 1, + "legendFormat": "{{topic}}", + "refId": "D" + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "Record Lag", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": "0", + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": null, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 8, + "w": 12, + "x": 12, + "y": 7 + }, + "hiddenSeries": false, 
+ "id": 5, + "legend": { + "alignAsTable": false, + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "options": { + "dataLinks": [] + }, + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "count(count (kafka_consumer_consumer_fetch_manager_metrics_records_lag) by(pod))", + "format": "time_series", + "intervalFactor": 1, + "legendFormat": "instances", + "refId": "D" + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "Number of Instances", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "decimals": 0, + "format": "short", + "label": "", + "logBase": 1, + "max": null, + "min": "0", + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": null, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 15 + }, + "hiddenSeries": false, + "id": 10, + "legend": { + "alignAsTable": false, + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "options": { + "dataLinks": [] + }, + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "sum by(group,topic) (kafka_consumergroup_group_offset > 0)", + "format": "time_series", + "intervalFactor": 1, + "legendFormat": "{{topic}}", + "refId": "D" + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "Records Consumed", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": "0", + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": null, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 8, + "w": 12, + "x": 12, + "y": 15 + }, + "hiddenSeries": false, + "id": 12, + "legend": { + "alignAsTable": false, + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "options": { + "dataLinks": [] + }, + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "count by(group,topic) 
(kafka_consumergroup_group_offset > 0)", + "format": "time_series", + "intervalFactor": 1, + "legendFormat": "{{topic}}", + "refId": "D" + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "Number of Partitions", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": "0", + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": null, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 23 + }, + "hiddenSeries": false, + "id": 11, + "legend": { + "alignAsTable": false, + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "options": { + "dataLinks": [] + }, + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "sum by(group,topic) (kafka_partition_latest_offset)", + "format": "time_series", + "intervalFactor": 1, + "legendFormat": "{{topic}}", + "refId": "D" + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "Records Produced (Kafka Lag Exporter)", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": "0", + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": null, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 8, + "w": 12, + "x": 12, + "y": 23 + }, + "hiddenSeries": false, + "id": 8, + "legend": { + "alignAsTable": false, + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "options": { + "dataLinks": [] + }, + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "count by(job, topic) (kafka_consumer_consumer_fetch_manager_metrics_records_lag)", + "format": "time_series", + "intervalFactor": 1, + "legendFormat": "{{topic}}", + "refId": "D" + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "Number of Partitions (Kafka Streams Export)", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "decimals": null, + "format": 
"short", + "label": "", + "logBase": 1, + "max": null, + "min": "0", + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": null, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 31 + }, + "hiddenSeries": false, + "id": 4, + "legend": { + "alignAsTable": false, + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "options": { + "dataLinks": [] + }, + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "sum by(job, topic) (kafka_consumer_consumer_fetch_manager_metrics_records_lag)", + "format": "time_series", + "intervalFactor": 1, + "legendFormat": "{{topic}}", + "refId": "D" + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "Record Lag (Kafka Streams Export)", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": "0", + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": null, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 8, + "w": 12, + "x": 12, + "y": 31 + }, + "hiddenSeries": false, + "id": 13, + "legend": { + "alignAsTable": false, + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "options": { + "dataLinks": [] + }, + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": true, + "steppedLine": false, + "targets": [ + { + "expr": "sum by(group) (kafka_consumergroup_group_lag > 0)", + "format": "time_series", + "intervalFactor": 1, + "legendFormat": "total lag", + "refId": "D" + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "Total Record Lag (Kafka Lag Exporter)", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": "0", + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + } + ], + "refresh": "10s", + "schemaVersion": 21, + "style": "dark", + "tags": [], + "templating": { + "list": [ + { + "allValue": null, + "current": { + "tags": [], + "text": "titan-ccp-aggregation", + "value": "titan-ccp-aggregation" + }, + "datasource": 
"Prometheus", + "definition": "label_values(kafka_consumer_consumer_fetch_manager_metrics_records_lag, job)", + "hide": 0, + "includeAll": false, + "label": "Job", + "multi": false, + "name": "Job", + "options": [ + { + "selected": true, + "text": "titan-ccp-aggregation", + "value": "titan-ccp-aggregation" + } + ], + "query": "label_values(kafka_consumer_consumer_fetch_manager_metrics_records_lag, job)", + "refresh": 0, + "regex": "", + "skipUrlSync": false, + "sort": 0, + "tagValuesQuery": "", + "tags": [], + "tagsQuery": "", + "type": "query", + "useTags": false + } + ] + }, + "time": { + "from": "now-1h", + "to": "now" + }, + "timepicker": { + "refresh_intervals": [ + "5s", + "10s", + "30s", + "1m", + "5m", + "15m", + "30m", + "1h", + "2h", + "1d" + ] + }, + "timezone": "", + "title": "Scalability Benchmarking", + "uid": "dad0CNlZz", + "version": 25 + }`}} +{{- end }} \ No newline at end of file diff --git a/execution/helm/templates/kafka-client.yaml b/execution/helm/templates/kafka-client.yaml new file mode 100644 index 0000000000000000000000000000000000000000..853cb768672d8888085a3881df81cbdb806ec39d --- /dev/null +++ b/execution/helm/templates/kafka-client.yaml @@ -0,0 +1,19 @@ +{{- if .Values.kafkaClient.enabled -}} +apiVersion: v1 +kind: Pod +metadata: + # name: {{ template "theodolite.fullname" . }}-kafka-client + name: kafka-client +spec: + containers: + - name: kafka-client + image: confluentinc/cp-enterprise-kafka:5.4.0 + command: + - sh + - -c + - "exec tail -f /dev/null" + {{- with .Values.kafkaClient.nodeSelector }} + nodeSelector: + {{- toYaml . | nindent 8 }} + {{- end }} +{{- end }} \ No newline at end of file diff --git a/execution/helm/templates/prometheus-datasource-config-map.yaml b/execution/helm/templates/prometheus-datasource-config-map.yaml new file mode 100644 index 0000000000000000000000000000000000000000..4e793ff83668ac7a7582a924750ca729d9e277ae --- /dev/null +++ b/execution/helm/templates/prometheus-datasource-config-map.yaml @@ -0,0 +1,31 @@ +{{- if .Values.grafana.enabled -}} +apiVersion: v1 +kind: ConfigMap +metadata: + name: prometheus + labels: + grafana_datasource: "1" +data: + datasource.yaml: |- + # config file version + apiVersion: 1 + datasources: + # <string, required> name of the datasource. Required + - name: Prometheus + # <string, required> datasource type. Required + type: prometheus + # <string, required> access mode. proxy or direct (Server or Browser in the UI). Required + access: proxy + # <bool> mark as default datasource. Max one per org + isDefault: true + # <int> org id. will default to orgId 1 if not specified + orgId: 1 + # <string> url + url: http://prometheus-operated:9090 #http://localhost:9090 + # <map> fields that will be converted to json and stored in json_data + jsonData: + timeInterval: "15s" + version: 1 + # <bool> allow users to edit datasources from the UI. + editable: true +{{- end }} \ No newline at end of file diff --git a/execution/helm/templates/prometheus.yaml b/execution/helm/templates/prometheus.yaml new file mode 100644 index 0000000000000000000000000000000000000000..a3060798a8a3b000f730525805c0d050becc7a68 --- /dev/null +++ b/execution/helm/templates/prometheus.yaml @@ -0,0 +1,21 @@ +{{- if .Values.prometheus.enabled -}} +apiVersion: monitoring.coreos.com/v1 +kind: Prometheus +metadata: + name: {{ template "theodolite.fullname" . 
}}-prometheus +spec: + serviceAccountName: prometheus + serviceMonitorSelector: + matchLabels: + #app: cp-kafka + appScope: titan-ccp + resources: + requests: + memory: 400Mi + #scrapeInterval: 1s + enableAdminAPI: true + {{- with .Values.prometheus.nodeSelector }} + nodeSelector: + {{- toYaml . | nindent 8 }} + {{- end}} +{{- end}} \ No newline at end of file diff --git a/execution/helm/templates/service-account.yaml b/execution/helm/templates/service-account.yaml new file mode 100644 index 0000000000000000000000000000000000000000..2e14c8eb8ffd912f3d34d1b94aa481cb497b4b90 --- /dev/null +++ b/execution/helm/templates/service-account.yaml @@ -0,0 +1,6 @@ +{{- if .Values.prometheus.serviceAccount.enabled -}} +apiVersion: v1 +kind: ServiceAccount +metadata: + name: prometheus +{{- end}} \ No newline at end of file diff --git a/execution/helm/templates/service-monitor.yaml b/execution/helm/templates/service-monitor.yaml new file mode 100644 index 0000000000000000000000000000000000000000..50e4688cf7f9b919afdc9455462034f682975893 --- /dev/null +++ b/execution/helm/templates/service-monitor.yaml @@ -0,0 +1,16 @@ +{{- if .Values.kafkaClient.enabled -}} +apiVersion: monitoring.coreos.com/v1 +kind: ServiceMonitor +metadata: + labels: + app: cp-kafka + appScope: titan-ccp + name: kafka +spec: + selector: + matchLabels: + app: cp-kafka + endpoints: + - port: metrics + interval: 7s +{{- end}} diff --git a/execution/helm/templates/tests/test-connection.yaml b/execution/helm/templates/tests/test-connection.yaml new file mode 100644 index 0000000000000000000000000000000000000000..7af87e98920c11bcfaccb27724e6f29fc76771a0 --- /dev/null +++ b/execution/helm/templates/tests/test-connection.yaml @@ -0,0 +1,15 @@ +apiVersion: v1 +kind: Pod +metadata: + name: "{{ include "theodolite.fullname" . }}-test-prometheus" + labels: + {{- include "theodolite.labels" . | nindent 4 }} + annotations: + "helm.sh/hook": test-success +spec: + containers: + - name: wget + image: busybox + command: ['wget'] + args: ['http://prometheus-operated:9090'] + restartPolicy: Never diff --git a/execution/helm/update-index.sh b/execution/helm/update-index.sh new file mode 100755 index 0000000000000000000000000000000000000000..286724dd87718387df58ed993af417bf0fd4d8ec --- /dev/null +++ b/execution/helm/update-index.sh @@ -0,0 +1,9 @@ +#!/usr/bin/env sh + +RELEASE_NAME=$1 # Supposed to be equal to tag, e.g., v0.3.0 + +RELEASE_PATH="https://github.com/cau-se/theodolite/releases/download" +REPO_INDEX="../../docs/index.yaml" + +helm repo index . 
--url $RELEASE_PATH/$RELEASE_NAME --merge $REPO_INDEX && \ + mv index.yaml $REPO_INDEX \ No newline at end of file diff --git a/execution/helm/values.yaml b/execution/helm/values.yaml new file mode 100644 index 0000000000000000000000000000000000000000..d35912de6ffc72fff27f5d389c94761bc80eecd1 --- /dev/null +++ b/execution/helm/values.yaml @@ -0,0 +1,229 @@ +### +# Theodolite resources +### + +kafkaClient: + enabled: true + nodeSelector: {} + + +#### +## configuration of sub charts +### + +### +# Grafana +### +grafana: + enabled: true + nodeSelector: {} + image: + repository: grafana/grafana + tag: 6.7.3 + pullPolicy: IfNotPresent + # Administrator credentials when not using an existing secret (see below) + adminUser: admin + adminPassword: admin + ## Sidecars that collect the configmaps with specified label and stores the included files them into the respective folders + ## Requires at least Grafana 5 to work and can't be used together with parameters dashboardProviders, datasources and dashboards + sidecar: + image: + repository: "kiwigrid/k8s-sidecar" + tag: "0.1.99" + imagePullPolicy: IfNotPresent + dashboards: + enabled: true + provider: + # allow updating provisioned dashboards from the UI + allowUiUpdates: true + datasources: + enabled: true + service: + nodePort: 31199 + type: NodePort + + +### +# Confluent Platform +### + +cp-helm-charts: + enabled: true + ## ------------------------------------------------------ + ## Zookeeper + ## ------------------------------------------------------ + cp-zookeeper: + enabled: true + nodeSelector: {} + servers: 3 # default: 3 + image: confluentinc/cp-zookeeper + imageTag: 5.4.0 + ## Optionally specify an array of imagePullSecrets. Secrets must be manually created in the namespace. + ## https://kubernetes.io/docs/concepts/containers/images/#specifying-imagepullsecrets-on-a-pod + imagePullSecrets: + # - name: "regcred" + heapOptions: "-Xms512M -Xmx512M" + persistence: + enabled: false + + ## ------------------------------------------------------ + ## Kafka + ## ------------------------------------------------------ + cp-kafka: + enabled: true + nodeSelector: {} + brokers: 10 # default: 10 + image: confluentinc/cp-enterprise-kafka + imageTag: 5.4.0 + ## Optionally specify an array of imagePullSecrets. Secrets must be manually created in the namespace. + ## https://kubernetes.io/docs/concepts/containers/images/#specifying-imagepullsecrets-on-a-pod + imagePullSecrets: + # - name: "regcred" + heapOptions: "-Xms512M -Xmx512M" + persistence: + enabled: false + resources: {} + configurationOverrides: + #offsets.topic.replication.factor: 1 + "message.max.bytes": "134217728" # 128 MB + "replica.fetch.max.bytes": "134217728" # 128 MB + #default.replication.factor: 1 + # "min.insync.replicas": 2 + # "auto.create.topics.enable": false + "log.retention.ms": "10000" # 10s + "metrics.sample.window.ms": "5000" #5s + "advertised.listeners": |- + EXTERNAL://${HOST_IP}:$((31090 + ${KAFKA_BROKER_ID})) + "listener.security.protocol.map": |- + PLAINTEXT:PLAINTEXT,EXTERNAL:PLAINTEXT + + nodeport: + enabled: false + servicePort: 19092 + firstListenerPort: 31090 + + + ## ------------------------------------------------------ + ## Schema Registry + ## ------------------------------------------------------ + cp-schema-registry: + enabled: true + nodeSelector: {} + image: confluentinc/cp-schema-registry + imageTag: 5.4.0 + ## Optionally specify an array of imagePullSecrets. Secrets must be manually created in the namespace. 
+ ## https://kubernetes.io/docs/concepts/containers/images/#specifying-imagepullsecrets-on-a-pod + imagePullSecrets: + # - name: "regcred" + heapOptions: "-Xms512M -Xmx512M" + resources: {} + + external: + enabled: true + type: NodePort + servicePort: 8081 + nodePort: 30099 + annotations: {} + + cp-kafka-rest: + enabled: false + + cp-kafka-connect: + enabled: false + + cp-ksql-server: + enabled: false + + cp-control-center: + enabled: false + + +### +# Kafka Lag Exporter +### +kafka-lag-exporter: + enabled: true + nodeSelector: {} + clusters: + - name: "my-confluent-cp-kafka" + bootstrapBrokers: "my-confluent-cp-kafka:9092" + + ## The interval between refreshing metrics + pollIntervalSeconds: 15 + + prometheus: + serviceMonitor: + enabled: true + interval: "5s" + additionalLabels: + appScope: titan-ccp + + +### +# Prometheus Monitoring Stack (Prometheus Operator) +### +kube-prometheus-stack: + commonLabels: + appScope: titan-ccp + + alertmanager: + enabled: false + + grafana: + enabled: false + + kubeApiServer: + enabled: false + + kubelet: + enabled: false + + kubeControllerManager: + enabled: false + + coreDns: + enabled: false + + kubeDns: + enabled: false + + kubeEtcd: + enabled: false + + kubeScheduler: + enabled: false + + kubeProxy: + enabled: false + + kubeStateMetrics: + enabled: false + + nodeExporter: + enabled: false + + prometheusOperator: + enabled: true + namespaces: + releaseNamespace: true + additional: [] + nodeSelector: {} + + prometheus: + enabled: false + + +### +# Prometheus +### +prometheus: + enabled: true + nodeSelector: {} + + # depends on your cluster security and permission settings, you may need to create the following resources + serviceAccount: + enabled: true + clusterRole: + enabled: true + clusterRoleBinding: + enabled: true \ No newline at end of file diff --git a/execution/infrastructure/random-scheduler/Dockerfile b/execution/infrastructure/random-scheduler/Dockerfile new file mode 100644 index 0000000000000000000000000000000000000000..45f8ae9632022b458853013c1a52370c364e0002 --- /dev/null +++ b/execution/infrastructure/random-scheduler/Dockerfile @@ -0,0 +1,10 @@ +FROM alpine:3.12 + +RUN apk update && apk add bash curl jq +RUN curl -LO https://storage.googleapis.com/kubernetes-release/release/$(curl -s https://storage.googleapis.com/kubernetes-release/release/stable.txt)/bin/linux/amd64/kubectl \ + && chmod +x ./kubectl \ + && mv ./kubectl /usr/local/bin/kubectl + +ADD schedule.sh /bin/schedule + +CMD /bin/schedule diff --git a/execution/infrastructure/random-scheduler/README.md b/execution/infrastructure/random-scheduler/README.md new file mode 100644 index 0000000000000000000000000000000000000000..59b9acb0aefd48a5afe581ebb96d871370760b10 --- /dev/null +++ b/execution/infrastructure/random-scheduler/README.md @@ -0,0 +1,12 @@ +# Theodolite Random Scheduler +This directory contains the Theodolite Random Scheduler that schedules pods on random nodes. + +## Build and Push +Run the following commands + +- `docker build -t theodolite-random-scheduler .` +- `docker tag theodolite-random-scheduler <user>/theodolite-random-scheduler` +- `docker push <user>/theodolite-random-scheduler` + +## Deployment +Deploy the `deployment.yaml` file into Kubernetes. Note, that the `TARGET_NAMESPACE` environment variable specifies the operating namespace of the random scheduler. 
diff --git a/execution/infrastructure/random-scheduler/deployment.yaml b/execution/infrastructure/random-scheduler/deployment.yaml new file mode 100644 index 0000000000000000000000000000000000000000..3c8c0ed5a8657146c1e9aba3e6715ec9c6456651 --- /dev/null +++ b/execution/infrastructure/random-scheduler/deployment.yaml @@ -0,0 +1,25 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: random-scheduler + labels: + app: random-scheduler + namespace: kube-system +spec: + replicas: 1 + selector: + matchLabels: + app: random-scheduler + template: + metadata: + labels: + app: random-scheduler + spec: + serviceAccount: random-scheduler + containers: + - name: random-scheduler + image: ghcr.io/cau-se/theodolite-random-scheduler:latest + imagePullPolicy: Always + env: + - name: TARGET_NAMESPACE + value: default diff --git a/execution/infrastructure/random-scheduler/rbac.yaml b/execution/infrastructure/random-scheduler/rbac.yaml new file mode 100644 index 0000000000000000000000000000000000000000..ba463cc54a575730cacac6b905603892572b11ec --- /dev/null +++ b/execution/infrastructure/random-scheduler/rbac.yaml @@ -0,0 +1,21 @@ +apiVersion: v1 +kind: ServiceAccount +metadata: + namespace: kube-system + name: random-scheduler + labels: + app: random-scheduler + component: random-scheduler +--- +apiVersion: rbac.authorization.k8s.io/v1 +kind: ClusterRoleBinding +metadata: + name: random-scheduler +subjects: +- kind: ServiceAccount + name: random-scheduler + namespace: kube-system +roleRef: + kind: ClusterRole + apiGroup: rbac.authorization.k8s.io + name: system:kube-scheduler \ No newline at end of file diff --git a/execution/infrastructure/random-scheduler/schedule.sh b/execution/infrastructure/random-scheduler/schedule.sh new file mode 100755 index 0000000000000000000000000000000000000000..06745354d061225cfc1b3a746d361036b647051b --- /dev/null +++ b/execution/infrastructure/random-scheduler/schedule.sh @@ -0,0 +1,25 @@ +#!/bin/bash + +# use kubectl in proxy mode in order to allow curl requesting the k8's api server +kubectl proxy --port 8080 & + +echo "Target Namespace: $TARGET_NAMESPACE" +while true; +do + for PODNAME in $(kubectl get pods -n $TARGET_NAMESPACE -o json | jq '.items[] | select(.spec.schedulerName == "random-scheduler") | select(.spec.nodeName == null) | .metadata.name' | tr -d '"'); + do + NODE_SELECTOR=$(kubectl get pod $PODNAME -n $TARGET_NAMESPACE -o json | jq -S 'if .spec.nodeSelector != null then .spec.nodeSelector else {} end') + NODES=($(kubectl get nodes -o json | jq --argjson nodeSelector "$NODE_SELECTOR" '.items[] | select(.metadata.labels | contains($nodeSelector)) | .metadata.name' | tr -d '"')) + NUMNODES=${#NODES[@]} + if [ $NUMNODES -eq 0 ]; then + echo "No nodes found matching the node selector: $NODE_SELECTOR from pod $PODNAME" + echo "Pod $PODNAME cannot be scheduled." 
+      continue; +    fi + echo "Found $NUMNODES suitable nodes for pod $PODNAME" + CHOSEN=${NODES[$[$RANDOM % $NUMNODES]]} + curl --header "Content-Type:application/json" --request POST --data '{"apiVersion":"v1", "kind": "Binding", "metadata": {"name": "'$PODNAME'"}, "target": {"apiVersion": "v1", "kind": "Node", "name": "'$CHOSEN'"}}' localhost:8080/api/v1/namespaces/$TARGET_NAMESPACE/pods/$PODNAME/binding/ + echo "Assigned $PODNAME to $CHOSEN" + done + sleep 1 +done diff --git a/benchmarks/.settings/org.eclipse.jdt.ui.prefs b/theodolite-benchmarks/.settings/org.eclipse.jdt.ui.prefs similarity index 100% rename from benchmarks/.settings/org.eclipse.jdt.ui.prefs rename to theodolite-benchmarks/.settings/org.eclipse.jdt.ui.prefs diff --git a/benchmarks/.settings/qa.eclipse.plugin.checkstyle.prefs b/theodolite-benchmarks/.settings/qa.eclipse.plugin.checkstyle.prefs similarity index 100% rename from benchmarks/.settings/qa.eclipse.plugin.checkstyle.prefs rename to theodolite-benchmarks/.settings/qa.eclipse.plugin.checkstyle.prefs diff --git a/benchmarks/.settings/qa.eclipse.plugin.pmd.prefs b/theodolite-benchmarks/.settings/qa.eclipse.plugin.pmd.prefs similarity index 100% rename from benchmarks/.settings/qa.eclipse.plugin.pmd.prefs rename to theodolite-benchmarks/.settings/qa.eclipse.plugin.pmd.prefs diff --git a/theodolite-benchmarks/build.gradle b/theodolite-benchmarks/build.gradle new file mode 100644 index 0000000000000000000000000000000000000000..cd2ca985538ff84ec6b21aa5dccea86940d8fc6d --- /dev/null +++ b/theodolite-benchmarks/build.gradle @@ -0,0 +1,18 @@ +// Plugins +allprojects { + apply plugin: 'eclipse' +} + +// Check for updates every build +configurations.all { + resolutionStrategy.cacheChangingModulesFor 0, 'seconds' +} + +allprojects { + eclipse { + classpath { + downloadSources=true + downloadJavadoc=true + } + } +} diff --git a/theodolite-benchmarks/buildSrc/build.gradle b/theodolite-benchmarks/buildSrc/build.gradle new file mode 100644 index 0000000000000000000000000000000000000000..4c099de32dc97ed3aa0417e8fff1f06e2a50dfd8 --- /dev/null +++ b/theodolite-benchmarks/buildSrc/build.gradle @@ -0,0 +1,24 @@ +buildscript { + repositories { + maven { + url "https://plugins.gradle.org/m2/" + } + } + dependencies { + classpath "com.github.jengelman.gradle.plugins:shadow:6.0.0" + } +} + +// to discover the precompiled script plugins +plugins { + id 'groovy-gradle-plugin' +} + +repositories { + gradlePluginPortal() // so that external plugins can be resolved in dependencies section +} + +dependencies { + implementation 'gradle.plugin.com.github.spotbugs.snom:spotbugs-gradle-plugin:4.6.0' + implementation 'com.github.jengelman.gradle.plugins:shadow:6.0.0' +} diff --git a/benchmarks/application-kafkastreams-commons/build.gradle b/theodolite-benchmarks/buildSrc/settings.gradle similarity index 100% rename from benchmarks/application-kafkastreams-commons/build.gradle rename to theodolite-benchmarks/buildSrc/settings.gradle diff --git a/theodolite-benchmarks/buildSrc/src/main/groovy/theodolite.flink.gradle b/theodolite-benchmarks/buildSrc/src/main/groovy/theodolite.flink.gradle new file mode 100644 index 0000000000000000000000000000000000000000..f9c1d98d9f88a95bdc3fa25e7c1bec2f3c9bddb4 --- /dev/null +++ b/theodolite-benchmarks/buildSrc/src/main/groovy/theodolite.flink.gradle @@ -0,0 +1,64 @@ +plugins { + id 'theodolite.java-conventions' + id 'application' // executable + id 'com.github.johnrengelman.shadow' // create fat jar +} + +applicationDefaultJvmArgs = 
["-Dlog4j.configuration=log4j.properties"] + +ext { + flinkVersion = '1.12.2' + scalaBinaryVersion = '2.12' +} + +repositories { + jcenter() + maven { + url "https://oss.sonatype.org/content/repositories/snapshots/" + } + maven { + url 'https://packages.confluent.io/maven/' + } +} + +dependencies { + // Special version required because of https://issues.apache.org/jira/browse/FLINK-13703 + implementation('org.industrial-devops:titan-ccp-common:0.1.0-flink-ready-SNAPSHOT') { changing = true } + implementation('org.industrial-devops:titan-ccp-common-kafka:0.1.0-SNAPSHOT') { changing = true } + + // These dependencies are used internally, and not exposed to consumers on their own compile classpath. + implementation 'org.apache.kafka:kafka-clients:2.2.0' + implementation 'com.google.guava:guava:24.1-jre' + implementation 'com.google.code.gson:gson:2.8.2' + implementation 'org.slf4j:slf4j-simple:1.6.1' + compile project(':flink-commons') + + //compile group: 'org.apache.kafka', name: 'kafka-clients', version: "2.2.0" + compile group: 'org.apache.flink', name: 'flink-java', version: "${flinkVersion}" + compile group: 'org.apache.flink', name: "flink-streaming-java_${scalaBinaryVersion}", version:"${flinkVersion}" + compile group: 'org.apache.flink', name: "flink-table-api-java-bridge_${scalaBinaryVersion}", version: "${flinkVersion}" + compile group: 'org.apache.flink', name: "flink-table-planner-blink_${scalaBinaryVersion}", version: "${flinkVersion}" + compile group: 'org.apache.flink', name: "flink-connector-kafka_${scalaBinaryVersion}", version: "${flinkVersion}" + implementation "org.apache.flink:flink-avro:${flinkVersion}" + implementation "org.apache.flink:flink-avro-confluent-registry:${flinkVersion}" + compile group: 'org.apache.flink', name: "flink-runtime-web_${scalaBinaryVersion}", version: "${flinkVersion}" // TODO: remove after development + compile group: 'org.apache.flink', name: "flink-statebackend-rocksdb_${scalaBinaryVersion}", version: "${flinkVersion}" + compile group: 'org.apache.flink', name: "flink-metrics-prometheus_${scalaBinaryVersion}", version: "${flinkVersion}" + + // Use JUnit test framework + testImplementation 'junit:junit:4.12' +} + +run.classpath = sourceSets.main.runtimeClasspath + +jar { + manifest { + attributes 'Built-By': System.getProperty('user.name'), + 'Build-Jdk': System.getProperty('java.version') + } +} + +shadowJar { + configurations = [project.configurations.compile] + zip64 true +} diff --git a/theodolite-benchmarks/buildSrc/src/main/groovy/theodolite.java-commons.gradle b/theodolite-benchmarks/buildSrc/src/main/groovy/theodolite.java-commons.gradle new file mode 100644 index 0000000000000000000000000000000000000000..f195d6e117d29cad7a6d7494835626f92fb1c2b0 --- /dev/null +++ b/theodolite-benchmarks/buildSrc/src/main/groovy/theodolite.java-commons.gradle @@ -0,0 +1,7 @@ +plugins { + // common java conventions + id 'theodolite.java-conventions' + + // provide library capability in commons + id 'java-library' +} diff --git a/theodolite-benchmarks/buildSrc/src/main/groovy/theodolite.java-conventions.gradle b/theodolite-benchmarks/buildSrc/src/main/groovy/theodolite.java-conventions.gradle new file mode 100644 index 0000000000000000000000000000000000000000..773872648edfd4b30218a99d307b6e7c45ed3470 --- /dev/null +++ b/theodolite-benchmarks/buildSrc/src/main/groovy/theodolite.java-conventions.gradle @@ -0,0 +1,70 @@ +plugins { + id 'java' + id 'checkstyle' + id 'pmd' + + // NOTE: external plugin version is specified in implementation dependency artifact 
of the project's build file + id 'com.github.spotbugs' +} + +java { + sourceCompatibility = JavaVersion.VERSION_11 + targetCompatibility = JavaVersion.VERSION_11 +} + +// Per default XML reports for SpotBugs are generated +// Include this to generate HTML reports +tasks.withType(com.github.spotbugs.snom.SpotBugsTask) { + reports { + // Either HTML or XML reports can be activated + html.enabled true + xml.enabled false + } +} + +task pmd { + group 'Quality Assurance' + description 'Run PMD' + + dependsOn 'pmdMain' + dependsOn 'pmdTest' +} + +task checkstyle { + group 'Quality Assurance' + description 'Run Checkstyle' + + dependsOn 'checkstyleMain' + dependsOn 'checkstyleTest' +} + +task spotbugs { + group 'Quality Assurance' + description 'Run SpotBugs' + + dependsOn 'spotbugsMain' + dependsOn 'spotbugsTest' +} + +pmd { + ruleSets = [] // Gradle requires to clean the rule sets first + ruleSetFiles = files("$rootProject.projectDir/config/pmd.xml") + ignoreFailures = false + toolVersion = "6.7.0" +} + +checkstyle { + configDirectory = file("$rootProject.projectDir/config") + configFile = file("$rootProject.projectDir/config/checkstyle.xml") + maxWarnings = 0 + ignoreFailures = false + toolVersion = "8.12" +} + +spotbugs { + excludeFilter = file("$rootProject.projectDir/config/spotbugs-exclude-filter.xml") + reportLevel = "low" + effort = "max" + ignoreFailures = false + toolVersion = '4.1.4' +} diff --git a/theodolite-benchmarks/buildSrc/src/main/groovy/theodolite.kstreams.gradle b/theodolite-benchmarks/buildSrc/src/main/groovy/theodolite.kstreams.gradle new file mode 100644 index 0000000000000000000000000000000000000000..c6779fbc4348a8d665776e68688858ab3d2f4146 --- /dev/null +++ b/theodolite-benchmarks/buildSrc/src/main/groovy/theodolite.kstreams.gradle @@ -0,0 +1,31 @@ +plugins { + // common java conventions + id 'theodolite.java-conventions' + + // make executable + id 'application' +} + +repositories { + jcenter() + maven { + url "https://oss.sonatype.org/content/repositories/snapshots/" + } + maven { + url 'https://packages.confluent.io/maven/' + } +} + +dependencies { + // These dependencies are used internally, and not exposed to consumers on their own compile classpath. 
+ implementation('org.industrial-devops:titan-ccp-common:0.1.0-SNAPSHOT') { changing = true } + implementation('org.industrial-devops:titan-ccp-common-kafka:0.1.0-SNAPSHOT') { changing = true } + implementation 'org.apache.kafka:kafka-streams:2.6.0' // enable TransformerSuppliers + implementation 'com.google.code.gson:gson:2.8.2' + implementation 'com.google.guava:guava:24.1-jre' + implementation 'org.slf4j:slf4j-simple:1.7.25' + implementation project(':kstreams-commons') + + // Use JUnit test framework + testImplementation 'junit:junit:4.12' +} diff --git a/theodolite-benchmarks/buildSrc/src/main/groovy/theodolite.load-generator.gradle b/theodolite-benchmarks/buildSrc/src/main/groovy/theodolite.load-generator.gradle new file mode 100644 index 0000000000000000000000000000000000000000..13b7ea191d11c942cd0ca58b882ffda7bc7912be --- /dev/null +++ b/theodolite-benchmarks/buildSrc/src/main/groovy/theodolite.load-generator.gradle @@ -0,0 +1,30 @@ +plugins { + // common java conventions + id 'theodolite.java-conventions' + + // make executable + id 'application' +} + +repositories { + jcenter() + maven { + url "https://oss.sonatype.org/content/repositories/snapshots/" + } + maven { + url 'https://packages.confluent.io/maven/' + } +} + +dependencies { + // These dependencies are used internally, and not exposed to consumers on their own compile classpath. + implementation('org.industrial-devops:titan-ccp-common:0.1.0-SNAPSHOT') { changing = true } + implementation('org.industrial-devops:titan-ccp-common-kafka:0.1.0-SNAPSHOT') { changing = true } + implementation 'org.slf4j:slf4j-simple:1.7.25' + + // These dependencies are used for the load-generator-commons + implementation project(':load-generator-commons') + + // Use JUnit test framework + testImplementation 'junit:junit:4.12' +} diff --git a/benchmarks/config/README.md b/theodolite-benchmarks/config/README.md similarity index 100% rename from benchmarks/config/README.md rename to theodolite-benchmarks/config/README.md diff --git a/benchmarks/config/checkstyle-suppression.xml b/theodolite-benchmarks/config/checkstyle-suppression.xml similarity index 100% rename from benchmarks/config/checkstyle-suppression.xml rename to theodolite-benchmarks/config/checkstyle-suppression.xml diff --git a/benchmarks/config/checkstyle.xml b/theodolite-benchmarks/config/checkstyle.xml similarity index 100% rename from benchmarks/config/checkstyle.xml rename to theodolite-benchmarks/config/checkstyle.xml diff --git a/benchmarks/config/eclipse-cleanup.xml b/theodolite-benchmarks/config/eclipse-cleanup.xml similarity index 100% rename from benchmarks/config/eclipse-cleanup.xml rename to theodolite-benchmarks/config/eclipse-cleanup.xml diff --git a/benchmarks/config/eclipse-formatter.xml b/theodolite-benchmarks/config/eclipse-formatter.xml similarity index 100% rename from benchmarks/config/eclipse-formatter.xml rename to theodolite-benchmarks/config/eclipse-formatter.xml diff --git a/benchmarks/config/eclipse-import-order.importorder b/theodolite-benchmarks/config/eclipse-import-order.importorder similarity index 100% rename from benchmarks/config/eclipse-import-order.importorder rename to theodolite-benchmarks/config/eclipse-import-order.importorder diff --git a/benchmarks/config/pmd.xml b/theodolite-benchmarks/config/pmd.xml similarity index 100% rename from benchmarks/config/pmd.xml rename to theodolite-benchmarks/config/pmd.xml diff --git a/benchmarks/config/spotbugs-exclude-filter.xml b/theodolite-benchmarks/config/spotbugs-exclude-filter.xml similarity 
index 100% rename from benchmarks/config/spotbugs-exclude-filter.xml rename to theodolite-benchmarks/config/spotbugs-exclude-filter.xml diff --git a/theodolite-benchmarks/docker-test/uc1-flink-docker-compose/docker-compose.yml b/theodolite-benchmarks/docker-test/uc1-flink-docker-compose/docker-compose.yml new file mode 100755 index 0000000000000000000000000000000000000000..aa35ac2d1dee01cdf25d2eb2ac77bd056865479a --- /dev/null +++ b/theodolite-benchmarks/docker-test/uc1-flink-docker-compose/docker-compose.yml @@ -0,0 +1,69 @@ +version: '2' +services: + zookeeper: + image: confluentinc/cp-zookeeper + expose: + - "9092" + environment: + ZOOKEEPER_CLIENT_PORT: 2181 + kafka: + image: wurstmeister/kafka + expose: + - "9092" + #ports: + # - 19092:19092 + environment: + KAFKA_LISTENERS: PLAINTEXT://:9092,CONNECTIONS_FROM_HOST://:19092 + KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:9092,CONNECTIONS_FROM_HOST://localhost:19092 + KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,CONNECTIONS_FROM_HOST:PLAINTEXT + KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181 + KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 30000 + KAFKA_CREATE_TOPICS: "input:3:1,output:3:1,configuration:3:1,aggregation-feedback:3:1" + schema-registry: + image: confluentinc/cp-schema-registry:5.3.1 + depends_on: + - zookeeper + - kafka + expose: + - "8081" + #ports: + # - 8081:8081 + environment: + SCHEMA_REGISTRY_HOST_NAME: schema-registry + SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181' + load-generator: + image: ghcr.io/cau-se/theodolite-uc1-workload-generator:latest + depends_on: + - schema-registry + - kafka + environment: + BOOTSTRAP_SERVER: uc-wg:5701 + PORT: 5701 + KAFKA_BOOTSTRAP_SERVERS: kafka:9092 + SCHEMA_REGISTRY_URL: http://schema-registry:8081 + NUM_SENSORS: 10 + benchmark-jobmanager: + image: ghcr.io/cau-se/theodolite-uc1-flink:latest + ports: + - "8080:8081" + command: standalone-job --job-classname theodolite.uc1.application.HistoryServiceFlinkJob + environment: + - KAFKA_BOOTSTRAP_SERVERS=kafka:9092 + - SCHEMA_REGISTRY_URL=http://schema-registry:8081 + - | + FLINK_PROPERTIES= + jobmanager.rpc.address: benchmark-jobmanager + parallelism.default: 1 + depends_on: + - schema-registry + - kafka + benchmark-taskmanager: + image: ghcr.io/cau-se/theodolite-uc1-flink:latest + command: taskmanager + environment: + - | + FLINK_PROPERTIES= + jobmanager.rpc.address: benchmark-jobmanager + depends_on: + - schema-registry + - kafka diff --git a/docker-test/uc1-docker-compose/docker-compose.yml b/theodolite-benchmarks/docker-test/uc1-kstreams-docker-compose/docker-compose.yml similarity index 89% rename from docker-test/uc1-docker-compose/docker-compose.yml rename to theodolite-benchmarks/docker-test/uc1-kstreams-docker-compose/docker-compose.yml index cdc9df40257362934a93fcbe2de24b6035d40bca..403becacff5a386eddfaa8e59fe7873d2adb006c 100755 --- a/docker-test/uc1-docker-compose/docker-compose.yml +++ b/theodolite-benchmarks/docker-test/uc1-kstreams-docker-compose/docker-compose.yml @@ -31,16 +31,16 @@ services: environment: SCHEMA_REGISTRY_HOST_NAME: schema-registry SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181' - uc-app: - image: theodolite/theodolite-uc1-kstreams-app:latest + benchmark: + image: ghcr.io/cau-se/theodolite-uc1-kstreams-app:latest depends_on: - schema-registry - kafka environment: KAFKA_BOOTSTRAP_SERVERS: kafka:9092 SCHEMA_REGISTRY_URL: http://schema-registry:8081 - uc-wg: - image: theodolite/theodolite-uc1-workload-generator:latest + load-generator: + image: 
ghcr.io/cau-se/theodolite-uc1-workload-generator:latest depends_on: - schema-registry - kafka diff --git a/theodolite-benchmarks/docker-test/uc2-flink-docker-compose/docker-compose.yml b/theodolite-benchmarks/docker-test/uc2-flink-docker-compose/docker-compose.yml new file mode 100755 index 0000000000000000000000000000000000000000..a8bf56d52c1be7fea3f172d86f6deac27fcc24f7 --- /dev/null +++ b/theodolite-benchmarks/docker-test/uc2-flink-docker-compose/docker-compose.yml @@ -0,0 +1,70 @@ +version: '2' +services: + zookeeper: + #image: wurstmeister/zookeeper + image: confluentinc/cp-zookeeper + ports: + - "2181:2181" + environment: + ZOOKEEPER_CLIENT_PORT: 2181 + kafka: + image: wurstmeister/kafka + expose: + - "9092" + #ports: + # - 19092:19092 + environment: + KAFKA_LISTENERS: PLAINTEXT://:9092,CONNECTIONS_FROM_HOST://:19092 + KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:9092,CONNECTIONS_FROM_HOST://localhost:19092 + KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,CONNECTIONS_FROM_HOST:PLAINTEXT + KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181 + KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 30000 + KAFKA_CREATE_TOPICS: "input:3:1,output:3:1,configuration:3:1,aggregation-feedback:3:1" + schema-registry: + image: confluentinc/cp-schema-registry:5.3.1 + depends_on: + - zookeeper + - kafka + #ports: + # - "8081:8081" + expose: + - "8081" + environment: + SCHEMA_REGISTRY_HOST_NAME: schema-registry + SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181' + load-generator: + image: ghcr.io/cau-se/theodolite-uc2-workload-generator:latest + depends_on: + - schema-registry + - kafka + environment: + BOOTSTRAP_SERVER: uc-wg:5701 + PORT: 5701 + KAFKA_BOOTSTRAP_SERVERS: kafka:9092 + SCHEMA_REGISTRY_URL: http://schema-registry:8081 + NUM_SENSORS: 10 + benchmark-jobmanager: + image: ghcr.io/cau-se/theodolite-uc2-flink:latest + ports: + - "8080:8081" + command: standalone-job --job-classname theodolite.uc2.application.HistoryServiceFlinkJob + environment: + - KAFKA_BOOTSTRAP_SERVERS=kafka:9092 + - SCHEMA_REGISTRY_URL=http://schema-registry:8081 + - | + FLINK_PROPERTIES= + jobmanager.rpc.address: benchmark-jobmanager + parallelism.default: 1 + depends_on: + - schema-registry + - kafka + benchmark-taskmanager: + image: ghcr.io/cau-se/theodolite-uc2-flink:latest + command: taskmanager + environment: + - | + FLINK_PROPERTIES= + jobmanager.rpc.address: benchmark-jobmanager + depends_on: + - schema-registry + - kafka diff --git a/docker-test/uc2-docker-compose/docker-compose.yml b/theodolite-benchmarks/docker-test/uc2-kstreams-docker-compose/docker-compose.yml similarity index 90% rename from docker-test/uc2-docker-compose/docker-compose.yml rename to theodolite-benchmarks/docker-test/uc2-kstreams-docker-compose/docker-compose.yml index 613553fcfa53122205b6e58d85fb7225eae90d7c..20d2c62dac13af29ec50439670308f2911f0d57a 100755 --- a/docker-test/uc2-docker-compose/docker-compose.yml +++ b/theodolite-benchmarks/docker-test/uc2-kstreams-docker-compose/docker-compose.yml @@ -32,8 +32,8 @@ services: environment: SCHEMA_REGISTRY_HOST_NAME: schema-registry SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181' - uc-app: - image: theodolite/theodolite-uc2-kstreams-app:latest + benchmark: + image: ghcr.io/cau-se/theodolite-uc2-kstreams-app:latest depends_on: - schema-registry - kafka @@ -41,8 +41,8 @@ services: KAFKA_BOOTSTRAP_SERVERS: kafka:9092 SCHEMA_REGISTRY_URL: http://schema-registry:8081 KAFKA_WINDOW_DURATION_MINUTES: 60 - uc-wg: - image: theodolite/theodolite-uc2-workload-generator:latest + load-generator: + 
image: ghcr.io/cau-se/theodolite-uc2-workload-generator:latest depends_on: - schema-registry - kafka diff --git a/theodolite-benchmarks/docker-test/uc3-flink-docker-compose/docker-compose.yml b/theodolite-benchmarks/docker-test/uc3-flink-docker-compose/docker-compose.yml new file mode 100755 index 0000000000000000000000000000000000000000..9999caf046e844d066200ecfbf15d3351c167d31 --- /dev/null +++ b/theodolite-benchmarks/docker-test/uc3-flink-docker-compose/docker-compose.yml @@ -0,0 +1,70 @@ +version: '2' +services: + zookeeper: + #image: wurstmeister/zookeeper + image: confluentinc/cp-zookeeper + ports: + - "2181:2181" + environment: + ZOOKEEPER_CLIENT_PORT: 2181 + kafka: + image: wurstmeister/kafka + expose: + - "9092" + #ports: + # - 19092:19092 + environment: + KAFKA_LISTENERS: PLAINTEXT://:9092,CONNECTIONS_FROM_HOST://:19092 + KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:9092,CONNECTIONS_FROM_HOST://localhost:19092 + KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,CONNECTIONS_FROM_HOST:PLAINTEXT + KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181 + KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 30000 + KAFKA_CREATE_TOPICS: "input:3:1,output:3:1,configuration:3:1,aggregation-feedback:3:1" + schema-registry: + image: confluentinc/cp-schema-registry:5.3.1 + depends_on: + - zookeeper + - kafka + #ports: + # - "8081:8081" + expose: + - "8081" + environment: + SCHEMA_REGISTRY_HOST_NAME: schema-registry + SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181' + load-generator: + image: ghcr.io/cau-se/theodolite-uc3-workload-generator:latest + depends_on: + - schema-registry + - kafka + environment: + BOOTSTRAP_SERVER: uc-wg:5701 + PORT: 5701 + KAFKA_BOOTSTRAP_SERVERS: kafka:9092 + SCHEMA_REGISTRY_URL: http://schema-registry:8081 + NUM_SENSORS: 10 + benchmark-jobmanager: + image: ghcr.io/cau-se/theodolite-uc3-flink:latest + ports: + - "8080:8081" + command: standalone-job --job-classname theodolite.uc3.application.HistoryServiceFlinkJob + environment: + - KAFKA_BOOTSTRAP_SERVERS=kafka:9092 + - SCHEMA_REGISTRY_URL=http://schema-registry:8081 + - | + FLINK_PROPERTIES= + jobmanager.rpc.address: benchmark-jobmanager + parallelism.default: 1 + depends_on: + - schema-registry + - kafka + benchmark-taskmanager: + image: ghcr.io/cau-se/theodolite-uc3-flink:latest + command: taskmanager + environment: + - | + FLINK_PROPERTIES= + jobmanager.rpc.address: benchmark-jobmanager + depends_on: + - schema-registry + - kafka diff --git a/docker-test/uc3-docker-compose/docker-compose.yml b/theodolite-benchmarks/docker-test/uc3-kstreams-docker-compose/docker-compose.yml similarity index 90% rename from docker-test/uc3-docker-compose/docker-compose.yml rename to theodolite-benchmarks/docker-test/uc3-kstreams-docker-compose/docker-compose.yml index d321318b4024b678cf8f37007e90dc62a2042ece..ef16b858536b0d133dc49d002d16cf6c04193297 100755 --- a/docker-test/uc3-docker-compose/docker-compose.yml +++ b/theodolite-benchmarks/docker-test/uc3-kstreams-docker-compose/docker-compose.yml @@ -32,16 +32,16 @@ services: environment: SCHEMA_REGISTRY_HOST_NAME: schema-registry SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181' - uc-app: - image: theodolite/theodolite-uc3-kstreams-app:latest + benchmark: + image: ghcr.io/cau-se/theodolite-uc3-kstreams-app:latest depends_on: - schema-registry - kafka environment: KAFKA_BOOTSTRAP_SERVERS: kafka:9092 SCHEMA_REGISTRY_URL: http://schema-registry:8081 - uc-wg: - image: theodolite/theodolite-uc3-workload-generator:latest + load-generator: + image: 
ghcr.io/cau-se/theodolite-uc3-workload-generator:latest depends_on: - schema-registry - kafka diff --git a/theodolite-benchmarks/docker-test/uc4-flink-docker-compose/docker-compose.yml b/theodolite-benchmarks/docker-test/uc4-flink-docker-compose/docker-compose.yml new file mode 100755 index 0000000000000000000000000000000000000000..80720063991100bae2c8c148f14cd6f1a32bb0ff --- /dev/null +++ b/theodolite-benchmarks/docker-test/uc4-flink-docker-compose/docker-compose.yml @@ -0,0 +1,70 @@ +version: '2' +services: + zookeeper: + image: confluentinc/cp-zookeeper + expose: + - "2181" + environment: + ZOOKEEPER_CLIENT_PORT: 2181 + kafka: + image: wurstmeister/kafka + expose: + - "9092" + #ports: + # - 19092:19092 + environment: + KAFKA_LISTENERS: PLAINTEXT://:9092,CONNECTIONS_FROM_HOST://:19092 + KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:9092,CONNECTIONS_FROM_HOST://localhost:19092 + KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,CONNECTIONS_FROM_HOST:PLAINTEXT + KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181 + KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 30000 + KAFKA_CREATE_TOPICS: "input:3:1,output:3:1,configuration:3:1,aggregation-feedback:3:1" + schema-registry: + image: confluentinc/cp-schema-registry:5.3.1 + depends_on: + - zookeeper + - kafka + expose: + - "8081" + #ports: + # - 8081:8081 + environment: + SCHEMA_REGISTRY_HOST_NAME: schema-registry + SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181' + load-generator: + image: ghcr.io/cau-se/theodolite-uc4-workload-generator:latest + depends_on: + - schema-registry + - kafka + environment: + BOOTSTRAP_SERVER: uc-wg:5701 + PORT: 5701 + KAFKA_BOOTSTRAP_SERVERS: kafka:9092 + SCHEMA_REGISTRY_URL: http://schema-registry:8081 + NUM_SENSORS: 4 + NUM_NESTED_GROUPS: 4 + benchmark-jobmanager: + image: ghcr.io/cau-se/theodolite-uc4-flink:latest + ports: + - "8080:8081" + command: standalone-job --job-classname theodolite.uc4.application.AggregationServiceFlinkJob + environment: + - KAFKA_BOOTSTRAP_SERVERS=kafka:9092 + - SCHEMA_REGISTRY_URL=http://schema-registry:8081 + - | + FLINK_PROPERTIES= + jobmanager.rpc.address: benchmark-jobmanager + parallelism.default: 1 + depends_on: + - schema-registry + - kafka + benchmark-taskmanager: + image: ghcr.io/cau-se/theodolite-uc4-flink:latest + command: taskmanager + environment: + - | + FLINK_PROPERTIES= + jobmanager.rpc.address: benchmark-jobmanager + depends_on: + - schema-registry + - kafka diff --git a/docker-test/uc4-docker-compose/docker-compose.yml b/theodolite-benchmarks/docker-test/uc4-kstreams-docker-compose/docker-compose.yml similarity index 90% rename from docker-test/uc4-docker-compose/docker-compose.yml rename to theodolite-benchmarks/docker-test/uc4-kstreams-docker-compose/docker-compose.yml index d478d74e55a1b5423a390c624848b20f5faf2969..5e4cb94469f2f6cc8c48694a7ea6c885f066622d 100755 --- a/docker-test/uc4-docker-compose/docker-compose.yml +++ b/theodolite-benchmarks/docker-test/uc4-kstreams-docker-compose/docker-compose.yml @@ -31,16 +31,16 @@ services: environment: SCHEMA_REGISTRY_HOST_NAME: schema-registry SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181' - uc-app: - image: theodolite/theodolite-uc4-kstreams-app:latest + benchmark: + image: ghcr.io/cau-se/theodolite-uc4-kstreams-app:latest depends_on: - schema-registry - kafka environment: KAFKA_BOOTSTRAP_SERVERS: kafka:9092 SCHEMA_REGISTRY_URL: http://schema-registry:8081 - uc-wg: - image: theodolite/theodolite-uc4-workload-generator:latest + load-generator: + image: 
ghcr.io/cau-se/theodolite-uc4-workload-generator:latest depends_on: - schema-registry - kafka diff --git a/theodolite-benchmarks/flink-commons/.settings/org.eclipse.jdt.ui.prefs b/theodolite-benchmarks/flink-commons/.settings/org.eclipse.jdt.ui.prefs new file mode 100644 index 0000000000000000000000000000000000000000..66b402b58f39b79066638ce679c27c0378d5be54 --- /dev/null +++ b/theodolite-benchmarks/flink-commons/.settings/org.eclipse.jdt.ui.prefs @@ -0,0 +1,128 @@ +cleanup.add_default_serial_version_id=true +cleanup.add_generated_serial_version_id=false +cleanup.add_missing_annotations=true +cleanup.add_missing_deprecated_annotations=true +cleanup.add_missing_methods=false +cleanup.add_missing_nls_tags=false +cleanup.add_missing_override_annotations=true +cleanup.add_missing_override_annotations_interface_methods=true +cleanup.add_serial_version_id=false +cleanup.always_use_blocks=true +cleanup.always_use_parentheses_in_expressions=false +cleanup.always_use_this_for_non_static_field_access=true +cleanup.always_use_this_for_non_static_method_access=true +cleanup.convert_functional_interfaces=false +cleanup.convert_to_enhanced_for_loop=true +cleanup.correct_indentation=true +cleanup.format_source_code=true +cleanup.format_source_code_changes_only=false +cleanup.insert_inferred_type_arguments=false +cleanup.make_local_variable_final=true +cleanup.make_parameters_final=true +cleanup.make_private_fields_final=true +cleanup.make_type_abstract_if_missing_method=false +cleanup.make_variable_declarations_final=true +cleanup.never_use_blocks=false +cleanup.never_use_parentheses_in_expressions=true +cleanup.organize_imports=true +cleanup.qualify_static_field_accesses_with_declaring_class=false +cleanup.qualify_static_member_accesses_through_instances_with_declaring_class=true +cleanup.qualify_static_member_accesses_through_subtypes_with_declaring_class=true +cleanup.qualify_static_member_accesses_with_declaring_class=true +cleanup.qualify_static_method_accesses_with_declaring_class=false +cleanup.remove_private_constructors=true +cleanup.remove_redundant_modifiers=false +cleanup.remove_redundant_semicolons=true +cleanup.remove_redundant_type_arguments=true +cleanup.remove_trailing_whitespaces=true +cleanup.remove_trailing_whitespaces_all=true +cleanup.remove_trailing_whitespaces_ignore_empty=false +cleanup.remove_unnecessary_casts=true +cleanup.remove_unnecessary_nls_tags=true +cleanup.remove_unused_imports=true +cleanup.remove_unused_local_variables=false +cleanup.remove_unused_private_fields=true +cleanup.remove_unused_private_members=false +cleanup.remove_unused_private_methods=true +cleanup.remove_unused_private_types=true +cleanup.sort_members=false +cleanup.sort_members_all=false +cleanup.use_anonymous_class_creation=false +cleanup.use_blocks=true +cleanup.use_blocks_only_for_return_and_throw=false +cleanup.use_lambda=true +cleanup.use_parentheses_in_expressions=true +cleanup.use_this_for_non_static_field_access=true +cleanup.use_this_for_non_static_field_access_only_if_necessary=false +cleanup.use_this_for_non_static_method_access=true +cleanup.use_this_for_non_static_method_access_only_if_necessary=false +cleanup_profile=_CAU-SE-Style +cleanup_settings_version=2 +eclipse.preferences.version=1 +editor_save_participant_org.eclipse.jdt.ui.postsavelistener.cleanup=true +formatter_profile=_CAU-SE-Style +formatter_settings_version=15 +org.eclipse.jdt.ui.ignorelowercasenames=true +org.eclipse.jdt.ui.importorder=; +org.eclipse.jdt.ui.ondemandthreshold=99 
+org.eclipse.jdt.ui.staticondemandthreshold=99 +org.eclipse.jdt.ui.text.custom_code_templates= +sp_cleanup.add_default_serial_version_id=true +sp_cleanup.add_generated_serial_version_id=false +sp_cleanup.add_missing_annotations=true +sp_cleanup.add_missing_deprecated_annotations=true +sp_cleanup.add_missing_methods=false +sp_cleanup.add_missing_nls_tags=false +sp_cleanup.add_missing_override_annotations=true +sp_cleanup.add_missing_override_annotations_interface_methods=true +sp_cleanup.add_serial_version_id=false +sp_cleanup.always_use_blocks=true +sp_cleanup.always_use_parentheses_in_expressions=false +sp_cleanup.always_use_this_for_non_static_field_access=true +sp_cleanup.always_use_this_for_non_static_method_access=true +sp_cleanup.convert_functional_interfaces=false +sp_cleanup.convert_to_enhanced_for_loop=true +sp_cleanup.correct_indentation=true +sp_cleanup.format_source_code=true +sp_cleanup.format_source_code_changes_only=false +sp_cleanup.insert_inferred_type_arguments=false +sp_cleanup.make_local_variable_final=true +sp_cleanup.make_parameters_final=true +sp_cleanup.make_private_fields_final=true +sp_cleanup.make_type_abstract_if_missing_method=false +sp_cleanup.make_variable_declarations_final=true +sp_cleanup.never_use_blocks=false +sp_cleanup.never_use_parentheses_in_expressions=true +sp_cleanup.on_save_use_additional_actions=true +sp_cleanup.organize_imports=true +sp_cleanup.qualify_static_field_accesses_with_declaring_class=false +sp_cleanup.qualify_static_member_accesses_through_instances_with_declaring_class=true +sp_cleanup.qualify_static_member_accesses_through_subtypes_with_declaring_class=true +sp_cleanup.qualify_static_member_accesses_with_declaring_class=true +sp_cleanup.qualify_static_method_accesses_with_declaring_class=false +sp_cleanup.remove_private_constructors=true +sp_cleanup.remove_redundant_modifiers=false +sp_cleanup.remove_redundant_semicolons=false +sp_cleanup.remove_redundant_type_arguments=true +sp_cleanup.remove_trailing_whitespaces=true +sp_cleanup.remove_trailing_whitespaces_all=true +sp_cleanup.remove_trailing_whitespaces_ignore_empty=false +sp_cleanup.remove_unnecessary_casts=true +sp_cleanup.remove_unnecessary_nls_tags=true +sp_cleanup.remove_unused_imports=true +sp_cleanup.remove_unused_local_variables=false +sp_cleanup.remove_unused_private_fields=true +sp_cleanup.remove_unused_private_members=false +sp_cleanup.remove_unused_private_methods=true +sp_cleanup.remove_unused_private_types=true +sp_cleanup.sort_members=false +sp_cleanup.sort_members_all=false +sp_cleanup.use_anonymous_class_creation=false +sp_cleanup.use_blocks=true +sp_cleanup.use_blocks_only_for_return_and_throw=false +sp_cleanup.use_lambda=true +sp_cleanup.use_parentheses_in_expressions=true +sp_cleanup.use_this_for_non_static_field_access=true +sp_cleanup.use_this_for_non_static_field_access_only_if_necessary=false +sp_cleanup.use_this_for_non_static_method_access=true +sp_cleanup.use_this_for_non_static_method_access_only_if_necessary=false diff --git a/benchmarks/application-kafkastreams-commons/.settings/qa.eclipse.plugin.checkstyle.prefs b/theodolite-benchmarks/flink-commons/.settings/qa.eclipse.plugin.checkstyle.prefs similarity index 100% rename from benchmarks/application-kafkastreams-commons/.settings/qa.eclipse.plugin.checkstyle.prefs rename to theodolite-benchmarks/flink-commons/.settings/qa.eclipse.plugin.checkstyle.prefs diff --git a/benchmarks/application-kafkastreams-commons/.settings/qa.eclipse.plugin.pmd.prefs 
b/theodolite-benchmarks/flink-commons/.settings/qa.eclipse.plugin.pmd.prefs similarity index 100% rename from benchmarks/application-kafkastreams-commons/.settings/qa.eclipse.plugin.pmd.prefs rename to theodolite-benchmarks/flink-commons/.settings/qa.eclipse.plugin.pmd.prefs diff --git a/theodolite-benchmarks/flink-commons/build.gradle b/theodolite-benchmarks/flink-commons/build.gradle new file mode 100644 index 0000000000000000000000000000000000000000..1b0b9359a406bf2ab16fbbe52631877cf360df2a --- /dev/null +++ b/theodolite-benchmarks/flink-commons/build.gradle @@ -0,0 +1,35 @@ +plugins { + id 'theodolite.java-commons' +} + +ext { + flinkVersion = '1.12.0' + scalaBinaryVersion = '2.12' +} + +repositories { + jcenter() + maven { + url "https://oss.sonatype.org/content/repositories/snapshots/" + } + maven { + url 'https://packages.confluent.io/maven/' + } +} + +dependencies { + // Special version required because of https://issues.apache.org/jira/browse/FLINK-13703 + implementation('org.industrial-devops:titan-ccp-common:0.1.0-flink-ready-SNAPSHOT') { changing = true } + implementation('org.industrial-devops:titan-ccp-common-kafka:0.1.0-SNAPSHOT') { changing = true } + implementation 'com.google.guava:guava:30.1-jre' + compile group: 'org.apache.flink', name: "flink-connector-kafka_${scalaBinaryVersion}", version: "${flinkVersion}" + compile group: 'org.apache.flink', name: "flink-statebackend-rocksdb_${scalaBinaryVersion}", version: "${flinkVersion}" + compile group: 'org.apache.flink', name: "flink-runtime_${scalaBinaryVersion}", version: "${flinkVersion}" + compile group: 'org.apache.flink', name: 'flink-java', version: "${flinkVersion}" + compile group: 'org.apache.flink', name: "flink-streaming-java_${scalaBinaryVersion}", version:"${flinkVersion}" + implementation "org.apache.flink:flink-avro:${flinkVersion}" + implementation "org.apache.flink:flink-avro-confluent-registry:${flinkVersion}" + + // Use JUnit test framework + testImplementation 'junit:junit:4.12' + } diff --git a/theodolite-benchmarks/flink-commons/src/main/java/theodolite/commons/flink/ConfigurationKeys.java b/theodolite-benchmarks/flink-commons/src/main/java/theodolite/commons/flink/ConfigurationKeys.java new file mode 100644 index 0000000000000000000000000000000000000000..2847ede440ecd65bdf35fc8e825d0f7b723a3f8f --- /dev/null +++ b/theodolite-benchmarks/flink-commons/src/main/java/theodolite/commons/flink/ConfigurationKeys.java @@ -0,0 +1,19 @@ +package theodolite.commons.flink; + +/** + * Keys to access configuration parameters. 
+ */ +public final class ConfigurationKeys { + + public static final String FLINK_STATE_BACKEND = "flink.state.backend"; + + public static final String FLINK_STATE_BACKEND_PATH = "flink.state.backend.path"; + + public static final String FLINK_STATE_BACKEND_MEMORY_SIZE = // NOPMD + "flink.state.backend.memory.size"; + + public static final String FLINK_CHECKPOINTING = "checkpointing"; + + private ConfigurationKeys() {} + +} diff --git a/theodolite-benchmarks/flink-commons/src/main/java/theodolite/commons/flink/KafkaConnectorFactory.java b/theodolite-benchmarks/flink-commons/src/main/java/theodolite/commons/flink/KafkaConnectorFactory.java new file mode 100644 index 0000000000000000000000000000000000000000..55d73b0fb9274b0ae67468d50b7978799d7e6257 --- /dev/null +++ b/theodolite-benchmarks/flink-commons/src/main/java/theodolite/commons/flink/KafkaConnectorFactory.java @@ -0,0 +1,154 @@ +package theodolite.commons.flink; + +import java.time.Duration; +import java.util.Properties; +import org.apache.avro.specific.SpecificRecord; +import org.apache.flink.api.common.eventtime.WatermarkStrategy; +import org.apache.flink.api.common.serialization.DeserializationSchema; +import org.apache.flink.api.common.typeinfo.TypeInformation; +import org.apache.flink.api.java.tuple.Tuple2; +import org.apache.flink.formats.avro.registry.confluent.ConfluentRegistryAvroDeserializationSchema; +import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer; +import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer; +import org.apache.flink.streaming.connectors.kafka.KafkaDeserializationSchema; +import org.apache.flink.streaming.connectors.kafka.KafkaSerializationSchema; +import org.apache.kafka.clients.consumer.ConsumerConfig; +import org.apache.kafka.clients.producer.ProducerConfig; +import org.apache.kafka.common.serialization.Serde; +import theodolite.commons.flink.serialization.FlinkKafkaKeyValueSerde; +import theodolite.commons.flink.util.SerializableSupplier; + +/** + * A class for creating {@link FlinkKafkaConsumer} and {@link FlinkKafkaProducer}. + */ +public class KafkaConnectorFactory { + + private static final Duration PRODUCER_TRANSACTION_TIMEOUT = Duration.ofMinutes(5); + + private final Properties kafkaProps = new Properties(); + private final boolean checkpointingEnabled; + private final String schemaRegistryUrl; + + /** + * Create a new {@link KafkaConnectorFactory} from the provided parameters. + */ + public KafkaConnectorFactory( + final String appName, + final String bootstrapServers, + final boolean checkpointingEnabled, + final String schemaRegistryUrl) { + this.checkpointingEnabled = checkpointingEnabled; + this.schemaRegistryUrl = schemaRegistryUrl; + this.kafkaProps.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers); + this.kafkaProps.setProperty(ConsumerConfig.GROUP_ID_CONFIG, appName); + } + + /** + * Create a new {@link FlinkKafkaConsumer} that consumes data using a + * {@link DeserializationSchema}. + */ + public <T> FlinkKafkaConsumer<T> createConsumer(final String topic, + final DeserializationSchema<T> deserializationSchema) { + return this.createBaseConsumer( + new FlinkKafkaConsumer<>(topic, deserializationSchema, this.cloneProperties())); + } + + /** + * Create a new {@link FlinkKafkaConsumer} that consumes data using a + * {@link KafkaDeserializationSchema}. 
+ */ + public <T> FlinkKafkaConsumer<T> createConsumer(final String topic, + final KafkaDeserializationSchema<T> deserializationSchema) { + return this.createBaseConsumer( + new FlinkKafkaConsumer<>(topic, deserializationSchema, this.cloneProperties())); + } + + /** + * Create a new {@link FlinkKafkaConsumer} that consumes {@link Tuple2}s using two Kafka + * {@link Serde}s. + */ + public <K, V> FlinkKafkaConsumer<Tuple2<K, V>> createConsumer( + final String topic, + final SerializableSupplier<Serde<K>> kafkaKeySerde, + final SerializableSupplier<Serde<V>> kafkaValueSerde, + final TypeInformation<Tuple2<K, V>> typeInformation) { + return this.<Tuple2<K, V>>createConsumer( + topic, + new FlinkKafkaKeyValueSerde<>( + topic, + kafkaKeySerde, + kafkaValueSerde, + typeInformation)); + } + + /** + * Create a new {@link FlinkKafkaConsumer} that consumes from a topic associated with Confluent + * Schema Registry. + */ + public <T extends SpecificRecord> FlinkKafkaConsumer<T> createConsumer(final String topic, + final Class<T> typeClass) { + // Maybe move to subclass for Confluent-Schema-Registry-specific things + final DeserializationSchema<T> deserializationSchema = + ConfluentRegistryAvroDeserializationSchema.forSpecific(typeClass, this.schemaRegistryUrl); + return this.createConsumer(topic, deserializationSchema); + } + + private <T> FlinkKafkaConsumer<T> createBaseConsumer(final FlinkKafkaConsumer<T> baseConsumer) { + baseConsumer.setStartFromGroupOffsets(); + if (this.checkpointingEnabled) { + baseConsumer.setCommitOffsetsOnCheckpoints(true); // TODO Validate if this is sensible + } + baseConsumer.assignTimestampsAndWatermarks(WatermarkStrategy.forMonotonousTimestamps()); + return baseConsumer; + } + + + /** + * Create a new {@link FlinkKafkaProducer} that produces data using a + * {@link KafkaSerializationSchema}. + */ + public <T> FlinkKafkaProducer<T> createProducer(final String topic, + final KafkaSerializationSchema<T> serializationSchema) { + final Properties producerProps = this.buildProducerProperties(); + return this.createBaseProducer(new FlinkKafkaProducer<>( + topic, serializationSchema, producerProps, FlinkKafkaProducer.Semantic.AT_LEAST_ONCE)); + } + + /** + * Create a new {@link FlinkKafkaProducer} that produces {@link Tuple2}s using two Kafka + * {@link Serde}s. + */ + public <K, V> FlinkKafkaProducer<Tuple2<K, V>> createProducer( + final String topic, + final SerializableSupplier<Serde<K>> kafkaKeySerde, + final SerializableSupplier<Serde<V>> kafkaValueSerde, + final TypeInformation<Tuple2<K, V>> typeInformation) { + return this.createProducer( + topic, + new FlinkKafkaKeyValueSerde<>( + topic, + kafkaKeySerde, + kafkaValueSerde, + typeInformation)); + } + + private <T> FlinkKafkaProducer<T> createBaseProducer(final FlinkKafkaProducer<T> baseProducer) { + baseProducer.setWriteTimestampToKafka(true); + return baseProducer; + } + + private Properties buildProducerProperties() { + final Properties producerProps = this.cloneProperties(); + producerProps.setProperty( + ProducerConfig.TRANSACTION_TIMEOUT_CONFIG, + String.valueOf(PRODUCER_TRANSACTION_TIMEOUT.toMillis())); // TODO necessary? 
+ return producerProps; + } + + private Properties cloneProperties() { + final Properties props = new Properties(); + props.putAll(this.kafkaProps); + return props; + } + +} diff --git a/theodolite-benchmarks/flink-commons/src/main/java/theodolite/commons/flink/StateBackends.java b/theodolite-benchmarks/flink-commons/src/main/java/theodolite/commons/flink/StateBackends.java new file mode 100644 index 0000000000000000000000000000000000000000..a94927e4bf49e1dbe6d109eb8f19f7d292f3d879 --- /dev/null +++ b/theodolite-benchmarks/flink-commons/src/main/java/theodolite/commons/flink/StateBackends.java @@ -0,0 +1,68 @@ +package theodolite.commons.flink; + +import java.io.IOException; +import org.apache.commons.configuration2.Configuration; +import org.apache.flink.contrib.streaming.state.RocksDBStateBackend; +import org.apache.flink.runtime.state.StateBackend; +import org.apache.flink.runtime.state.filesystem.FsStateBackend; +import org.apache.flink.runtime.state.memory.MemoryStateBackend; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Provides factory methods for creating Flink {@link StateBackend}s. + */ +public final class StateBackends { + + public static final String STATE_BACKEND_TYPE_MEMORY = "memory"; + public static final String STATE_BACKEND_TYPE_FILESYSTEM = "filesystem"; + public static final String STATE_BACKEND_TYPE_ROCKSDB = "rocksdb"; + // public static final String STATE_BACKEND_TYPE_DEFAULT = STATE_BACKEND_TYPE_ROCKSDB; + public static final String STATE_BACKEND_TYPE_DEFAULT = STATE_BACKEND_TYPE_MEMORY; + public static final String DEFAULT_STATE_BACKEND_PATH = "file:///opt/flink/statebackend"; + + private static final Logger LOGGER = LoggerFactory.getLogger(StateBackends.class); + + private StateBackends() {} + + /** + * Create a Flink {@link StateBackend} from a {@link Configuration} and the + * {@code ConfigurationKeys#FLINK_STATE_BACKEND}, + * {@code ConfigurationKeys#FLINK_STATE_BACKEND_MEMORY_SIZE} and + * {@code ConfigurationKeys#FLINK_STATE_BACKEND_PATH} configuration keys. Possible options for the + * {@code ConfigurationKeys#FLINK_STATE_BACKEND} configuration are + * {@code #STATE_BACKEND_TYPE_ROCKSDB}, {@code #STATE_BACKEND_TYPE_FILESYSTEM} and + * {@code StateBackendFactory#STATE_BACKEND_TYPE_MEMORY}, where + * {@code StateBackendFactory#STATE_BACKEND_TYPE_ROCKSDB} is the default. 
+ */ + public static StateBackend fromConfiguration(final Configuration configuration) { + final String stateBackendType = + configuration.getString(ConfigurationKeys.FLINK_STATE_BACKEND, STATE_BACKEND_TYPE_DEFAULT); + switch (stateBackendType) { + case STATE_BACKEND_TYPE_MEMORY: + final int memoryStateBackendSize = configuration.getInt( + ConfigurationKeys.FLINK_STATE_BACKEND_MEMORY_SIZE, + MemoryStateBackend.DEFAULT_MAX_STATE_SIZE); + return new MemoryStateBackend(memoryStateBackendSize); + case STATE_BACKEND_TYPE_FILESYSTEM: + final String stateBackendPath = configuration.getString( + ConfigurationKeys.FLINK_STATE_BACKEND_PATH, + DEFAULT_STATE_BACKEND_PATH); + return new FsStateBackend(stateBackendPath); + case STATE_BACKEND_TYPE_ROCKSDB: + final String stateBackendPath2 = configuration.getString( + ConfigurationKeys.FLINK_STATE_BACKEND_PATH, + DEFAULT_STATE_BACKEND_PATH); + try { + return new RocksDBStateBackend(stateBackendPath2, true); + } catch (final IOException e) { + LOGGER.error("Cannot create RocksDB state backend.", e); + throw new IllegalStateException(e); + } + default: + throw new IllegalArgumentException( + "Unsupported state backend '" + stateBackendType + "' configured."); + } + } + +} diff --git a/theodolite-benchmarks/flink-commons/src/main/java/theodolite/commons/flink/TupleType.java b/theodolite-benchmarks/flink-commons/src/main/java/theodolite/commons/flink/TupleType.java new file mode 100644 index 0000000000000000000000000000000000000000..360331e4d1e4fdc47a24ac8ae995b7590301f7fd --- /dev/null +++ b/theodolite-benchmarks/flink-commons/src/main/java/theodolite/commons/flink/TupleType.java @@ -0,0 +1,22 @@ +package theodolite.commons.flink; + +import org.apache.flink.api.common.typeinfo.TypeInformation; +import org.apache.flink.api.common.typeinfo.Types; +import org.apache.flink.api.java.tuple.Tuple; +import org.apache.flink.api.java.tuple.Tuple2; + +/** + * Helper methods for creating {@link TypeInformation} for {@link Tuple}s. In contrast to + * {@code Types#TUPLE(TypeInformation...)}, these methods bring real type safety. 
+ */ +public final class TupleType { + + private TupleType() {} + + public static <T1, T2> TypeInformation<Tuple2<T1, T2>> of(// NOPMD + final TypeInformation<T1> t0, + final TypeInformation<T2> t1) { + return Types.TUPLE(t0, t1); + } + +} diff --git a/theodolite-benchmarks/flink-commons/src/main/java/theodolite/commons/flink/serialization/FlinkKafkaKeyValueSerde.java b/theodolite-benchmarks/flink-commons/src/main/java/theodolite/commons/flink/serialization/FlinkKafkaKeyValueSerde.java new file mode 100644 index 0000000000000000000000000000000000000000..22f615a6af4caf575af57dbe9b7f989889c4095f --- /dev/null +++ b/theodolite-benchmarks/flink-commons/src/main/java/theodolite/commons/flink/serialization/FlinkKafkaKeyValueSerde.java @@ -0,0 +1,80 @@ +package theodolite.commons.flink.serialization; + +import javax.annotation.Nullable; +import org.apache.flink.api.common.typeinfo.TypeInformation; +import org.apache.flink.api.java.tuple.Tuple2; +import org.apache.flink.streaming.connectors.kafka.KafkaDeserializationSchema; +import org.apache.flink.streaming.connectors.kafka.KafkaSerializationSchema; +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.apache.kafka.common.serialization.Serde; +import theodolite.commons.flink.util.SerializableSupplier; + +/** + * A {@link KafkaSerializationSchema} and {@link KafkaDeserializationSchema} for an arbitrary + * key-value-pair in Kafka, mapped to/from a Flink {@link Tuple2}. + * + * @param <K> Type of the key. + * @param <V> Type of the value. + */ +public class FlinkKafkaKeyValueSerde<K, V> + implements KafkaDeserializationSchema<Tuple2<K, V>>, KafkaSerializationSchema<Tuple2<K, V>> { + + private static final long serialVersionUID = 2469569396501933443L; // NOPMD + + private final SerializableSupplier<Serde<K>> keySerdeSupplier; + private final SerializableSupplier<Serde<V>> valueSerdeSupplier; + private final String topic; + private final TypeInformation<Tuple2<K, V>> typeInfo; + + private transient Serde<K> keySerde; + private transient Serde<V> valueSerde; + + /** + * Create a new {@link FlinkKafkaKeyValueSerde}. 
+ */ + public FlinkKafkaKeyValueSerde(final String topic, + final SerializableSupplier<Serde<K>> keySerdeSupplier, + final SerializableSupplier<Serde<V>> valueSerdeSupplier, + final TypeInformation<Tuple2<K, V>> typeInfo) { + this.topic = topic; + this.typeInfo = typeInfo; + this.keySerdeSupplier = keySerdeSupplier; + this.valueSerdeSupplier = valueSerdeSupplier; + } + + @Override + public boolean isEndOfStream(final Tuple2<K, V> nextElement) { + return false; + } + + @Override + public Tuple2<K, V> deserialize(final ConsumerRecord<byte[], byte[]> record) { + this.ensureInitialized(); + final K key = this.keySerde.deserializer().deserialize(this.topic, record.key()); + final V value = this.valueSerde.deserializer().deserialize(this.topic, record.value()); + return new Tuple2<>(key, value); + } + + @Override + public TypeInformation<Tuple2<K, V>> getProducedType() { + return this.typeInfo; + } + + @Override + public ProducerRecord<byte[], byte[]> serialize(final Tuple2<K, V> element, + @Nullable final Long timestamp) { + this.ensureInitialized(); + final byte[] key = this.keySerde.serializer().serialize(this.topic, element.f0); + final byte[] value = this.valueSerde.serializer().serialize(this.topic, element.f1); + return new ProducerRecord<>(this.topic, key, value); + } + + private void ensureInitialized() { + if (this.keySerde == null || this.valueSerde == null) { + this.keySerde = this.keySerdeSupplier.get(); + this.valueSerde = this.valueSerdeSupplier.get(); + } + } + +} diff --git a/theodolite-benchmarks/flink-commons/src/main/java/theodolite/commons/flink/serialization/StatsSerializer.java b/theodolite-benchmarks/flink-commons/src/main/java/theodolite/commons/flink/serialization/StatsSerializer.java new file mode 100644 index 0000000000000000000000000000000000000000..f1f9870fda73ccec0fc25c5c70665759ab07d893 --- /dev/null +++ b/theodolite-benchmarks/flink-commons/src/main/java/theodolite/commons/flink/serialization/StatsSerializer.java @@ -0,0 +1,30 @@ +package theodolite.commons.flink.serialization; + +import com.esotericsoftware.kryo.Kryo; +import com.esotericsoftware.kryo.Serializer; +import com.esotericsoftware.kryo.io.Input; +import com.esotericsoftware.kryo.io.Output; +import com.google.common.math.Stats; + +import java.io.Serializable; + +/** + * Custom Kryo {@link Serializer} for efficient transmission between Flink instances. 
+ */ +public class StatsSerializer extends Serializer<Stats> implements Serializable { + + private static final long serialVersionUID = -1276866176534267373L; //NOPMD + + @Override + public void write(final Kryo kryo, final Output output, final Stats object) { + final byte[] data = object.toByteArray(); + output.writeInt(data.length); + output.writeBytes(data); + } + + @Override + public Stats read(final Kryo kryo, final Input input, final Class<Stats> type) { + final int numBytes = input.readInt(); + return Stats.fromByteArray(input.readBytes(numBytes)); + } +} diff --git a/theodolite-benchmarks/flink-commons/src/main/java/theodolite/commons/flink/util/SerializableSupplier.java b/theodolite-benchmarks/flink-commons/src/main/java/theodolite/commons/flink/util/SerializableSupplier.java new file mode 100644 index 0000000000000000000000000000000000000000..bcc51a9ef7b8bb0f36398ea401f1d2c898472081 --- /dev/null +++ b/theodolite-benchmarks/flink-commons/src/main/java/theodolite/commons/flink/util/SerializableSupplier.java @@ -0,0 +1,13 @@ +package theodolite.commons.flink.util; + +import java.io.Serializable; +import java.util.function.Supplier; + +/** + * Interface for {@link Supplier}s which are serializable. + * + * @param <T> the type of results supplied by this supplier + */ +public interface SerializableSupplier<T> extends Supplier<T>, Serializable { // NOPMD + // Nothing to do here +} diff --git a/theodolite-benchmarks/gradle/wrapper/gradle-wrapper.jar b/theodolite-benchmarks/gradle/wrapper/gradle-wrapper.jar new file mode 100644 index 0000000000000000000000000000000000000000..e708b1c023ec8b20f512888fe07c5bd3ff77bb8f Binary files /dev/null and b/theodolite-benchmarks/gradle/wrapper/gradle-wrapper.jar differ diff --git a/benchmarks/gradle/wrapper/gradle-wrapper.properties b/theodolite-benchmarks/gradle/wrapper/gradle-wrapper.properties similarity index 92% rename from benchmarks/gradle/wrapper/gradle-wrapper.properties rename to theodolite-benchmarks/gradle/wrapper/gradle-wrapper.properties index 4d9ca1649142b0c20144adce78e2472e2da01c30..442d9132ea32808ad980df4bd233b359f76341a7 100644 --- a/benchmarks/gradle/wrapper/gradle-wrapper.properties +++ b/theodolite-benchmarks/gradle/wrapper/gradle-wrapper.properties @@ -1,5 +1,5 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-6.7.1-bin.zip +distributionUrl=https\://services.gradle.org/distributions/gradle-6.8.3-bin.zip zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists diff --git a/benchmarks/gradlew b/theodolite-benchmarks/gradlew similarity index 75% rename from benchmarks/gradlew rename to theodolite-benchmarks/gradlew index af6708ff229fda75da4f7cc4da4747217bac4d53..4f906e0c811fc9e230eb44819f509cd0627f2600 100755 --- a/benchmarks/gradlew +++ b/theodolite-benchmarks/gradlew @@ -1,5 +1,21 @@ #!/usr/bin/env sh +# +# Copyright 2015 the original author or authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
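The serde classes above (FlinkKafkaKeyValueSerde, StatsSerializer) and the SerializableSupplier interface are meant to be wired together when building Kafka sources and sinks for Flink. A minimal usage sketch, assuming String serdes and an illustrative topic name (neither is taken from this change set):

import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.kafka.common.serialization.Serdes;
import theodolite.commons.flink.serialization.FlinkKafkaKeyValueSerde;

public final class KeyValueSerdeExample {

  private KeyValueSerdeExample() {}

  public static FlinkKafkaKeyValueSerde<String, String> createExampleSerde() {
    // Types.TUPLE (or the TupleType.of helper above) supplies the Flink type information for the pair.
    final TypeInformation<Tuple2<String, String>> typeInfo = Types.TUPLE(Types.STRING, Types.STRING);
    // Method references such as Serdes::String satisfy SerializableSupplier, keeping the serde serializable.
    return new FlinkKafkaKeyValueSerde<>("example-topic", Serdes::String, Serdes::String, typeInfo);
  }
}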
+# + ############################################################################## ## ## Gradle start up script for UN*X @@ -28,7 +44,7 @@ APP_NAME="Gradle" APP_BASE_NAME=`basename "$0"` # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. -DEFAULT_JVM_OPTS='"-Xmx64m"' +DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' # Use the maximum available, or set MAX_FD != -1 to use that value. MAX_FD="maximum" @@ -66,6 +82,7 @@ esac CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar + # Determine the Java command to use to start the JVM. if [ -n "$JAVA_HOME" ] ; then if [ -x "$JAVA_HOME/jre/sh/java" ] ; then @@ -109,10 +126,11 @@ if $darwin; then GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\"" fi -# For Cygwin, switch paths to Windows format before running java -if $cygwin ; then +# For Cygwin or MSYS, switch paths to Windows format before running java +if [ "$cygwin" = "true" -o "$msys" = "true" ] ; then APP_HOME=`cygpath --path --mixed "$APP_HOME"` CLASSPATH=`cygpath --path --mixed "$CLASSPATH"` + JAVACMD=`cygpath --unix "$JAVACMD"` # We build the pattern for arguments to be converted via cygpath @@ -138,19 +156,19 @@ if $cygwin ; then else eval `echo args$i`="\"$arg\"" fi - i=$((i+1)) + i=`expr $i + 1` done case $i in - (0) set -- ;; - (1) set -- "$args0" ;; - (2) set -- "$args0" "$args1" ;; - (3) set -- "$args0" "$args1" "$args2" ;; - (4) set -- "$args0" "$args1" "$args2" "$args3" ;; - (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; - (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; - (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;; - (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;; - (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;; + 0) set -- ;; + 1) set -- "$args0" ;; + 2) set -- "$args0" "$args1" ;; + 3) set -- "$args0" "$args1" "$args2" ;; + 4) set -- "$args0" "$args1" "$args2" "$args3" ;; + 5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; + 6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; + 7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;; + 8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;; + 9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;; esac fi @@ -159,14 +177,9 @@ save () { for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done echo " " } -APP_ARGS=$(save "$@") +APP_ARGS=`save "$@"` # Collect all arguments for the java command, following the shell quoting and substitution rules eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS" -# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong -if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then - cd "$(dirname "$0")" -fi - exec "$JAVACMD" "$@" diff --git a/benchmarks/gradlew.bat b/theodolite-benchmarks/gradlew.bat similarity index 64% rename from benchmarks/gradlew.bat rename to theodolite-benchmarks/gradlew.bat index 0f8d5937c4ad18feb44a19e55ad1e37cc159260f..ac1b06f93825db68fb0c0b5150917f340eaa5d02 100644 --- a/benchmarks/gradlew.bat +++ b/theodolite-benchmarks/gradlew.bat @@ -1,3 +1,19 @@ +@rem +@rem Copyright 2015 the original author or authors. 
+@rem +@rem Licensed under the Apache License, Version 2.0 (the "License"); +@rem you may not use this file except in compliance with the License. +@rem You may obtain a copy of the License at +@rem +@rem https://www.apache.org/licenses/LICENSE-2.0 +@rem +@rem Unless required by applicable law or agreed to in writing, software +@rem distributed under the License is distributed on an "AS IS" BASIS, +@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +@rem See the License for the specific language governing permissions and +@rem limitations under the License. +@rem + @if "%DEBUG%" == "" @echo off @rem ########################################################################## @rem @@ -13,15 +29,18 @@ if "%DIRNAME%" == "" set DIRNAME=. set APP_BASE_NAME=%~n0 set APP_HOME=%DIRNAME% +@rem Resolve any "." and ".." in APP_HOME to make it shorter. +for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi + @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. -set DEFAULT_JVM_OPTS="-Xmx64m" +set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m" @rem Find java.exe if defined JAVA_HOME goto findJavaFromJavaHome set JAVA_EXE=java.exe %JAVA_EXE% -version >NUL 2>&1 -if "%ERRORLEVEL%" == "0" goto init +if "%ERRORLEVEL%" == "0" goto execute echo. echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. @@ -35,7 +54,7 @@ goto fail set JAVA_HOME=%JAVA_HOME:"=% set JAVA_EXE=%JAVA_HOME%/bin/java.exe -if exist "%JAVA_EXE%" goto init +if exist "%JAVA_EXE%" goto execute echo. echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% @@ -45,28 +64,14 @@ echo location of your Java installation. goto fail -:init -@rem Get command-line arguments, handling Windows variants - -if not "%OS%" == "Windows_NT" goto win9xME_args - -:win9xME_args -@rem Slurp the command line arguments. 
-set CMD_LINE_ARGS= -set _SKIP=2 - -:win9xME_args_slurp -if "x%~1" == "x" goto execute - -set CMD_LINE_ARGS=%* - :execute @rem Setup the command line set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar + @rem Execute Gradle -"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS% +"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %* :end @rem End local scope for the variables with windows NT shell diff --git a/benchmarks/application-kafkastreams-commons/.settings/org.eclipse.jdt.ui.prefs b/theodolite-benchmarks/kstreams-commons/.settings/org.eclipse.jdt.ui.prefs similarity index 100% rename from benchmarks/application-kafkastreams-commons/.settings/org.eclipse.jdt.ui.prefs rename to theodolite-benchmarks/kstreams-commons/.settings/org.eclipse.jdt.ui.prefs diff --git a/benchmarks/uc1-application/.settings/qa.eclipse.plugin.checkstyle.prefs b/theodolite-benchmarks/kstreams-commons/.settings/qa.eclipse.plugin.checkstyle.prefs similarity index 100% rename from benchmarks/uc1-application/.settings/qa.eclipse.plugin.checkstyle.prefs rename to theodolite-benchmarks/kstreams-commons/.settings/qa.eclipse.plugin.checkstyle.prefs diff --git a/benchmarks/uc1-application/.settings/qa.eclipse.plugin.pmd.prefs b/theodolite-benchmarks/kstreams-commons/.settings/qa.eclipse.plugin.pmd.prefs similarity index 100% rename from benchmarks/uc1-application/.settings/qa.eclipse.plugin.pmd.prefs rename to theodolite-benchmarks/kstreams-commons/.settings/qa.eclipse.plugin.pmd.prefs diff --git a/theodolite-benchmarks/kstreams-commons/build.gradle b/theodolite-benchmarks/kstreams-commons/build.gradle new file mode 100644 index 0000000000000000000000000000000000000000..e177aa5c4770c1a77fc21084a7766741fdb9bdec --- /dev/null +++ b/theodolite-benchmarks/kstreams-commons/build.gradle @@ -0,0 +1,21 @@ +plugins { + id 'theodolite.java-commons' +} + +repositories { + jcenter() + maven { + url "https://oss.sonatype.org/content/repositories/snapshots/" + } +} + +dependencies { + // These dependencies are used internally, and not exposed to consumers on their own compile classpath. 
+ // implementation 'org.slf4j:slf4j-simple:1.7.25' + implementation('org.industrial-devops:titan-ccp-common:0.1.0-SNAPSHOT') { changing = true } + implementation('org.industrial-devops:titan-ccp-common-kafka:0.1.0-SNAPSHOT') { changing = true } + implementation 'org.apache.kafka:kafka-streams:2.6.0' + + // Use JUnit test framework + testImplementation 'junit:junit:4.12' +} diff --git a/benchmarks/application-kafkastreams-commons/src/main/java/theodolite/commons/kafkastreams/ConfigurationKeys.java b/theodolite-benchmarks/kstreams-commons/src/main/java/theodolite/commons/kafkastreams/ConfigurationKeys.java similarity index 100% rename from benchmarks/application-kafkastreams-commons/src/main/java/theodolite/commons/kafkastreams/ConfigurationKeys.java rename to theodolite-benchmarks/kstreams-commons/src/main/java/theodolite/commons/kafkastreams/ConfigurationKeys.java diff --git a/benchmarks/application-kafkastreams-commons/src/main/java/theodolite/commons/kafkastreams/KafkaStreamsBuilder.java b/theodolite-benchmarks/kstreams-commons/src/main/java/theodolite/commons/kafkastreams/KafkaStreamsBuilder.java similarity index 100% rename from benchmarks/application-kafkastreams-commons/src/main/java/theodolite/commons/kafkastreams/KafkaStreamsBuilder.java rename to theodolite-benchmarks/kstreams-commons/src/main/java/theodolite/commons/kafkastreams/KafkaStreamsBuilder.java diff --git a/benchmarks/uc1-application/.settings/org.eclipse.jdt.ui.prefs b/theodolite-benchmarks/load-generator-commons/.settings/org.eclipse.jdt.ui.prefs similarity index 100% rename from benchmarks/uc1-application/.settings/org.eclipse.jdt.ui.prefs rename to theodolite-benchmarks/load-generator-commons/.settings/org.eclipse.jdt.ui.prefs diff --git a/benchmarks/uc1-workload-generator/.settings/qa.eclipse.plugin.checkstyle.prefs b/theodolite-benchmarks/load-generator-commons/.settings/qa.eclipse.plugin.checkstyle.prefs similarity index 100% rename from benchmarks/uc1-workload-generator/.settings/qa.eclipse.plugin.checkstyle.prefs rename to theodolite-benchmarks/load-generator-commons/.settings/qa.eclipse.plugin.checkstyle.prefs diff --git a/benchmarks/uc1-workload-generator/.settings/qa.eclipse.plugin.pmd.prefs b/theodolite-benchmarks/load-generator-commons/.settings/qa.eclipse.plugin.pmd.prefs similarity index 100% rename from benchmarks/uc1-workload-generator/.settings/qa.eclipse.plugin.pmd.prefs rename to theodolite-benchmarks/load-generator-commons/.settings/qa.eclipse.plugin.pmd.prefs diff --git a/theodolite-benchmarks/load-generator-commons/build.gradle b/theodolite-benchmarks/load-generator-commons/build.gradle new file mode 100644 index 0000000000000000000000000000000000000000..118f3e648f829a3eafe719ddf660d35ac8563574 --- /dev/null +++ b/theodolite-benchmarks/load-generator-commons/build.gradle @@ -0,0 +1,26 @@ +plugins { + id 'theodolite.java-commons' +} + +repositories { + jcenter() + maven { + url "https://oss.sonatype.org/content/repositories/snapshots/" + } + maven { + url 'https://packages.confluent.io/maven/' + } +} + +dependencies { + implementation 'com.google.guava:guava:30.1-jre' + implementation 'com.hazelcast:hazelcast:4.1.1' + implementation 'com.hazelcast:hazelcast-kubernetes:2.2.1' + implementation 'org.slf4j:slf4j-simple:1.7.25' + implementation('org.industrial-devops:titan-ccp-common:0.1.0-SNAPSHOT') { changing = true } + implementation('org.industrial-devops:titan-ccp-common-kafka:0.1.0-SNAPSHOT') { changing = true } + implementation 'org.apache.kafka:kafka-streams:2.6.0' // TODO required? 
+ + // Use JUnit test framework + testImplementation 'junit:junit:4.12' +} diff --git a/benchmarks/workload-generator-commons/src/main/java/theodolite/commons/workloadgeneration/BeforeAction.java b/theodolite-benchmarks/load-generator-commons/src/main/java/theodolite/commons/workloadgeneration/BeforeAction.java similarity index 100% rename from benchmarks/workload-generator-commons/src/main/java/theodolite/commons/workloadgeneration/BeforeAction.java rename to theodolite-benchmarks/load-generator-commons/src/main/java/theodolite/commons/workloadgeneration/BeforeAction.java diff --git a/benchmarks/workload-generator-commons/src/main/java/theodolite/commons/workloadgeneration/ClusterConfig.java b/theodolite-benchmarks/load-generator-commons/src/main/java/theodolite/commons/workloadgeneration/ClusterConfig.java similarity index 100% rename from benchmarks/workload-generator-commons/src/main/java/theodolite/commons/workloadgeneration/ClusterConfig.java rename to theodolite-benchmarks/load-generator-commons/src/main/java/theodolite/commons/workloadgeneration/ClusterConfig.java diff --git a/benchmarks/workload-generator-commons/src/main/java/theodolite/commons/workloadgeneration/ConfigurationKeys.java b/theodolite-benchmarks/load-generator-commons/src/main/java/theodolite/commons/workloadgeneration/ConfigurationKeys.java similarity index 100% rename from benchmarks/workload-generator-commons/src/main/java/theodolite/commons/workloadgeneration/ConfigurationKeys.java rename to theodolite-benchmarks/load-generator-commons/src/main/java/theodolite/commons/workloadgeneration/ConfigurationKeys.java diff --git a/benchmarks/workload-generator-commons/src/main/java/theodolite/commons/workloadgeneration/HazelcastRunner.java b/theodolite-benchmarks/load-generator-commons/src/main/java/theodolite/commons/workloadgeneration/HazelcastRunner.java similarity index 100% rename from benchmarks/workload-generator-commons/src/main/java/theodolite/commons/workloadgeneration/HazelcastRunner.java rename to theodolite-benchmarks/load-generator-commons/src/main/java/theodolite/commons/workloadgeneration/HazelcastRunner.java diff --git a/benchmarks/workload-generator-commons/src/main/java/theodolite/commons/workloadgeneration/HazelcastRunnerStateInstance.java b/theodolite-benchmarks/load-generator-commons/src/main/java/theodolite/commons/workloadgeneration/HazelcastRunnerStateInstance.java similarity index 100% rename from benchmarks/workload-generator-commons/src/main/java/theodolite/commons/workloadgeneration/HazelcastRunnerStateInstance.java rename to theodolite-benchmarks/load-generator-commons/src/main/java/theodolite/commons/workloadgeneration/HazelcastRunnerStateInstance.java diff --git a/benchmarks/workload-generator-commons/src/main/java/theodolite/commons/workloadgeneration/KafkaRecordSender.java b/theodolite-benchmarks/load-generator-commons/src/main/java/theodolite/commons/workloadgeneration/KafkaRecordSender.java similarity index 93% rename from benchmarks/workload-generator-commons/src/main/java/theodolite/commons/workloadgeneration/KafkaRecordSender.java rename to theodolite-benchmarks/load-generator-commons/src/main/java/theodolite/commons/workloadgeneration/KafkaRecordSender.java index dd17234bf1adb1f0fcf3ff3ab134a0743b917369..6e4a43271fbf1e0193c2d39569a0814d1f7935cd 100644 --- a/benchmarks/workload-generator-commons/src/main/java/theodolite/commons/workloadgeneration/KafkaRecordSender.java +++ 
b/theodolite-benchmarks/load-generator-commons/src/main/java/theodolite/commons/workloadgeneration/KafkaRecordSender.java @@ -6,6 +6,7 @@ import org.apache.avro.specific.SpecificRecord; import org.apache.kafka.clients.producer.KafkaProducer; import org.apache.kafka.clients.producer.Producer; import org.apache.kafka.clients.producer.ProducerRecord; +import org.apache.kafka.common.errors.SerializationException; import org.apache.kafka.common.serialization.StringSerializer; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -116,7 +117,13 @@ public class KafkaRecordSender<T extends SpecificRecord> implements RecordSender this.keyAccessor.apply(monitoringRecord), monitoringRecord); LOGGER.debug("Send record to Kafka topic {}: {}", this.topic, record); - this.producer.send(record); + try { + this.producer.send(record); + } catch (final SerializationException e) { + LOGGER.warn( + "Record could not be serialized and thus not sent to Kafka due to exception. Skipping this record.", // NOCS + e); + } } public void terminate() { diff --git a/benchmarks/workload-generator-commons/src/main/java/theodolite/commons/workloadgeneration/KeySpace.java b/theodolite-benchmarks/load-generator-commons/src/main/java/theodolite/commons/workloadgeneration/KeySpace.java similarity index 100% rename from benchmarks/workload-generator-commons/src/main/java/theodolite/commons/workloadgeneration/KeySpace.java rename to theodolite-benchmarks/load-generator-commons/src/main/java/theodolite/commons/workloadgeneration/KeySpace.java diff --git a/benchmarks/workload-generator-commons/src/main/java/theodolite/commons/workloadgeneration/LoadGenerator.java b/theodolite-benchmarks/load-generator-commons/src/main/java/theodolite/commons/workloadgeneration/LoadGenerator.java similarity index 100% rename from benchmarks/workload-generator-commons/src/main/java/theodolite/commons/workloadgeneration/LoadGenerator.java rename to theodolite-benchmarks/load-generator-commons/src/main/java/theodolite/commons/workloadgeneration/LoadGenerator.java diff --git a/benchmarks/workload-generator-commons/src/main/java/theodolite/commons/workloadgeneration/LoadGeneratorConfig.java b/theodolite-benchmarks/load-generator-commons/src/main/java/theodolite/commons/workloadgeneration/LoadGeneratorConfig.java similarity index 100% rename from benchmarks/workload-generator-commons/src/main/java/theodolite/commons/workloadgeneration/LoadGeneratorConfig.java rename to theodolite-benchmarks/load-generator-commons/src/main/java/theodolite/commons/workloadgeneration/LoadGeneratorConfig.java diff --git a/benchmarks/workload-generator-commons/src/main/java/theodolite/commons/workloadgeneration/LoadGeneratorExecution.java b/theodolite-benchmarks/load-generator-commons/src/main/java/theodolite/commons/workloadgeneration/LoadGeneratorExecution.java similarity index 100% rename from benchmarks/workload-generator-commons/src/main/java/theodolite/commons/workloadgeneration/LoadGeneratorExecution.java rename to theodolite-benchmarks/load-generator-commons/src/main/java/theodolite/commons/workloadgeneration/LoadGeneratorExecution.java diff --git a/benchmarks/workload-generator-commons/src/main/java/theodolite/commons/workloadgeneration/MessageGenerator.java b/theodolite-benchmarks/load-generator-commons/src/main/java/theodolite/commons/workloadgeneration/MessageGenerator.java similarity index 100% rename from benchmarks/workload-generator-commons/src/main/java/theodolite/commons/workloadgeneration/MessageGenerator.java rename to 
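With the guard added to KafkaRecordSender above, a record that cannot be serialized (for instance because the Schema Registry rejects or cannot provide the schema) is logged and skipped instead of terminating the whole load generator. The same pattern in isolation, sketched with a hypothetical sendGuarded helper:

import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.errors.SerializationException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

final class GuardedSend {

  private static final Logger LOGGER = LoggerFactory.getLogger(GuardedSend.class);

  private GuardedSend() {}

  static <K, V> void sendGuarded(final Producer<K, V> producer, final ProducerRecord<K, V> record) {
    try {
      // KafkaProducer serializes the key and value inside send(), so this is where the exception surfaces.
      producer.send(record);
    } catch (final SerializationException e) {
      LOGGER.warn("Record could not be serialized and is skipped: {}", record, e);
    }
  }
}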
theodolite-benchmarks/load-generator-commons/src/main/java/theodolite/commons/workloadgeneration/MessageGenerator.java diff --git a/benchmarks/workload-generator-commons/src/main/java/theodolite/commons/workloadgeneration/RecordGenerator.java b/theodolite-benchmarks/load-generator-commons/src/main/java/theodolite/commons/workloadgeneration/RecordGenerator.java similarity index 100% rename from benchmarks/workload-generator-commons/src/main/java/theodolite/commons/workloadgeneration/RecordGenerator.java rename to theodolite-benchmarks/load-generator-commons/src/main/java/theodolite/commons/workloadgeneration/RecordGenerator.java diff --git a/benchmarks/workload-generator-commons/src/main/java/theodolite/commons/workloadgeneration/RecordSender.java b/theodolite-benchmarks/load-generator-commons/src/main/java/theodolite/commons/workloadgeneration/RecordSender.java similarity index 100% rename from benchmarks/workload-generator-commons/src/main/java/theodolite/commons/workloadgeneration/RecordSender.java rename to theodolite-benchmarks/load-generator-commons/src/main/java/theodolite/commons/workloadgeneration/RecordSender.java diff --git a/benchmarks/workload-generator-commons/src/main/java/theodolite/commons/workloadgeneration/TitanMessageGeneratorFactory.java b/theodolite-benchmarks/load-generator-commons/src/main/java/theodolite/commons/workloadgeneration/TitanMessageGeneratorFactory.java similarity index 100% rename from benchmarks/workload-generator-commons/src/main/java/theodolite/commons/workloadgeneration/TitanMessageGeneratorFactory.java rename to theodolite-benchmarks/load-generator-commons/src/main/java/theodolite/commons/workloadgeneration/TitanMessageGeneratorFactory.java diff --git a/benchmarks/workload-generator-commons/src/main/java/theodolite/commons/workloadgeneration/WorkloadDefinition.java b/theodolite-benchmarks/load-generator-commons/src/main/java/theodolite/commons/workloadgeneration/WorkloadDefinition.java similarity index 100% rename from benchmarks/workload-generator-commons/src/main/java/theodolite/commons/workloadgeneration/WorkloadDefinition.java rename to theodolite-benchmarks/load-generator-commons/src/main/java/theodolite/commons/workloadgeneration/WorkloadDefinition.java diff --git a/benchmarks/workload-generator-commons/src/test/java/theodolite/commons/workloadgeneration/KeySpaceTest.java b/theodolite-benchmarks/load-generator-commons/src/test/java/theodolite/commons/workloadgeneration/KeySpaceTest.java similarity index 100% rename from benchmarks/workload-generator-commons/src/test/java/theodolite/commons/workloadgeneration/KeySpaceTest.java rename to theodolite-benchmarks/load-generator-commons/src/test/java/theodolite/commons/workloadgeneration/KeySpaceTest.java diff --git a/benchmarks/workload-generator-commons/src/test/java/theodolite/commons/workloadgeneration/WorkloadDefinitionTest.java b/theodolite-benchmarks/load-generator-commons/src/test/java/theodolite/commons/workloadgeneration/WorkloadDefinitionTest.java similarity index 100% rename from benchmarks/workload-generator-commons/src/test/java/theodolite/commons/workloadgeneration/WorkloadDefinitionTest.java rename to theodolite-benchmarks/load-generator-commons/src/test/java/theodolite/commons/workloadgeneration/WorkloadDefinitionTest.java diff --git a/theodolite-benchmarks/settings.gradle b/theodolite-benchmarks/settings.gradle new file mode 100644 index 0000000000000000000000000000000000000000..5602e816bb21dce72162b085de99836b8f9aea1e --- /dev/null +++ b/theodolite-benchmarks/settings.gradle @@ 
-0,0 +1,21 @@ +rootProject.name = 'theodolite-benchmarks' + +include 'load-generator-commons' +include 'kstreams-commons' +include 'flink-commons' + +include 'uc1-load-generator' +include 'uc1-kstreams' +include 'uc1-flink' + +include 'uc2-load-generator' +include 'uc2-kstreams' +include 'uc2-flink' + +include 'uc3-load-generator' +include 'uc3-kstreams' +include 'uc3-flink' + +include 'uc4-load-generator' +include 'uc4-kstreams' +include 'uc4-flink' diff --git a/benchmarks/uc2-application/.settings/org.eclipse.jdt.ui.prefs b/theodolite-benchmarks/uc1-flink/.settings/org.eclipse.jdt.ui.prefs similarity index 100% rename from benchmarks/uc2-application/.settings/org.eclipse.jdt.ui.prefs rename to theodolite-benchmarks/uc1-flink/.settings/org.eclipse.jdt.ui.prefs diff --git a/benchmarks/uc2-application/.settings/qa.eclipse.plugin.checkstyle.prefs b/theodolite-benchmarks/uc1-flink/.settings/qa.eclipse.plugin.checkstyle.prefs similarity index 100% rename from benchmarks/uc2-application/.settings/qa.eclipse.plugin.checkstyle.prefs rename to theodolite-benchmarks/uc1-flink/.settings/qa.eclipse.plugin.checkstyle.prefs diff --git a/benchmarks/uc2-application/.settings/qa.eclipse.plugin.pmd.prefs b/theodolite-benchmarks/uc1-flink/.settings/qa.eclipse.plugin.pmd.prefs similarity index 100% rename from benchmarks/uc2-application/.settings/qa.eclipse.plugin.pmd.prefs rename to theodolite-benchmarks/uc1-flink/.settings/qa.eclipse.plugin.pmd.prefs diff --git a/theodolite-benchmarks/uc1-flink/Dockerfile b/theodolite-benchmarks/uc1-flink/Dockerfile new file mode 100644 index 0000000000000000000000000000000000000000..795b9e343a03cf0209e1625f5cbc3d45dcb77cda --- /dev/null +++ b/theodolite-benchmarks/uc1-flink/Dockerfile @@ -0,0 +1,3 @@ +FROM flink:1.12-scala_2.12-java11 + +ADD build/libs/uc1-flink-all.jar /opt/flink/usrlib/artifacts/uc1-flink-all.jar diff --git a/theodolite-benchmarks/uc1-flink/build.gradle b/theodolite-benchmarks/uc1-flink/build.gradle new file mode 100644 index 0000000000000000000000000000000000000000..8a2a359c4840e67581f7bc24f1544ff519f82525 --- /dev/null +++ b/theodolite-benchmarks/uc1-flink/build.gradle @@ -0,0 +1,5 @@ +plugins { + id 'theodolite.flink' +} + +mainClassName = "theodolite.uc1.application.HistoryServiceFlinkJob" diff --git a/theodolite-benchmarks/uc1-flink/src/main/java/theodolite/uc1/application/ConfigurationKeys.java b/theodolite-benchmarks/uc1-flink/src/main/java/theodolite/uc1/application/ConfigurationKeys.java new file mode 100644 index 0000000000000000000000000000000000000000..ed961bab733a409dc07b1be7fa35562103c3e2f4 --- /dev/null +++ b/theodolite-benchmarks/uc1-flink/src/main/java/theodolite/uc1/application/ConfigurationKeys.java @@ -0,0 +1,24 @@ +package theodolite.uc1.application; + +/** + * Keys to access configuration parameters. 
+ */ +public final class ConfigurationKeys { + + public static final String APPLICATION_NAME = "application.name"; + + public static final String APPLICATION_VERSION = "application.version"; + + public static final String COMMIT_INTERVAL_MS = "commit.interval.ms"; + + public static final String KAFKA_BOOTSTRAP_SERVERS = "kafka.bootstrap.servers"; + + public static final String KAFKA_INPUT_TOPIC = "kafka.input.topic"; + + public static final String SCHEMA_REGISTRY_URL = "schema.registry.url"; + + public static final String CHECKPOINTING = "checkpointing"; + + private ConfigurationKeys() {} + +} diff --git a/theodolite-benchmarks/uc1-flink/src/main/java/theodolite/uc1/application/GsonMapper.java b/theodolite-benchmarks/uc1-flink/src/main/java/theodolite/uc1/application/GsonMapper.java new file mode 100644 index 0000000000000000000000000000000000000000..831db7fe63be6529e6b7ba299dca92b138ff7d13 --- /dev/null +++ b/theodolite-benchmarks/uc1-flink/src/main/java/theodolite/uc1/application/GsonMapper.java @@ -0,0 +1,22 @@ +package theodolite.uc1.application; + +import com.google.gson.Gson; +import org.apache.flink.api.common.functions.MapFunction; +import titan.ccp.model.records.ActivePowerRecord; + +/** + * {@link MapFunction} which maps {@link ActivePowerRecord}s to their representation as JSON + * strings. + */ +public class GsonMapper implements MapFunction<ActivePowerRecord, String> { + + private static final long serialVersionUID = -5263671231838353747L; // NOPMD + + private static final Gson GSON = new Gson(); + + @Override + public String map(final ActivePowerRecord value) throws Exception { + return GSON.toJson(value); + } + +} diff --git a/theodolite-benchmarks/uc1-flink/src/main/java/theodolite/uc1/application/HistoryServiceFlinkJob.java b/theodolite-benchmarks/uc1-flink/src/main/java/theodolite/uc1/application/HistoryServiceFlinkJob.java new file mode 100644 index 0000000000000000000000000000000000000000..6655b52ec3020f46bb8a37c7124ee870fa663573 --- /dev/null +++ b/theodolite-benchmarks/uc1-flink/src/main/java/theodolite/uc1/application/HistoryServiceFlinkJob.java @@ -0,0 +1,81 @@ +package theodolite.uc1.application; + +import org.apache.commons.configuration2.Configuration; +import org.apache.flink.streaming.api.datastream.DataStream; +import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; +import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import theodolite.commons.flink.KafkaConnectorFactory; +import titan.ccp.common.configuration.ServiceConfigurations; +import titan.ccp.model.records.ActivePowerRecord; + +/** + * The History microservice implemented as a Flink job. + */ +public final class HistoryServiceFlinkJob { + + private static final Logger LOGGER = LoggerFactory.getLogger(HistoryServiceFlinkJob.class); + + private final Configuration config = ServiceConfigurations.createWithDefaults(); + private final StreamExecutionEnvironment env; + private final String applicationId; + + /** + * Create a new instance of the {@link HistoryServiceFlinkJob}. 
+ */ + public HistoryServiceFlinkJob() { + final String applicationName = this.config.getString(ConfigurationKeys.APPLICATION_NAME); + final String applicationVersion = this.config.getString(ConfigurationKeys.APPLICATION_VERSION); + this.applicationId = applicationName + "-" + applicationVersion; + + this.env = StreamExecutionEnvironment.getExecutionEnvironment(); + + this.configureEnv(); + + this.buildPipeline(); + } + + private void configureEnv() { + final boolean checkpointing = this.config.getBoolean(ConfigurationKeys.CHECKPOINTING, true); + final int commitIntervalMs = this.config.getInt(ConfigurationKeys.COMMIT_INTERVAL_MS); + if (checkpointing) { + this.env.enableCheckpointing(commitIntervalMs); + } + } + + private void buildPipeline() { + final String kafkaBroker = this.config.getString(ConfigurationKeys.KAFKA_BOOTSTRAP_SERVERS); + final String schemaRegistryUrl = this.config.getString(ConfigurationKeys.SCHEMA_REGISTRY_URL); + final String inputTopic = this.config.getString(ConfigurationKeys.KAFKA_INPUT_TOPIC); + final boolean checkpointing = this.config.getBoolean(ConfigurationKeys.CHECKPOINTING, true); + + final KafkaConnectorFactory kafkaConnector = new KafkaConnectorFactory( + this.applicationId, kafkaBroker, checkpointing, schemaRegistryUrl); + + final FlinkKafkaConsumer<ActivePowerRecord> kafkaConsumer = + kafkaConnector.createConsumer(inputTopic, ActivePowerRecord.class); + + final DataStream<ActivePowerRecord> stream = this.env.addSource(kafkaConsumer); + + stream + .rebalance() + .map(new GsonMapper()) + .flatMap((record, c) -> LOGGER.info("Record: {}", record)); + } + + /** + * Start running this microservice. + */ + public void run() { + try { + this.env.execute(this.applicationId); + } catch (final Exception e) { // NOPMD Execution thrown by Flink + LOGGER.error("An error occurred while running this job.", e); + } + } + + public static void main(final String[] args) { + new HistoryServiceFlinkJob().run(); + } +} diff --git a/theodolite-benchmarks/uc1-flink/src/main/resources/META-INF/application.properties b/theodolite-benchmarks/uc1-flink/src/main/resources/META-INF/application.properties new file mode 100644 index 0000000000000000000000000000000000000000..905e501b8cb66712f2b245470d96803987a9b93b --- /dev/null +++ b/theodolite-benchmarks/uc1-flink/src/main/resources/META-INF/application.properties @@ -0,0 +1,12 @@ +application.name=theodolite-uc1-application +application.version=0.0.1 + +kafka.bootstrap.servers=localhost:9092 +kafka.input.topic=input +kafka.output.topic=output + +schema.registry.url=http://localhost:8081 + +num.threads=1 +commit.interval.ms=1000 +cache.max.bytes.buffering=-1 diff --git a/benchmarks/uc2-workload-generator/.settings/org.eclipse.jdt.ui.prefs b/theodolite-benchmarks/uc1-kstreams/.settings/org.eclipse.jdt.ui.prefs similarity index 100% rename from benchmarks/uc2-workload-generator/.settings/org.eclipse.jdt.ui.prefs rename to theodolite-benchmarks/uc1-kstreams/.settings/org.eclipse.jdt.ui.prefs diff --git a/benchmarks/uc2-workload-generator/.settings/qa.eclipse.plugin.checkstyle.prefs b/theodolite-benchmarks/uc1-kstreams/.settings/qa.eclipse.plugin.checkstyle.prefs similarity index 100% rename from benchmarks/uc2-workload-generator/.settings/qa.eclipse.plugin.checkstyle.prefs rename to theodolite-benchmarks/uc1-kstreams/.settings/qa.eclipse.plugin.checkstyle.prefs diff --git a/benchmarks/uc2-workload-generator/.settings/qa.eclipse.plugin.pmd.prefs b/theodolite-benchmarks/uc1-kstreams/.settings/qa.eclipse.plugin.pmd.prefs similarity index
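One caveat with the uc1 pipeline above: Flink's type extraction usually cannot determine the output type of a lambda passed to flatMap, because the Collector's generic parameter is erased, so such a pipeline may fail at job-graph construction unless an explicit return-type hint is supplied. A hedged sketch of such a hint (the helper name and the Object type parameter are illustrative; the committed code may rely on different behavior):

import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

final class LoggingSink {

  private static final Logger LOGGER = LoggerFactory.getLogger(LoggingSink.class);

  private LoggingSink() {}

  static void attach(final DataStream<String> jsonStream) {
    jsonStream
        .flatMap((record, collector) -> LOGGER.info("Record: {}", record))
        // Explicit hint for the lambda's erased output type; the stream ends here, so the type is never used.
        .returns(Types.GENERIC(Object.class));
  }
}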
100% rename from benchmarks/uc2-workload-generator/.settings/qa.eclipse.plugin.pmd.prefs rename to theodolite-benchmarks/uc1-kstreams/.settings/qa.eclipse.plugin.pmd.prefs diff --git a/benchmarks/uc1-application/Dockerfile b/theodolite-benchmarks/uc1-kstreams/Dockerfile similarity index 53% rename from benchmarks/uc1-application/Dockerfile rename to theodolite-benchmarks/uc1-kstreams/Dockerfile index 09c36f42afe730a2fc6ba59bbc2082aa8b715f68..bfabd8ff2073e03beaecba847d5cf6cd1722224f 100644 --- a/benchmarks/uc1-application/Dockerfile +++ b/theodolite-benchmarks/uc1-kstreams/Dockerfile @@ -1,7 +1,7 @@ FROM openjdk:11-slim -ADD build/distributions/uc1-application.tar / +ADD build/distributions/uc1-kstreams.tar / CMD JAVA_OPTS="$JAVA_OPTS -Dorg.slf4j.simpleLogger.defaultLogLevel=$LOG_LEVEL" \ - /uc1-application/bin/uc1-application \ No newline at end of file + /uc1-kstreams/bin/uc1-kstreams \ No newline at end of file diff --git a/benchmarks/uc1-application/build.gradle b/theodolite-benchmarks/uc1-kstreams/build.gradle similarity index 57% rename from benchmarks/uc1-application/build.gradle rename to theodolite-benchmarks/uc1-kstreams/build.gradle index 3b197e85116f41dde5574d9253d60e1146fe44a2..74cfb450ec80759f60582c25ab844e3398d5bf02 100644 --- a/benchmarks/uc1-application/build.gradle +++ b/theodolite-benchmarks/uc1-kstreams/build.gradle @@ -1 +1,5 @@ +plugins { + id 'theodolite.kstreams' +} + mainClassName = "theodolite.uc1.application.HistoryService" diff --git a/benchmarks/uc1-application/src/main/java/theodolite/uc1/application/HistoryService.java b/theodolite-benchmarks/uc1-kstreams/src/main/java/theodolite/uc1/application/HistoryService.java similarity index 100% rename from benchmarks/uc1-application/src/main/java/theodolite/uc1/application/HistoryService.java rename to theodolite-benchmarks/uc1-kstreams/src/main/java/theodolite/uc1/application/HistoryService.java diff --git a/benchmarks/uc1-application/src/main/java/theodolite/uc1/streamprocessing/TopologyBuilder.java b/theodolite-benchmarks/uc1-kstreams/src/main/java/theodolite/uc1/streamprocessing/TopologyBuilder.java similarity index 90% rename from benchmarks/uc1-application/src/main/java/theodolite/uc1/streamprocessing/TopologyBuilder.java rename to theodolite-benchmarks/uc1-kstreams/src/main/java/theodolite/uc1/streamprocessing/TopologyBuilder.java index 75c833aa722654395b1adc6f739395eea5256820..427a838f45f6807ede00dcb68ebf8c5580f28ce6 100644 --- a/benchmarks/uc1-application/src/main/java/theodolite/uc1/streamprocessing/TopologyBuilder.java +++ b/theodolite-benchmarks/uc1-kstreams/src/main/java/theodolite/uc1/streamprocessing/TopologyBuilder.java @@ -17,11 +17,11 @@ import titan.ccp.model.records.ActivePowerRecord; public class TopologyBuilder { private static final Logger LOGGER = LoggerFactory.getLogger(TopologyBuilder.class); + private static final Gson GSON = new Gson(); private final String inputTopic; private final SchemaRegistryAvroSerdeFactory srAvroSerdeFactory; - private final Gson gson = new Gson(); private final StreamsBuilder builder = new StreamsBuilder(); @@ -42,8 +42,8 @@ public class TopologyBuilder { .stream(this.inputTopic, Consumed.with( Serdes.String(), this.srAvroSerdeFactory.<ActivePowerRecord>forValues())) - .mapValues(v -> this.gson.toJson(v)) - .foreach((k, v) -> LOGGER.info("Key: " + k + " Value: " + v)); + .mapValues(v -> GSON.toJson(v)) + .foreach((k, record) -> LOGGER.info("Record: {}", record)); return this.builder.build(properties); } diff --git 
a/benchmarks/uc1-application/src/main/java/theodolite/uc1/streamprocessing/Uc1KafkaStreamsBuilder.java b/theodolite-benchmarks/uc1-kstreams/src/main/java/theodolite/uc1/streamprocessing/Uc1KafkaStreamsBuilder.java similarity index 100% rename from benchmarks/uc1-application/src/main/java/theodolite/uc1/streamprocessing/Uc1KafkaStreamsBuilder.java rename to theodolite-benchmarks/uc1-kstreams/src/main/java/theodolite/uc1/streamprocessing/Uc1KafkaStreamsBuilder.java diff --git a/benchmarks/uc1-application/src/main/resources/META-INF/application.properties b/theodolite-benchmarks/uc1-kstreams/src/main/resources/META-INF/application.properties similarity index 76% rename from benchmarks/uc1-application/src/main/resources/META-INF/application.properties rename to theodolite-benchmarks/uc1-kstreams/src/main/resources/META-INF/application.properties index b46e6246e248cc524c5b6249348c76ded6ec468b..e3371cc87e20e85e6e8c327955537e6e49dab86e 100644 --- a/benchmarks/uc1-application/src/main/resources/META-INF/application.properties +++ b/theodolite-benchmarks/uc1-kstreams/src/main/resources/META-INF/application.properties @@ -4,5 +4,5 @@ application.version=0.0.1 kafka.bootstrap.servers=localhost:9092 kafka.input.topic=input -schema.registry.url=http://localhost:8091 +schema.registry.url=http://localhost:8081 diff --git a/benchmarks/uc1-workload-generator/.settings/org.eclipse.jdt.ui.prefs b/theodolite-benchmarks/uc1-load-generator/.settings/org.eclipse.jdt.ui.prefs similarity index 100% rename from benchmarks/uc1-workload-generator/.settings/org.eclipse.jdt.ui.prefs rename to theodolite-benchmarks/uc1-load-generator/.settings/org.eclipse.jdt.ui.prefs diff --git a/benchmarks/uc3-application/.settings/qa.eclipse.plugin.checkstyle.prefs b/theodolite-benchmarks/uc1-load-generator/.settings/qa.eclipse.plugin.checkstyle.prefs similarity index 100% rename from benchmarks/uc3-application/.settings/qa.eclipse.plugin.checkstyle.prefs rename to theodolite-benchmarks/uc1-load-generator/.settings/qa.eclipse.plugin.checkstyle.prefs diff --git a/benchmarks/uc3-application/.settings/qa.eclipse.plugin.pmd.prefs b/theodolite-benchmarks/uc1-load-generator/.settings/qa.eclipse.plugin.pmd.prefs similarity index 100% rename from benchmarks/uc3-application/.settings/qa.eclipse.plugin.pmd.prefs rename to theodolite-benchmarks/uc1-load-generator/.settings/qa.eclipse.plugin.pmd.prefs diff --git a/theodolite-benchmarks/uc1-load-generator/Dockerfile b/theodolite-benchmarks/uc1-load-generator/Dockerfile new file mode 100644 index 0000000000000000000000000000000000000000..2a9195d3b461219f2e0b1805ff4c7f50412132e1 --- /dev/null +++ b/theodolite-benchmarks/uc1-load-generator/Dockerfile @@ -0,0 +1,6 @@ +FROM openjdk:11-slim + +ADD build/distributions/uc1-load-generator.tar / + +CMD JAVA_OPTS="$JAVA_OPTS -Dorg.slf4j.simpleLogger.defaultLogLevel=$LOG_LEVEL" \ + /uc1-load-generator/bin/uc1-load-generator \ No newline at end of file diff --git a/benchmarks/uc1-workload-generator/build.gradle b/theodolite-benchmarks/uc1-load-generator/build.gradle similarity index 56% rename from benchmarks/uc1-workload-generator/build.gradle rename to theodolite-benchmarks/uc1-load-generator/build.gradle index 9cc0bdbf01032efa3b251db06a2837cc9b920675..aadd4796d86dd46ca6094b00479f9f8483fc7e15 100644 --- a/benchmarks/uc1-workload-generator/build.gradle +++ b/theodolite-benchmarks/uc1-load-generator/build.gradle @@ -1 +1,5 @@ +plugins { + id 'theodolite.load-generator' +} + mainClassName = "theodolite.uc1.workloadgenerator.LoadGenerator" diff --git 
a/benchmarks/uc1-workload-generator/src/main/java/theodolite/uc1/workloadgenerator/LoadGenerator.java b/theodolite-benchmarks/uc1-load-generator/src/main/java/theodolite/uc1/workloadgenerator/LoadGenerator.java similarity index 100% rename from benchmarks/uc1-workload-generator/src/main/java/theodolite/uc1/workloadgenerator/LoadGenerator.java rename to theodolite-benchmarks/uc1-load-generator/src/main/java/theodolite/uc1/workloadgenerator/LoadGenerator.java diff --git a/benchmarks/uc4-workload-generator/.settings/org.eclipse.jdt.ui.prefs b/theodolite-benchmarks/uc2-flink/.settings/org.eclipse.jdt.ui.prefs similarity index 99% rename from benchmarks/uc4-workload-generator/.settings/org.eclipse.jdt.ui.prefs rename to theodolite-benchmarks/uc2-flink/.settings/org.eclipse.jdt.ui.prefs index fa98ca63d77bdee891150bd6713f70197a75cefc..4d01df75552c562406705858b6368ecf59d6e82f 100644 --- a/benchmarks/uc4-workload-generator/.settings/org.eclipse.jdt.ui.prefs +++ b/theodolite-benchmarks/uc2-flink/.settings/org.eclipse.jdt.ui.prefs @@ -66,6 +66,7 @@ org.eclipse.jdt.ui.ignorelowercasenames=true org.eclipse.jdt.ui.importorder=; org.eclipse.jdt.ui.ondemandthreshold=99 org.eclipse.jdt.ui.staticondemandthreshold=99 +org.eclipse.jdt.ui.text.custom_code_templates= sp_cleanup.add_default_serial_version_id=true sp_cleanup.add_generated_serial_version_id=false sp_cleanup.add_missing_annotations=true diff --git a/benchmarks/uc3-workload-generator/.settings/qa.eclipse.plugin.checkstyle.prefs b/theodolite-benchmarks/uc2-flink/.settings/qa.eclipse.plugin.checkstyle.prefs similarity index 100% rename from benchmarks/uc3-workload-generator/.settings/qa.eclipse.plugin.checkstyle.prefs rename to theodolite-benchmarks/uc2-flink/.settings/qa.eclipse.plugin.checkstyle.prefs diff --git a/benchmarks/uc3-workload-generator/.settings/qa.eclipse.plugin.pmd.prefs b/theodolite-benchmarks/uc2-flink/.settings/qa.eclipse.plugin.pmd.prefs similarity index 100% rename from benchmarks/uc3-workload-generator/.settings/qa.eclipse.plugin.pmd.prefs rename to theodolite-benchmarks/uc2-flink/.settings/qa.eclipse.plugin.pmd.prefs diff --git a/theodolite-benchmarks/uc2-flink/Dockerfile b/theodolite-benchmarks/uc2-flink/Dockerfile new file mode 100644 index 0000000000000000000000000000000000000000..537ab28e2d4e5fb8edfc2760142acc33cc49b91d --- /dev/null +++ b/theodolite-benchmarks/uc2-flink/Dockerfile @@ -0,0 +1,3 @@ +FROM flink:1.12-scala_2.12-java11 + +ADD build/libs/uc2-flink-all.jar /opt/flink/usrlib/artifacts/uc2-flink-all.jar \ No newline at end of file diff --git a/theodolite-benchmarks/uc2-flink/build.gradle b/theodolite-benchmarks/uc2-flink/build.gradle new file mode 100644 index 0000000000000000000000000000000000000000..680b802ce527d538062658874146ce1f0bd3b1e8 --- /dev/null +++ b/theodolite-benchmarks/uc2-flink/build.gradle @@ -0,0 +1,17 @@ +plugins { + id 'theodolite.flink' +} + +allprojects { + repositories { + maven { + url 'https://packages.confluent.io/maven/' + } + } +} + +dependencies { + compile('org.industrial-devops:titan-ccp-common-kafka:0.1.0-SNAPSHOT') +} + +mainClassName = "theodolite.uc2.application.HistoryServiceFlinkJob" diff --git a/theodolite-benchmarks/uc2-flink/src/main/java/theodolite/uc2/application/ConfigurationKeys.java b/theodolite-benchmarks/uc2-flink/src/main/java/theodolite/uc2/application/ConfigurationKeys.java new file mode 100644 index 0000000000000000000000000000000000000000..9ba56c828a0ae5c6147aadd90d449c7cf2324992 --- /dev/null +++ 
b/theodolite-benchmarks/uc2-flink/src/main/java/theodolite/uc2/application/ConfigurationKeys.java @@ -0,0 +1,35 @@ +package theodolite.uc2.application; + +/** + * Keys to access configuration parameters. + */ +public final class ConfigurationKeys { + + public static final String APPLICATION_NAME = "application.name"; + + public static final String APPLICATION_VERSION = "application.version"; + + public static final String KAFKA_BOOTSTRAP_SERVERS = "kafka.bootstrap.servers"; + + public static final String KAFKA_OUTPUT_TOPIC = "kafka.output.topic"; + + public static final String KAFKA_INPUT_TOPIC = "kafka.input.topic"; + + public static final String SCHEMA_REGISTRY_URL = "schema.registry.url"; + + public static final String COMMIT_INTERVAL_MS = "commit.interval.ms"; + + public static final String KAFKA_WINDOW_DURATION_MINUTES = "kafka.window.duration.minutes"; + + public static final String FLINK_STATE_BACKEND = "flink.state.backend"; + + public static final String FLINK_STATE_BACKEND_PATH = "flink.state.backend.path"; + + public static final String FLINK_STATE_BACKEND_MEMORY_SIZE = // NOPMD + "flink.state.backend.memory.size"; + + public static final String CHECKPOINTING = "checkpointing"; + + private ConfigurationKeys() {} + +} diff --git a/theodolite-benchmarks/uc2-flink/src/main/java/theodolite/uc2/application/HistoryServiceFlinkJob.java b/theodolite-benchmarks/uc2-flink/src/main/java/theodolite/uc2/application/HistoryServiceFlinkJob.java new file mode 100644 index 0000000000000000000000000000000000000000..b8452847df800226ad481f9309323a2a9a532939 --- /dev/null +++ b/theodolite-benchmarks/uc2-flink/src/main/java/theodolite/uc2/application/HistoryServiceFlinkJob.java @@ -0,0 +1,126 @@ +package theodolite.uc2.application; + +import com.google.common.math.Stats; +import org.apache.commons.configuration2.Configuration; +import org.apache.flink.api.common.typeinfo.Types; +import org.apache.flink.api.java.tuple.Tuple2; +import org.apache.flink.runtime.state.StateBackend; +import org.apache.flink.streaming.api.TimeCharacteristic; +import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; +import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows; +import org.apache.flink.streaming.api.windowing.time.Time; +import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer; +import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer; +import org.apache.kafka.common.serialization.Serdes; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import theodolite.commons.flink.KafkaConnectorFactory; +import theodolite.commons.flink.StateBackends; +import theodolite.commons.flink.serialization.StatsSerializer; +import titan.ccp.common.configuration.ServiceConfigurations; +import titan.ccp.model.records.ActivePowerRecord; + + +/** + * The History microservice implemented as a Flink job. + */ +public final class HistoryServiceFlinkJob { + + private static final Logger LOGGER = LoggerFactory.getLogger(HistoryServiceFlinkJob.class); + + private final Configuration config = ServiceConfigurations.createWithDefaults(); + private final StreamExecutionEnvironment env; + private final String applicationId; + + /** + * Create a new instance of the {@link HistoryServiceFlinkJob}. 
+ */ + public HistoryServiceFlinkJob() { + final String applicationName = this.config.getString(ConfigurationKeys.APPLICATION_NAME); + final String applicationVersion = this.config.getString(ConfigurationKeys.APPLICATION_VERSION); + this.applicationId = applicationName + "-" + applicationVersion; + + this.env = StreamExecutionEnvironment.getExecutionEnvironment(); + + this.configureEnv(); + + this.buildPipeline(); + } + + private void configureEnv() { + this.env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime); + + final boolean checkpointing = this.config.getBoolean(ConfigurationKeys.CHECKPOINTING, true); + final int commitIntervalMs = this.config.getInt(ConfigurationKeys.COMMIT_INTERVAL_MS); + if (checkpointing) { + this.env.enableCheckpointing(commitIntervalMs); + } + + // State Backend + final StateBackend stateBackend = StateBackends.fromConfiguration(this.config); + this.env.setStateBackend(stateBackend); + + this.configureSerializers(); + } + + private void configureSerializers() { + this.env.getConfig().registerTypeWithKryoSerializer(Stats.class, new StatsSerializer()); + this.env.getConfig().getRegisteredTypesWithKryoSerializers() + .forEach((c, s) -> LOGGER.info("Class " + c.getName() + " registered with serializer " + + s.getSerializer().getClass().getName())); + + } + + private void buildPipeline() { + final String kafkaBroker = this.config.getString(ConfigurationKeys.KAFKA_BOOTSTRAP_SERVERS); + final String schemaRegistryUrl = this.config.getString(ConfigurationKeys.SCHEMA_REGISTRY_URL); + final String inputTopic = this.config.getString(ConfigurationKeys.KAFKA_INPUT_TOPIC); + final String outputTopic = this.config.getString(ConfigurationKeys.KAFKA_OUTPUT_TOPIC); + final int windowDuration = this.config.getInt(ConfigurationKeys.KAFKA_WINDOW_DURATION_MINUTES); + final boolean checkpointing = this.config.getBoolean(ConfigurationKeys.CHECKPOINTING, true); + + final KafkaConnectorFactory kafkaConnector = new KafkaConnectorFactory( + this.applicationId, kafkaBroker, checkpointing, schemaRegistryUrl); + + final FlinkKafkaConsumer<ActivePowerRecord> kafkaSource = + kafkaConnector.createConsumer(inputTopic, ActivePowerRecord.class); + + final FlinkKafkaProducer<Tuple2<String, String>> kafkaSink = + kafkaConnector.createProducer(outputTopic, + Serdes::String, + Serdes::String, + Types.TUPLE(Types.STRING, Types.STRING)); + + this.env + .addSource(kafkaSource).name("[Kafka Consumer] Topic: " + inputTopic) + .rebalance() + .keyBy(ActivePowerRecord::getIdentifier) + .window(TumblingEventTimeWindows.of(Time.minutes(windowDuration))) + .aggregate(new StatsAggregateFunction(), new StatsProcessWindowFunction()) + .map(t -> { + final String key = t.f0; + final String value = t.f1.toString(); + LOGGER.info("{}: {}", key, value); + return new Tuple2<>(key, value); + }).name("map").returns(Types.TUPLE(Types.STRING, Types.STRING)) + .addSink(kafkaSink).name("[Kafka Producer] Topic: " + outputTopic); + } + + + /** + * Start running this microservice. 
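Because the uc2 job above sets the event-time characteristic and uses TumblingEventTimeWindows, windows only fire once watermarks advance. Whether the KafkaConnectorFactory already assigns timestamps and watermarks is not visible in this diff; if it does not, a watermark strategy would have to be attached to the consumer roughly as follows (the one-second out-of-orderness bound is an arbitrary assumption):

import java.time.Duration;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import titan.ccp.model.records.ActivePowerRecord;

final class WatermarkSetup {

  private WatermarkSetup() {}

  static void configure(final FlinkKafkaConsumer<ActivePowerRecord> consumer) {
    // Uses the Kafka record timestamps and tolerates records arriving up to one second out of order.
    consumer.assignTimestampsAndWatermarks(
        WatermarkStrategy.<ActivePowerRecord>forBoundedOutOfOrderness(Duration.ofSeconds(1)));
  }
}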
+ */ + public void run() { + LOGGER.info("Execution plan: {}", this.env.getExecutionPlan()); + + try { + this.env.execute(this.applicationId); + } catch (final Exception e) { // NOPMD Execution thrown by Flink + LOGGER.error("An error occurred while running this job.", e); + } + } + + public static void main(final String[] args) { + new HistoryServiceFlinkJob().run(); + } +} diff --git a/theodolite-benchmarks/uc2-flink/src/main/java/theodolite/uc2/application/StatsAggregateFunction.java b/theodolite-benchmarks/uc2-flink/src/main/java/theodolite/uc2/application/StatsAggregateFunction.java new file mode 100644 index 0000000000000000000000000000000000000000..7bd090de819ce0c0c73687bd53a191b66ae31ed9 --- /dev/null +++ b/theodolite-benchmarks/uc2-flink/src/main/java/theodolite/uc2/application/StatsAggregateFunction.java @@ -0,0 +1,38 @@ +package theodolite.uc2.application; + +import com.google.common.math.Stats; +import com.google.common.math.StatsAccumulator; +import org.apache.flink.api.common.functions.AggregateFunction; +import theodolite.uc2.application.util.StatsFactory; +import titan.ccp.model.records.ActivePowerRecord; + +/** + * Statistical aggregation of {@link ActivePowerRecord}s using {@link Stats}. + */ +public class StatsAggregateFunction implements AggregateFunction<ActivePowerRecord, Stats, Stats> { + + private static final long serialVersionUID = -8873572990921515499L; // NOPMD + + @Override + public Stats createAccumulator() { + return Stats.of(); + } + + @Override + public Stats add(final ActivePowerRecord value, final Stats accumulator) { + return StatsFactory.accumulate(accumulator, value.getValueInW()); + } + + @Override + public Stats getResult(final Stats accumulator) { + return accumulator; + } + + @Override + public Stats merge(final Stats a, final Stats b) { + final StatsAccumulator statsAccumulator = new StatsAccumulator(); + statsAccumulator.addAll(a); + statsAccumulator.addAll(b); + return statsAccumulator.snapshot(); + } +} diff --git a/theodolite-benchmarks/uc2-flink/src/main/java/theodolite/uc2/application/StatsProcessWindowFunction.java b/theodolite-benchmarks/uc2-flink/src/main/java/theodolite/uc2/application/StatsProcessWindowFunction.java new file mode 100644 index 0000000000000000000000000000000000000000..d422c37b667d9d3309f0dd858758db29051807b9 --- /dev/null +++ b/theodolite-benchmarks/uc2-flink/src/main/java/theodolite/uc2/application/StatsProcessWindowFunction.java @@ -0,0 +1,24 @@ +package theodolite.uc2.application; + +import com.google.common.math.Stats; +import org.apache.flink.api.java.tuple.Tuple2; +import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction; +import org.apache.flink.streaming.api.windowing.windows.TimeWindow; +import org.apache.flink.util.Collector; + +/** + * A {@link ProcessWindowFunction} that forwards a computed {@link Stats} object along with its + * associated key.
+ */ +public class StatsProcessWindowFunction + extends ProcessWindowFunction<Stats, Tuple2<String, Stats>, String, TimeWindow> { + + private static final long serialVersionUID = 4363099880614593379L; // NOPMD + + @Override + public void process(final String key, final Context context, final Iterable<Stats> elements, + final Collector<Tuple2<String, Stats>> out) { + final Stats stats = elements.iterator().next(); + out.collect(new Tuple2<>(key, stats)); + } +} diff --git a/theodolite-benchmarks/uc2-flink/src/main/java/theodolite/uc2/application/util/StatsFactory.java b/theodolite-benchmarks/uc2-flink/src/main/java/theodolite/uc2/application/util/StatsFactory.java new file mode 100644 index 0000000000000000000000000000000000000000..9697108eb8dacabf925f06067199a41eb0658dbe --- /dev/null +++ b/theodolite-benchmarks/uc2-flink/src/main/java/theodolite/uc2/application/util/StatsFactory.java @@ -0,0 +1,23 @@ +package theodolite.uc2.application.util; + +import com.google.common.math.Stats; +import com.google.common.math.StatsAccumulator; + +/** + * Factory methods for working with {@link Stats}. + */ +public final class StatsFactory { + + private StatsFactory() {} + + /** + * Add a value to a {@link Stats} object. + */ + public static Stats accumulate(final Stats stats, final double value) { + final StatsAccumulator statsAccumulator = new StatsAccumulator(); + statsAccumulator.addAll(stats); + statsAccumulator.add(value); + return statsAccumulator.snapshot(); + } + +} diff --git a/theodolite-benchmarks/uc2-flink/src/main/resources/META-INF/application.properties b/theodolite-benchmarks/uc2-flink/src/main/resources/META-INF/application.properties new file mode 100644 index 0000000000000000000000000000000000000000..f971390984ee41be1fce54e62f4f43ee2b9c02da --- /dev/null +++ b/theodolite-benchmarks/uc2-flink/src/main/resources/META-INF/application.properties @@ -0,0 +1,11 @@ +application.name=theodolite-uc2-application +application.version=0.0.1 + +kafka.bootstrap.servers=localhost:9092 +kafka.input.topic=input +kafka.output.topic=output +schema.registry.url=http://localhost:8081 +num.threads=1 +commit.interval.ms=100 +cache.max.bytes.buffering=-1 +kafka.window.duration.minutes=1 \ No newline at end of file diff --git a/benchmarks/workload-generator-commons/.settings/org.eclipse.jdt.ui.prefs b/theodolite-benchmarks/uc2-kstreams/.settings/org.eclipse.jdt.ui.prefs similarity index 99% rename from benchmarks/workload-generator-commons/.settings/org.eclipse.jdt.ui.prefs rename to theodolite-benchmarks/uc2-kstreams/.settings/org.eclipse.jdt.ui.prefs index fa98ca63d77bdee891150bd6713f70197a75cefc..4d01df75552c562406705858b6368ecf59d6e82f 100644 --- a/benchmarks/workload-generator-commons/.settings/org.eclipse.jdt.ui.prefs +++ b/theodolite-benchmarks/uc2-kstreams/.settings/org.eclipse.jdt.ui.prefs @@ -66,6 +66,7 @@ org.eclipse.jdt.ui.ignorelowercasenames=true org.eclipse.jdt.ui.importorder=; org.eclipse.jdt.ui.ondemandthreshold=99 org.eclipse.jdt.ui.staticondemandthreshold=99 +org.eclipse.jdt.ui.text.custom_code_templates= sp_cleanup.add_default_serial_version_id=true sp_cleanup.add_generated_serial_version_id=false sp_cleanup.add_missing_annotations=true diff --git a/benchmarks/uc4-application/.settings/qa.eclipse.plugin.checkstyle.prefs b/theodolite-benchmarks/uc2-kstreams/.settings/qa.eclipse.plugin.checkstyle.prefs similarity index 100% rename from benchmarks/uc4-application/.settings/qa.eclipse.plugin.checkstyle.prefs rename to 
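The uc2 aggregation above folds each ActivePowerRecord into a Guava Stats object via StatsFactory, and the process window function then re-attaches the sensor identifier. A small worked example of the accumulator behavior, assuming the Avro-generated all-args constructor of ActivePowerRecord:

import com.google.common.math.Stats;
import theodolite.uc2.application.StatsAggregateFunction;
import titan.ccp.model.records.ActivePowerRecord;

public final class StatsAggregationExample {

  private StatsAggregationExample() {}

  public static void main(final String[] args) {
    final StatsAggregateFunction aggregate = new StatsAggregateFunction();
    Stats accumulator = aggregate.createAccumulator();
    // ActivePowerRecord(identifier, timestamp, valueInW) is assumed to be the generated constructor.
    accumulator = aggregate.add(new ActivePowerRecord("sensor-1", 0L, 10.0), accumulator);
    accumulator = aggregate.add(new ActivePowerRecord("sensor-1", 1000L, 20.0), accumulator);
    final Stats result = aggregate.getResult(accumulator);
    System.out.println(result.count() + " readings, mean " + result.mean()); // 2 readings, mean 15.0
  }
}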
theodolite-benchmarks/uc2-kstreams/.settings/qa.eclipse.plugin.checkstyle.prefs diff --git a/benchmarks/uc4-application/.settings/qa.eclipse.plugin.pmd.prefs b/theodolite-benchmarks/uc2-kstreams/.settings/qa.eclipse.plugin.pmd.prefs similarity index 100% rename from benchmarks/uc4-application/.settings/qa.eclipse.plugin.pmd.prefs rename to theodolite-benchmarks/uc2-kstreams/.settings/qa.eclipse.plugin.pmd.prefs diff --git a/benchmarks/uc2-application/Dockerfile b/theodolite-benchmarks/uc2-kstreams/Dockerfile similarity index 53% rename from benchmarks/uc2-application/Dockerfile rename to theodolite-benchmarks/uc2-kstreams/Dockerfile index 5177dcede26016990b73467460fd358823c43c76..0c888bb541151da3299a86c9157ca5c6af36a088 100644 --- a/benchmarks/uc2-application/Dockerfile +++ b/theodolite-benchmarks/uc2-kstreams/Dockerfile @@ -1,6 +1,6 @@ FROM openjdk:11-slim -ADD build/distributions/uc2-application.tar / +ADD build/distributions/uc2-kstreams.tar / CMD JAVA_OPTS="$JAVA_OPTS -Dorg.slf4j.simpleLogger.defaultLogLevel=$LOG_LEVEL" \ - /uc2-application/bin/uc2-application \ No newline at end of file + /uc2-kstreams/bin/uc2-kstreams \ No newline at end of file diff --git a/benchmarks/uc2-application/build.gradle b/theodolite-benchmarks/uc2-kstreams/build.gradle similarity index 57% rename from benchmarks/uc2-application/build.gradle rename to theodolite-benchmarks/uc2-kstreams/build.gradle index e4d3f5346e401def9c9a5a49820d0682eafb0ad3..6688f229b3c57f95aaaf5f5cd4ca615db609277a 100644 --- a/benchmarks/uc2-application/build.gradle +++ b/theodolite-benchmarks/uc2-kstreams/build.gradle @@ -1 +1,5 @@ +plugins { + id 'theodolite.kstreams' +} + mainClassName = "theodolite.uc2.application.HistoryService" diff --git a/benchmarks/uc2-application/src/main/java/theodolite/uc2/application/HistoryService.java b/theodolite-benchmarks/uc2-kstreams/src/main/java/theodolite/uc2/application/HistoryService.java similarity index 100% rename from benchmarks/uc2-application/src/main/java/theodolite/uc2/application/HistoryService.java rename to theodolite-benchmarks/uc2-kstreams/src/main/java/theodolite/uc2/application/HistoryService.java diff --git a/benchmarks/uc2-application/src/main/java/theodolite/uc2/streamprocessing/TopologyBuilder.java b/theodolite-benchmarks/uc2-kstreams/src/main/java/theodolite/uc2/streamprocessing/TopologyBuilder.java similarity index 100% rename from benchmarks/uc2-application/src/main/java/theodolite/uc2/streamprocessing/TopologyBuilder.java rename to theodolite-benchmarks/uc2-kstreams/src/main/java/theodolite/uc2/streamprocessing/TopologyBuilder.java diff --git a/benchmarks/uc2-application/src/main/java/theodolite/uc2/streamprocessing/Uc2KafkaStreamsBuilder.java b/theodolite-benchmarks/uc2-kstreams/src/main/java/theodolite/uc2/streamprocessing/Uc2KafkaStreamsBuilder.java similarity index 100% rename from benchmarks/uc2-application/src/main/java/theodolite/uc2/streamprocessing/Uc2KafkaStreamsBuilder.java rename to theodolite-benchmarks/uc2-kstreams/src/main/java/theodolite/uc2/streamprocessing/Uc2KafkaStreamsBuilder.java diff --git a/benchmarks/uc2-application/src/main/java/theodolite/uc2/streamprocessing/util/StatsFactory.java b/theodolite-benchmarks/uc2-kstreams/src/main/java/theodolite/uc2/streamprocessing/util/StatsFactory.java similarity index 100% rename from benchmarks/uc2-application/src/main/java/theodolite/uc2/streamprocessing/util/StatsFactory.java rename to theodolite-benchmarks/uc2-kstreams/src/main/java/theodolite/uc2/streamprocessing/util/StatsFactory.java diff --git 
a/benchmarks/uc2-application/src/main/resources/META-INF/application.properties b/theodolite-benchmarks/uc2-kstreams/src/main/resources/META-INF/application.properties similarity index 82% rename from benchmarks/uc2-application/src/main/resources/META-INF/application.properties rename to theodolite-benchmarks/uc2-kstreams/src/main/resources/META-INF/application.properties index 15293b1387b96688401bbc48bc2d1615c7b63aba..1b59528db59653d8dc0c2a04d242a0cd39fe07da 100644 --- a/benchmarks/uc2-application/src/main/resources/META-INF/application.properties +++ b/theodolite-benchmarks/uc2-kstreams/src/main/resources/META-INF/application.properties @@ -6,4 +6,4 @@ kafka.input.topic=input kafka.output.topic=output kafka.window.duration.minutes=1 -schema.registry.url=http://localhost:8091 +schema.registry.url=http://localhost:8081 diff --git a/theodolite-benchmarks/uc2-load-generator/.settings/org.eclipse.jdt.ui.prefs b/theodolite-benchmarks/uc2-load-generator/.settings/org.eclipse.jdt.ui.prefs new file mode 100644 index 0000000000000000000000000000000000000000..4d01df75552c562406705858b6368ecf59d6e82f --- /dev/null +++ b/theodolite-benchmarks/uc2-load-generator/.settings/org.eclipse.jdt.ui.prefs @@ -0,0 +1,128 @@ +cleanup.add_default_serial_version_id=true +cleanup.add_generated_serial_version_id=false +cleanup.add_missing_annotations=true +cleanup.add_missing_deprecated_annotations=true +cleanup.add_missing_methods=false +cleanup.add_missing_nls_tags=false +cleanup.add_missing_override_annotations=true +cleanup.add_missing_override_annotations_interface_methods=true +cleanup.add_serial_version_id=false +cleanup.always_use_blocks=true +cleanup.always_use_parentheses_in_expressions=false +cleanup.always_use_this_for_non_static_field_access=true +cleanup.always_use_this_for_non_static_method_access=true +cleanup.convert_functional_interfaces=false +cleanup.convert_to_enhanced_for_loop=true +cleanup.correct_indentation=true +cleanup.format_source_code=true +cleanup.format_source_code_changes_only=false +cleanup.insert_inferred_type_arguments=false +cleanup.make_local_variable_final=true +cleanup.make_parameters_final=true +cleanup.make_private_fields_final=true +cleanup.make_type_abstract_if_missing_method=false +cleanup.make_variable_declarations_final=true +cleanup.never_use_blocks=false +cleanup.never_use_parentheses_in_expressions=true +cleanup.organize_imports=true +cleanup.qualify_static_field_accesses_with_declaring_class=false +cleanup.qualify_static_member_accesses_through_instances_with_declaring_class=true +cleanup.qualify_static_member_accesses_through_subtypes_with_declaring_class=true +cleanup.qualify_static_member_accesses_with_declaring_class=true +cleanup.qualify_static_method_accesses_with_declaring_class=false +cleanup.remove_private_constructors=true +cleanup.remove_redundant_modifiers=false +cleanup.remove_redundant_semicolons=true +cleanup.remove_redundant_type_arguments=true +cleanup.remove_trailing_whitespaces=true +cleanup.remove_trailing_whitespaces_all=true +cleanup.remove_trailing_whitespaces_ignore_empty=false +cleanup.remove_unnecessary_casts=true +cleanup.remove_unnecessary_nls_tags=true +cleanup.remove_unused_imports=true +cleanup.remove_unused_local_variables=false +cleanup.remove_unused_private_fields=true +cleanup.remove_unused_private_members=false +cleanup.remove_unused_private_methods=true +cleanup.remove_unused_private_types=true +cleanup.sort_members=false +cleanup.sort_members_all=false +cleanup.use_anonymous_class_creation=false +cleanup.use_blocks=true 
+cleanup.use_blocks_only_for_return_and_throw=false +cleanup.use_lambda=true +cleanup.use_parentheses_in_expressions=true +cleanup.use_this_for_non_static_field_access=true +cleanup.use_this_for_non_static_field_access_only_if_necessary=false +cleanup.use_this_for_non_static_method_access=true +cleanup.use_this_for_non_static_method_access_only_if_necessary=false +cleanup_profile=_CAU-SE-Style +cleanup_settings_version=2 +eclipse.preferences.version=1 +editor_save_participant_org.eclipse.jdt.ui.postsavelistener.cleanup=true +formatter_profile=_CAU-SE-Style +formatter_settings_version=15 +org.eclipse.jdt.ui.ignorelowercasenames=true +org.eclipse.jdt.ui.importorder=; +org.eclipse.jdt.ui.ondemandthreshold=99 +org.eclipse.jdt.ui.staticondemandthreshold=99 +org.eclipse.jdt.ui.text.custom_code_templates= +sp_cleanup.add_default_serial_version_id=true +sp_cleanup.add_generated_serial_version_id=false +sp_cleanup.add_missing_annotations=true +sp_cleanup.add_missing_deprecated_annotations=true +sp_cleanup.add_missing_methods=false +sp_cleanup.add_missing_nls_tags=false +sp_cleanup.add_missing_override_annotations=true +sp_cleanup.add_missing_override_annotations_interface_methods=true +sp_cleanup.add_serial_version_id=false +sp_cleanup.always_use_blocks=true +sp_cleanup.always_use_parentheses_in_expressions=false +sp_cleanup.always_use_this_for_non_static_field_access=true +sp_cleanup.always_use_this_for_non_static_method_access=true +sp_cleanup.convert_functional_interfaces=false +sp_cleanup.convert_to_enhanced_for_loop=true +sp_cleanup.correct_indentation=true +sp_cleanup.format_source_code=true +sp_cleanup.format_source_code_changes_only=false +sp_cleanup.insert_inferred_type_arguments=false +sp_cleanup.make_local_variable_final=true +sp_cleanup.make_parameters_final=true +sp_cleanup.make_private_fields_final=true +sp_cleanup.make_type_abstract_if_missing_method=false +sp_cleanup.make_variable_declarations_final=true +sp_cleanup.never_use_blocks=false +sp_cleanup.never_use_parentheses_in_expressions=true +sp_cleanup.on_save_use_additional_actions=true +sp_cleanup.organize_imports=true +sp_cleanup.qualify_static_field_accesses_with_declaring_class=false +sp_cleanup.qualify_static_member_accesses_through_instances_with_declaring_class=true +sp_cleanup.qualify_static_member_accesses_through_subtypes_with_declaring_class=true +sp_cleanup.qualify_static_member_accesses_with_declaring_class=true +sp_cleanup.qualify_static_method_accesses_with_declaring_class=false +sp_cleanup.remove_private_constructors=true +sp_cleanup.remove_redundant_modifiers=false +sp_cleanup.remove_redundant_semicolons=true +sp_cleanup.remove_redundant_type_arguments=true +sp_cleanup.remove_trailing_whitespaces=true +sp_cleanup.remove_trailing_whitespaces_all=true +sp_cleanup.remove_trailing_whitespaces_ignore_empty=false +sp_cleanup.remove_unnecessary_casts=true +sp_cleanup.remove_unnecessary_nls_tags=true +sp_cleanup.remove_unused_imports=true +sp_cleanup.remove_unused_local_variables=false +sp_cleanup.remove_unused_private_fields=true +sp_cleanup.remove_unused_private_members=false +sp_cleanup.remove_unused_private_methods=true +sp_cleanup.remove_unused_private_types=true +sp_cleanup.sort_members=false +sp_cleanup.sort_members_all=false +sp_cleanup.use_anonymous_class_creation=false +sp_cleanup.use_blocks=true +sp_cleanup.use_blocks_only_for_return_and_throw=false +sp_cleanup.use_lambda=true +sp_cleanup.use_parentheses_in_expressions=true +sp_cleanup.use_this_for_non_static_field_access=true 
+sp_cleanup.use_this_for_non_static_field_access_only_if_necessary=false +sp_cleanup.use_this_for_non_static_method_access=true +sp_cleanup.use_this_for_non_static_method_access_only_if_necessary=false diff --git a/benchmarks/uc4-workload-generator/.settings/qa.eclipse.plugin.checkstyle.prefs b/theodolite-benchmarks/uc2-load-generator/.settings/qa.eclipse.plugin.checkstyle.prefs similarity index 100% rename from benchmarks/uc4-workload-generator/.settings/qa.eclipse.plugin.checkstyle.prefs rename to theodolite-benchmarks/uc2-load-generator/.settings/qa.eclipse.plugin.checkstyle.prefs diff --git a/benchmarks/uc4-workload-generator/.settings/qa.eclipse.plugin.pmd.prefs b/theodolite-benchmarks/uc2-load-generator/.settings/qa.eclipse.plugin.pmd.prefs similarity index 100% rename from benchmarks/uc4-workload-generator/.settings/qa.eclipse.plugin.pmd.prefs rename to theodolite-benchmarks/uc2-load-generator/.settings/qa.eclipse.plugin.pmd.prefs diff --git a/theodolite-benchmarks/uc2-load-generator/Dockerfile b/theodolite-benchmarks/uc2-load-generator/Dockerfile new file mode 100644 index 0000000000000000000000000000000000000000..e9ee95af7063a92a02ac2e417288ad1505a2dba6 --- /dev/null +++ b/theodolite-benchmarks/uc2-load-generator/Dockerfile @@ -0,0 +1,6 @@ +FROM openjdk:11-slim + +ADD build/distributions/uc2-load-generator.tar / + +CMD JAVA_OPTS="$JAVA_OPTS -Dorg.slf4j.simpleLogger.defaultLogLevel=$LOG_LEVEL" \ + /uc2-load-generator/bin/uc2-load-generator \ No newline at end of file diff --git a/benchmarks/uc2-workload-generator/build.gradle b/theodolite-benchmarks/uc2-load-generator/build.gradle similarity index 56% rename from benchmarks/uc2-workload-generator/build.gradle rename to theodolite-benchmarks/uc2-load-generator/build.gradle index f2c3e5d2e73b655dffd94222ecfbc4fc31b7f722..1954fe5cbfd62d26f27c59be486a516c91892e18 100644 --- a/benchmarks/uc2-workload-generator/build.gradle +++ b/theodolite-benchmarks/uc2-load-generator/build.gradle @@ -1 +1,5 @@ +plugins { + id 'theodolite.load-generator' +} + mainClassName = "theodolite.uc2.workloadgenerator.LoadGenerator" diff --git a/benchmarks/uc2-workload-generator/src/main/java/theodolite/uc2/workloadgenerator/LoadGenerator.java b/theodolite-benchmarks/uc2-load-generator/src/main/java/theodolite/uc2/workloadgenerator/LoadGenerator.java similarity index 100% rename from benchmarks/uc2-workload-generator/src/main/java/theodolite/uc2/workloadgenerator/LoadGenerator.java rename to theodolite-benchmarks/uc2-load-generator/src/main/java/theodolite/uc2/workloadgenerator/LoadGenerator.java diff --git a/theodolite-benchmarks/uc3-flink/.settings/org.eclipse.jdt.ui.prefs b/theodolite-benchmarks/uc3-flink/.settings/org.eclipse.jdt.ui.prefs new file mode 100644 index 0000000000000000000000000000000000000000..4d01df75552c562406705858b6368ecf59d6e82f --- /dev/null +++ b/theodolite-benchmarks/uc3-flink/.settings/org.eclipse.jdt.ui.prefs @@ -0,0 +1,128 @@ +cleanup.add_default_serial_version_id=true +cleanup.add_generated_serial_version_id=false +cleanup.add_missing_annotations=true +cleanup.add_missing_deprecated_annotations=true +cleanup.add_missing_methods=false +cleanup.add_missing_nls_tags=false +cleanup.add_missing_override_annotations=true +cleanup.add_missing_override_annotations_interface_methods=true +cleanup.add_serial_version_id=false +cleanup.always_use_blocks=true +cleanup.always_use_parentheses_in_expressions=false +cleanup.always_use_this_for_non_static_field_access=true +cleanup.always_use_this_for_non_static_method_access=true 
+cleanup.convert_functional_interfaces=false +cleanup.convert_to_enhanced_for_loop=true +cleanup.correct_indentation=true +cleanup.format_source_code=true +cleanup.format_source_code_changes_only=false +cleanup.insert_inferred_type_arguments=false +cleanup.make_local_variable_final=true +cleanup.make_parameters_final=true +cleanup.make_private_fields_final=true +cleanup.make_type_abstract_if_missing_method=false +cleanup.make_variable_declarations_final=true +cleanup.never_use_blocks=false +cleanup.never_use_parentheses_in_expressions=true +cleanup.organize_imports=true +cleanup.qualify_static_field_accesses_with_declaring_class=false +cleanup.qualify_static_member_accesses_through_instances_with_declaring_class=true +cleanup.qualify_static_member_accesses_through_subtypes_with_declaring_class=true +cleanup.qualify_static_member_accesses_with_declaring_class=true +cleanup.qualify_static_method_accesses_with_declaring_class=false +cleanup.remove_private_constructors=true +cleanup.remove_redundant_modifiers=false +cleanup.remove_redundant_semicolons=true +cleanup.remove_redundant_type_arguments=true +cleanup.remove_trailing_whitespaces=true +cleanup.remove_trailing_whitespaces_all=true +cleanup.remove_trailing_whitespaces_ignore_empty=false +cleanup.remove_unnecessary_casts=true +cleanup.remove_unnecessary_nls_tags=true +cleanup.remove_unused_imports=true +cleanup.remove_unused_local_variables=false +cleanup.remove_unused_private_fields=true +cleanup.remove_unused_private_members=false +cleanup.remove_unused_private_methods=true +cleanup.remove_unused_private_types=true +cleanup.sort_members=false +cleanup.sort_members_all=false +cleanup.use_anonymous_class_creation=false +cleanup.use_blocks=true +cleanup.use_blocks_only_for_return_and_throw=false +cleanup.use_lambda=true +cleanup.use_parentheses_in_expressions=true +cleanup.use_this_for_non_static_field_access=true +cleanup.use_this_for_non_static_field_access_only_if_necessary=false +cleanup.use_this_for_non_static_method_access=true +cleanup.use_this_for_non_static_method_access_only_if_necessary=false +cleanup_profile=_CAU-SE-Style +cleanup_settings_version=2 +eclipse.preferences.version=1 +editor_save_participant_org.eclipse.jdt.ui.postsavelistener.cleanup=true +formatter_profile=_CAU-SE-Style +formatter_settings_version=15 +org.eclipse.jdt.ui.ignorelowercasenames=true +org.eclipse.jdt.ui.importorder=; +org.eclipse.jdt.ui.ondemandthreshold=99 +org.eclipse.jdt.ui.staticondemandthreshold=99 +org.eclipse.jdt.ui.text.custom_code_templates= +sp_cleanup.add_default_serial_version_id=true +sp_cleanup.add_generated_serial_version_id=false +sp_cleanup.add_missing_annotations=true +sp_cleanup.add_missing_deprecated_annotations=true +sp_cleanup.add_missing_methods=false +sp_cleanup.add_missing_nls_tags=false +sp_cleanup.add_missing_override_annotations=true +sp_cleanup.add_missing_override_annotations_interface_methods=true +sp_cleanup.add_serial_version_id=false +sp_cleanup.always_use_blocks=true +sp_cleanup.always_use_parentheses_in_expressions=false +sp_cleanup.always_use_this_for_non_static_field_access=true +sp_cleanup.always_use_this_for_non_static_method_access=true +sp_cleanup.convert_functional_interfaces=false +sp_cleanup.convert_to_enhanced_for_loop=true +sp_cleanup.correct_indentation=true +sp_cleanup.format_source_code=true +sp_cleanup.format_source_code_changes_only=false +sp_cleanup.insert_inferred_type_arguments=false +sp_cleanup.make_local_variable_final=true +sp_cleanup.make_parameters_final=true 
+sp_cleanup.make_private_fields_final=true +sp_cleanup.make_type_abstract_if_missing_method=false +sp_cleanup.make_variable_declarations_final=true +sp_cleanup.never_use_blocks=false +sp_cleanup.never_use_parentheses_in_expressions=true +sp_cleanup.on_save_use_additional_actions=true +sp_cleanup.organize_imports=true +sp_cleanup.qualify_static_field_accesses_with_declaring_class=false +sp_cleanup.qualify_static_member_accesses_through_instances_with_declaring_class=true +sp_cleanup.qualify_static_member_accesses_through_subtypes_with_declaring_class=true +sp_cleanup.qualify_static_member_accesses_with_declaring_class=true +sp_cleanup.qualify_static_method_accesses_with_declaring_class=false +sp_cleanup.remove_private_constructors=true +sp_cleanup.remove_redundant_modifiers=false +sp_cleanup.remove_redundant_semicolons=true +sp_cleanup.remove_redundant_type_arguments=true +sp_cleanup.remove_trailing_whitespaces=true +sp_cleanup.remove_trailing_whitespaces_all=true +sp_cleanup.remove_trailing_whitespaces_ignore_empty=false +sp_cleanup.remove_unnecessary_casts=true +sp_cleanup.remove_unnecessary_nls_tags=true +sp_cleanup.remove_unused_imports=true +sp_cleanup.remove_unused_local_variables=false +sp_cleanup.remove_unused_private_fields=true +sp_cleanup.remove_unused_private_members=false +sp_cleanup.remove_unused_private_methods=true +sp_cleanup.remove_unused_private_types=true +sp_cleanup.sort_members=false +sp_cleanup.sort_members_all=false +sp_cleanup.use_anonymous_class_creation=false +sp_cleanup.use_blocks=true +sp_cleanup.use_blocks_only_for_return_and_throw=false +sp_cleanup.use_lambda=true +sp_cleanup.use_parentheses_in_expressions=true +sp_cleanup.use_this_for_non_static_field_access=true +sp_cleanup.use_this_for_non_static_field_access_only_if_necessary=false +sp_cleanup.use_this_for_non_static_method_access=true +sp_cleanup.use_this_for_non_static_method_access_only_if_necessary=false diff --git a/benchmarks/workload-generator-commons/.settings/qa.eclipse.plugin.checkstyle.prefs b/theodolite-benchmarks/uc3-flink/.settings/qa.eclipse.plugin.checkstyle.prefs similarity index 100% rename from benchmarks/workload-generator-commons/.settings/qa.eclipse.plugin.checkstyle.prefs rename to theodolite-benchmarks/uc3-flink/.settings/qa.eclipse.plugin.checkstyle.prefs diff --git a/benchmarks/workload-generator-commons/.settings/qa.eclipse.plugin.pmd.prefs b/theodolite-benchmarks/uc3-flink/.settings/qa.eclipse.plugin.pmd.prefs similarity index 100% rename from benchmarks/workload-generator-commons/.settings/qa.eclipse.plugin.pmd.prefs rename to theodolite-benchmarks/uc3-flink/.settings/qa.eclipse.plugin.pmd.prefs diff --git a/theodolite-benchmarks/uc3-flink/Dockerfile b/theodolite-benchmarks/uc3-flink/Dockerfile new file mode 100644 index 0000000000000000000000000000000000000000..34c6da692cb30b738adf47b9d4ca893e72f330e4 --- /dev/null +++ b/theodolite-benchmarks/uc3-flink/Dockerfile @@ -0,0 +1,3 @@ +FROM flink:1.12-scala_2.12-java11 + +ADD build/libs/uc3-flink-all.jar /opt/flink/usrlib/artifacts/uc3-flink-all.jar \ No newline at end of file diff --git a/theodolite-benchmarks/uc3-flink/build.gradle b/theodolite-benchmarks/uc3-flink/build.gradle new file mode 100644 index 0000000000000000000000000000000000000000..7f74b4b6f3d75f213d2fae868775423381076641 --- /dev/null +++ b/theodolite-benchmarks/uc3-flink/build.gradle @@ -0,0 +1,17 @@ +plugins { + id 'theodolite.flink' +} + +allprojects { + repositories { + maven { + url 'https://packages.confluent.io/maven/' + } + } +} + +dependencies { + 
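+  // Presumably provides the titan.ccp.common.kafka.simpleserdes classes used by HourOfDayKeySerde.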
compile('org.industrial-devops:titan-ccp-common-kafka:0.1.0-SNAPSHOT') +} + +mainClassName = "theodolite.uc3.application.HistoryServiceFlinkJob" diff --git a/theodolite-benchmarks/uc3-flink/src/main/java/theodolite/uc3/application/ConfigurationKeys.java b/theodolite-benchmarks/uc3-flink/src/main/java/theodolite/uc3/application/ConfigurationKeys.java new file mode 100644 index 0000000000000000000000000000000000000000..a895c74d89c5d788c47b3b78dc70500b4b5a6f5b --- /dev/null +++ b/theodolite-benchmarks/uc3-flink/src/main/java/theodolite/uc3/application/ConfigurationKeys.java @@ -0,0 +1,39 @@ +package theodolite.uc3.application; + +/** + * Keys to access configuration parameters. + */ +public final class ConfigurationKeys { + + public static final String APPLICATION_NAME = "application.name"; + + public static final String APPLICATION_VERSION = "application.version"; + + public static final String KAFKA_BOOTSTRAP_SERVERS = "kafka.bootstrap.servers"; + + public static final String KAFKA_INPUT_TOPIC = "kafka.input.topic"; + + public static final String KAFKA_OUTPUT_TOPIC = "kafka.output.topic"; + + public static final String SCHEMA_REGISTRY_URL = "schema.registry.url"; + + public static final String AGGREGATION_DURATION_DAYS = "aggregation.duration.days"; + + public static final String AGGREGATION_ADVANCE_DAYS = "aggregation.advance.days"; + + public static final String COMMIT_INTERVAL_MS = "commit.interval.ms"; + + public static final String TIME_ZONE = "time.zone"; + + public static final String FLINK_STATE_BACKEND = "flink.state.backend"; + + public static final String FLINK_STATE_BACKEND_PATH = "flink.state.backend.path"; + + public static final String FLINK_STATE_BACKEND_MEMORY_SIZE = // NOPMD + "flink.state.backend.memory.size"; + + public static final String CHECKPOINTING = "checkpointing"; + + private ConfigurationKeys() {} + +} diff --git a/theodolite-benchmarks/uc3-flink/src/main/java/theodolite/uc3/application/HistoryServiceFlinkJob.java b/theodolite-benchmarks/uc3-flink/src/main/java/theodolite/uc3/application/HistoryServiceFlinkJob.java new file mode 100644 index 0000000000000000000000000000000000000000..0f26d37652924a16be1840fd759b3cd5b023f338 --- /dev/null +++ b/theodolite-benchmarks/uc3-flink/src/main/java/theodolite/uc3/application/HistoryServiceFlinkJob.java @@ -0,0 +1,153 @@ +package theodolite.uc3.application; + +import com.google.common.math.Stats; +import java.time.Instant; +import java.time.LocalDateTime; +import java.time.ZoneId; +import org.apache.commons.configuration2.Configuration; +import org.apache.flink.api.common.typeinfo.Types; +import org.apache.flink.api.java.functions.KeySelector; +import org.apache.flink.api.java.tuple.Tuple2; +import org.apache.flink.runtime.state.StateBackend; +import org.apache.flink.streaming.api.TimeCharacteristic; +import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; +import org.apache.flink.streaming.api.windowing.assigners.SlidingEventTimeWindows; +import org.apache.flink.streaming.api.windowing.time.Time; +import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer; +import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer; +import org.apache.kafka.common.serialization.Serdes; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import theodolite.commons.flink.KafkaConnectorFactory; +import theodolite.commons.flink.StateBackends; +import theodolite.commons.flink.serialization.StatsSerializer; +import theodolite.uc3.application.util.HourOfDayKey; +import 
theodolite.uc3.application.util.HourOfDayKeyFactory; +import theodolite.uc3.application.util.HourOfDayKeySerde; +import theodolite.uc3.application.util.StatsKeyFactory; +import titan.ccp.common.configuration.ServiceConfigurations; +import titan.ccp.model.records.ActivePowerRecord; + +/** + * The History microservice implemented as a Flink job. + */ +public final class HistoryServiceFlinkJob { + + private static final Logger LOGGER = LoggerFactory.getLogger(HistoryServiceFlinkJob.class); + + private final Configuration config = ServiceConfigurations.createWithDefaults(); + private final StreamExecutionEnvironment env; + private final String applicationId; + + /** + * Create a new instance of the {@link HistoryServiceFlinkJob}. + */ + public HistoryServiceFlinkJob() { + final String applicationName = this.config.getString(ConfigurationKeys.APPLICATION_NAME); + final String applicationVersion = this.config.getString(ConfigurationKeys.APPLICATION_VERSION); + this.applicationId = applicationName + "-" + applicationVersion; + + this.env = StreamExecutionEnvironment.getExecutionEnvironment(); + + this.configureEnv(); + + this.buildPipeline(); + } + + private void configureEnv() { + this.env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime); + + final boolean checkpointing = this.config.getBoolean(ConfigurationKeys.CHECKPOINTING, true); + final int commitIntervalMs = this.config.getInt(ConfigurationKeys.COMMIT_INTERVAL_MS); + if (checkpointing) { + this.env.enableCheckpointing(commitIntervalMs); + } + + // State Backend + final StateBackend stateBackend = StateBackends.fromConfiguration(this.config); + this.env.setStateBackend(stateBackend); + + this.configureSerializers(); + } + + private void configureSerializers() { + this.env.getConfig().registerTypeWithKryoSerializer(HourOfDayKey.class, + new HourOfDayKeySerde()); + this.env.getConfig().registerTypeWithKryoSerializer(Stats.class, new StatsSerializer()); + for (final var entry : this.env.getConfig().getRegisteredTypesWithKryoSerializers() + .entrySet()) { + LOGGER.info("Class {} registered with serializer {}.", + entry.getKey().getName(), + entry.getValue().getSerializer().getClass().getName()); + } + } + + private void buildPipeline() { + // Configurations + final String kafkaBroker = this.config.getString(ConfigurationKeys.KAFKA_BOOTSTRAP_SERVERS); + final String schemaRegistryUrl = this.config.getString(ConfigurationKeys.SCHEMA_REGISTRY_URL); + final String inputTopic = this.config.getString(ConfigurationKeys.KAFKA_INPUT_TOPIC); + final String outputTopic = this.config.getString(ConfigurationKeys.KAFKA_OUTPUT_TOPIC); + final ZoneId timeZone = ZoneId.of(this.config.getString(ConfigurationKeys.TIME_ZONE)); + final Time aggregationDuration = + Time.days(this.config.getInt(ConfigurationKeys.AGGREGATION_DURATION_DAYS)); + final Time aggregationAdvance = + Time.days(this.config.getInt(ConfigurationKeys.AGGREGATION_ADVANCE_DAYS)); + final boolean checkpointing = this.config.getBoolean(ConfigurationKeys.CHECKPOINTING, true); + + final KafkaConnectorFactory kafkaConnector = new KafkaConnectorFactory( + this.applicationId, kafkaBroker, checkpointing, schemaRegistryUrl); + + // Sources and Sinks + final FlinkKafkaConsumer<ActivePowerRecord> kafkaSource = + kafkaConnector.createConsumer(inputTopic, ActivePowerRecord.class); + final FlinkKafkaProducer<Tuple2<String, String>> kafkaSink = + kafkaConnector.createProducer(outputTopic, + Serdes::String, + Serdes::String, + Types.TUPLE(Types.STRING, Types.STRING)); + + // Streaming topology + final 
StatsKeyFactory<HourOfDayKey> keyFactory = new HourOfDayKeyFactory();
+    this.env
+        .addSource(kafkaSource)
+        .name("[Kafka Consumer] Topic: " + inputTopic)
+        .rebalance()
+        .keyBy((KeySelector<ActivePowerRecord, HourOfDayKey>) record -> {
+          final Instant instant = Instant.ofEpochMilli(record.getTimestamp());
+          final LocalDateTime dateTime = LocalDateTime.ofInstant(instant, timeZone);
+          return keyFactory.createKey(record.getIdentifier(), dateTime);
+        })
+        .window(SlidingEventTimeWindows.of(aggregationDuration, aggregationAdvance))
+        .aggregate(new StatsAggregateFunction(), new HourOfDayProcessWindowFunction())
+        .map(tuple -> {
+          final String newKey = keyFactory.getSensorId(tuple.f0);
+          final String newValue = tuple.f1.toString();
+          final int hourOfDay = tuple.f0.getHourOfDay();
+          LOGGER.info("{}|{}: {}", newKey, hourOfDay, newValue);
+          return new Tuple2<>(newKey, newValue);
+        })
+        .name("map")
+        .returns(Types.TUPLE(Types.STRING, Types.STRING))
+        .addSink(kafkaSink).name("[Kafka Producer] Topic: " + outputTopic);
+  }
+
+  /**
+   * Start running this microservice.
+   */
+  public void run() {
+    // Execution plan
+    LOGGER.info("Execution Plan: {}", this.env.getExecutionPlan());
+
+    // Execute Job
+    try {
+      this.env.execute(this.applicationId);
+    } catch (final Exception e) { // NOPMD Exception thrown by Flink
+      LOGGER.error("An error occurred while running this job.", e);
+    }
+  }
+
+  public static void main(final String[] args) {
+    new HistoryServiceFlinkJob().run();
+  }
+}
diff --git a/theodolite-benchmarks/uc3-flink/src/main/java/theodolite/uc3/application/HourOfDayProcessWindowFunction.java b/theodolite-benchmarks/uc3-flink/src/main/java/theodolite/uc3/application/HourOfDayProcessWindowFunction.java
new file mode 100644
index 0000000000000000000000000000000000000000..349c63413d0da792ad34e8ec8d94e7ff5dc06a42
--- /dev/null
+++ b/theodolite-benchmarks/uc3-flink/src/main/java/theodolite/uc3/application/HourOfDayProcessWindowFunction.java
@@ -0,0 +1,28 @@
+package theodolite.uc3.application;
+
+import com.google.common.math.Stats;
+import org.apache.flink.api.java.tuple.Tuple2;
+import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
+import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
+import org.apache.flink.util.Collector;
+import theodolite.uc3.application.util.HourOfDayKey;
+
+/**
+ * A {@link ProcessWindowFunction} that forwards a computed {@link Stats} object along with its
+ * associated key.
+ */ +public class HourOfDayProcessWindowFunction + extends ProcessWindowFunction<Stats, Tuple2<HourOfDayKey, Stats>, HourOfDayKey, TimeWindow> { + + private static final long serialVersionUID = 7702216563302727315L; // NOPMD + + @Override + public void process(final HourOfDayKey hourOfDayKey, + final Context context, + final Iterable<Stats> elements, + final Collector<Tuple2<HourOfDayKey, Stats>> out) { + final Stats stats = elements.iterator().next(); + out.collect(new Tuple2<>(hourOfDayKey, stats)); + } + +} diff --git a/theodolite-benchmarks/uc3-flink/src/main/java/theodolite/uc3/application/StatsAggregateFunction.java b/theodolite-benchmarks/uc3-flink/src/main/java/theodolite/uc3/application/StatsAggregateFunction.java new file mode 100644 index 0000000000000000000000000000000000000000..4706da0a9491e0391f25cd61639c3bb565509cb1 --- /dev/null +++ b/theodolite-benchmarks/uc3-flink/src/main/java/theodolite/uc3/application/StatsAggregateFunction.java @@ -0,0 +1,38 @@ +package theodolite.uc3.application; + +import com.google.common.math.Stats; +import com.google.common.math.StatsAccumulator; +import org.apache.flink.api.common.functions.AggregateFunction; +import theodolite.uc3.application.util.StatsFactory; +import titan.ccp.model.records.ActivePowerRecord; + +/** + * Statistical aggregation of {@link ActivePowerRecord}s using {@link Stats}. + */ +public class StatsAggregateFunction implements AggregateFunction<ActivePowerRecord, Stats, Stats> { + + private static final long serialVersionUID = -8873572990921515499L; // NOPMD + + @Override + public Stats createAccumulator() { + return Stats.of(); + } + + @Override + public Stats add(final ActivePowerRecord value, final Stats accumulator) { + return StatsFactory.accumulate(accumulator, value.getValueInW()); + } + + @Override + public Stats getResult(final Stats accumulator) { + return accumulator; + } + + @Override + public Stats merge(final Stats a, final Stats b) { + final StatsAccumulator statsAccumulator = new StatsAccumulator(); + statsAccumulator.addAll(a); + statsAccumulator.addAll(b); + return statsAccumulator.snapshot(); + } +} diff --git a/theodolite-benchmarks/uc3-flink/src/main/java/theodolite/uc3/application/util/HourOfDayKey.java b/theodolite-benchmarks/uc3-flink/src/main/java/theodolite/uc3/application/util/HourOfDayKey.java new file mode 100644 index 0000000000000000000000000000000000000000..5def88b404f23a59955ca2de42b91c22b7b1b53d --- /dev/null +++ b/theodolite-benchmarks/uc3-flink/src/main/java/theodolite/uc3/application/util/HourOfDayKey.java @@ -0,0 +1,79 @@ +package theodolite.uc3.application.util; + +import java.nio.ByteBuffer; +import java.nio.ByteOrder; +import java.util.Objects; + +/** + * Composed key of an hour of the day and a sensor id. 
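+ *
+ * <p>{@link #toByteArray()} and {@link #fromByteArray(byte[])} define the byte layout used by the
+ * Kryo serializer registered in {@code HistoryServiceFlinkJob} (see {@code HourOfDayKeySerde}).</p>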
+ */ +public class HourOfDayKey { + + private final int hourOfDay; + private final String sensorId; + + public HourOfDayKey(final int hourOfDay, final String sensorId) { + this.hourOfDay = hourOfDay; + this.sensorId = sensorId; + } + + public int getHourOfDay() { + return this.hourOfDay; + } + + public String getSensorId() { + return this.sensorId; + } + + @Override + public String toString() { + return this.sensorId + ";" + this.hourOfDay; + } + + @Override + public int hashCode() { + return Objects.hash(this.hourOfDay, this.sensorId); + } + + @Override + public boolean equals(final Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof HourOfDayKey)) { + return false; + } + final HourOfDayKey k = (HourOfDayKey) obj; + return this.hourOfDay == k.hourOfDay && this.sensorId.equals(k.sensorId); + } + + /** + * Convert this {@link HourOfDayKey} into a byte array. This method is the inverse to + * {@code HourOfDayKey#fromByteArray()}. + */ + public byte[] toByteArray() { + final int numBytes = (2 * Integer.SIZE + this.sensorId.length() * Character.SIZE) / Byte.SIZE; + final ByteBuffer buffer = ByteBuffer.allocate(numBytes).order(ByteOrder.LITTLE_ENDIAN); + buffer.putInt(this.hourOfDay); + buffer.putInt(this.sensorId.length()); + for (final char c : this.sensorId.toCharArray()) { + buffer.putChar(c); + } + return buffer.array(); + } + + /** + * Construct a new {@link HourOfDayKey} from a byte array. This method is the inverse to + * {@code HourOfDayKey#toByteArray()}. + */ + public static HourOfDayKey fromByteArray(final byte[] bytes) { + final ByteBuffer buffer = ByteBuffer.wrap(bytes).order(ByteOrder.LITTLE_ENDIAN); + final int hourOfDay = buffer.getInt(); + final int strLen = buffer.getInt(); + final char[] sensorId = new char[strLen]; + for (int i = 0; i < strLen; i++) { + sensorId[i] = buffer.getChar(); + } + return new HourOfDayKey(hourOfDay, new String(sensorId)); + } +} diff --git a/theodolite-benchmarks/uc3-flink/src/main/java/theodolite/uc3/application/util/HourOfDayKeyFactory.java b/theodolite-benchmarks/uc3-flink/src/main/java/theodolite/uc3/application/util/HourOfDayKeyFactory.java new file mode 100644 index 0000000000000000000000000000000000000000..bd67b2508bc91a87635c52e95b963ed908ed92bf --- /dev/null +++ b/theodolite-benchmarks/uc3-flink/src/main/java/theodolite/uc3/application/util/HourOfDayKeyFactory.java @@ -0,0 +1,24 @@ +package theodolite.uc3.application.util; + +import java.io.Serializable; +import java.time.LocalDateTime; + +/** + * {@link StatsKeyFactory} for {@link HourOfDayKey}. 
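+ *
+ * <p>Declared {@link java.io.Serializable} because an instance is captured by the key selector
+ * closure in {@code HistoryServiceFlinkJob}, and Flink requires such captured objects to be
+ * serializable.</p>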
+ */ +public class HourOfDayKeyFactory implements StatsKeyFactory<HourOfDayKey>, Serializable { + + private static final long serialVersionUID = 4357668496473645043L; // NOPMD + + @Override + public HourOfDayKey createKey(final String sensorId, final LocalDateTime dateTime) { + final int hourOfDay = dateTime.getHour(); + return new HourOfDayKey(hourOfDay, sensorId); + } + + @Override + public String getSensorId(final HourOfDayKey key) { + return key.getSensorId(); + } + +} diff --git a/theodolite-benchmarks/uc3-flink/src/main/java/theodolite/uc3/application/util/HourOfDayKeySerde.java b/theodolite-benchmarks/uc3-flink/src/main/java/theodolite/uc3/application/util/HourOfDayKeySerde.java new file mode 100644 index 0000000000000000000000000000000000000000..6e3ae9f754d2b1d4ab10349040f0c9e51134c4f7 --- /dev/null +++ b/theodolite-benchmarks/uc3-flink/src/main/java/theodolite/uc3/application/util/HourOfDayKeySerde.java @@ -0,0 +1,52 @@ +package theodolite.uc3.application.util; + +import com.esotericsoftware.kryo.Kryo; +import com.esotericsoftware.kryo.Serializer; +import com.esotericsoftware.kryo.io.Input; +import com.esotericsoftware.kryo.io.Output; +import java.io.Serializable; +import org.apache.kafka.common.serialization.Serde; +import titan.ccp.common.kafka.simpleserdes.BufferSerde; +import titan.ccp.common.kafka.simpleserdes.ReadBuffer; +import titan.ccp.common.kafka.simpleserdes.SimpleSerdes; +import titan.ccp.common.kafka.simpleserdes.WriteBuffer; + +/** + * {@link BufferSerde} for a {@link HourOfDayKey}. Use the {@link #create()} method to create a new + * Kafka {@link Serde}. + */ +public class HourOfDayKeySerde extends Serializer<HourOfDayKey> + implements BufferSerde<HourOfDayKey>, Serializable { + + private static final long serialVersionUID = 1262778284661945041L; // NOPMD + + @Override + public void serialize(final WriteBuffer buffer, final HourOfDayKey data) { + buffer.putInt(data.getHourOfDay()); + buffer.putString(data.getSensorId()); + } + + @Override + public HourOfDayKey deserialize(final ReadBuffer buffer) { + final int hourOfDay = buffer.getInt(); + final String sensorId = buffer.getString(); + return new HourOfDayKey(hourOfDay, sensorId); + } + + public static Serde<HourOfDayKey> create() { + return SimpleSerdes.create(new HourOfDayKeySerde()); + } + + @Override + public void write(final Kryo kryo, final Output output, final HourOfDayKey object) { + final byte[] data = object.toByteArray(); + output.writeInt(data.length); + output.writeBytes(data); + } + + @Override + public HourOfDayKey read(final Kryo kryo, final Input input, final Class<HourOfDayKey> type) { + final int numBytes = input.readInt(); + return HourOfDayKey.fromByteArray(input.readBytes(numBytes)); + } +} diff --git a/theodolite-benchmarks/uc3-flink/src/main/java/theodolite/uc3/application/util/HourOfDayRecordFactory.java b/theodolite-benchmarks/uc3-flink/src/main/java/theodolite/uc3/application/util/HourOfDayRecordFactory.java new file mode 100644 index 0000000000000000000000000000000000000000..d8a42b74e5ca1cc55f9f21de62a5d8f877223e62 --- /dev/null +++ b/theodolite-benchmarks/uc3-flink/src/main/java/theodolite/uc3/application/util/HourOfDayRecordFactory.java @@ -0,0 +1,28 @@ +package theodolite.uc3.application.util; + +import com.google.common.math.Stats; +import org.apache.kafka.streams.kstream.Windowed; +import titan.ccp.model.records.HourOfDayActivePowerRecord; + +/** + * {@link StatsRecordFactory} to create an {@link HourOfDayActivePowerRecord}. 
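+ *
+ * <p>The produced Avro record carries the sensor id, the hour of the day, the window boundaries,
+ * and the count, mean, population variance, minimum, and maximum of the aggregated values.</p>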
+ */
+public class HourOfDayRecordFactory
+    implements StatsRecordFactory<HourOfDayKey, HourOfDayActivePowerRecord> {
+
+  @Override
+  public HourOfDayActivePowerRecord create(final Windowed<HourOfDayKey> windowed,
+      final Stats stats) {
+    return new HourOfDayActivePowerRecord(
+        windowed.key().getSensorId(),
+        windowed.key().getHourOfDay(),
+        windowed.window().start(),
+        windowed.window().end(),
+        stats.count(),
+        stats.mean(),
+        stats.populationVariance(),
+        stats.min(),
+        stats.max());
+  }
+
+}
diff --git a/theodolite-benchmarks/uc3-flink/src/main/java/theodolite/uc3/application/util/StatsFactory.java b/theodolite-benchmarks/uc3-flink/src/main/java/theodolite/uc3/application/util/StatsFactory.java
new file mode 100644
index 0000000000000000000000000000000000000000..b7880be4eb48035959251cc56273d16407bcb888
--- /dev/null
+++ b/theodolite-benchmarks/uc3-flink/src/main/java/theodolite/uc3/application/util/StatsFactory.java
@@ -0,0 +1,23 @@
+package theodolite.uc3.application.util;
+
+import com.google.common.math.Stats;
+import com.google.common.math.StatsAccumulator;
+
+/**
+ * Factory methods for working with {@link Stats}.
+ */
+public final class StatsFactory {
+
+  private StatsFactory() {}
+
+  /**
+   * Add a value to a {@link Stats} object.
+   */
+  public static Stats accumulate(final Stats stats, final double value) {
+    final StatsAccumulator statsAccumulator = new StatsAccumulator();
+    statsAccumulator.addAll(stats);
+    statsAccumulator.add(value);
+    return statsAccumulator.snapshot();
+  }
+
+}
diff --git a/theodolite-benchmarks/uc3-flink/src/main/java/theodolite/uc3/application/util/StatsKeyFactory.java b/theodolite-benchmarks/uc3-flink/src/main/java/theodolite/uc3/application/util/StatsKeyFactory.java
new file mode 100644
index 0000000000000000000000000000000000000000..fdebccaa2d116253c41492cab3443057adef7b36
--- /dev/null
+++ b/theodolite-benchmarks/uc3-flink/src/main/java/theodolite/uc3/application/util/StatsKeyFactory.java
@@ -0,0 +1,17 @@
+package theodolite.uc3.application.util;
+
+import java.time.LocalDateTime;
+
+/**
+ * Factory interface for creating a stats key from a sensor id and a {@link LocalDateTime} object
+ * and vice versa.
+ *
+ * @param <T> Type of the key
+ */
+public interface StatsKeyFactory<T> {
+
+  T createKey(String sensorId, LocalDateTime dateTime);
+
+  String getSensorId(T key);
+
+}
diff --git a/theodolite-benchmarks/uc3-flink/src/main/java/theodolite/uc3/application/util/StatsRecordFactory.java b/theodolite-benchmarks/uc3-flink/src/main/java/theodolite/uc3/application/util/StatsRecordFactory.java
new file mode 100644
index 0000000000000000000000000000000000000000..61333c99966b1ffea608d225f17d8460eac9ada1
--- /dev/null
+++ b/theodolite-benchmarks/uc3-flink/src/main/java/theodolite/uc3/application/util/StatsRecordFactory.java
@@ -0,0 +1,22 @@
+package theodolite.uc3.application.util;
+
+import com.google.common.math.Stats;
+import org.apache.avro.specific.SpecificRecord;
+import org.apache.kafka.streams.kstream.Window;
+import org.apache.kafka.streams.kstream.Windowed;
+
+/**
+ * Factory interface for creating a stats Avro record from a {@link Windowed} and a {@link Stats}.
+ * The {@link Windowed} contains information about the start and end of the {@link Window} as
+ * well as the sensor id and the aggregated time unit. The {@link Stats} object contains the actual
+ * aggregation results.
+ * + * @param <K> Key type of the {@link Windowed} + * @param <R> Avro record type + */ +@FunctionalInterface +public interface StatsRecordFactory<K, R extends SpecificRecord> { + + R create(Windowed<K> windowed, Stats stats); + +} diff --git a/theodolite-benchmarks/uc3-flink/src/main/resources/META-INF/application.properties b/theodolite-benchmarks/uc3-flink/src/main/resources/META-INF/application.properties new file mode 100644 index 0000000000000000000000000000000000000000..6b6874674ce6a0abea73ea6d983c00c15deb8bb1 --- /dev/null +++ b/theodolite-benchmarks/uc3-flink/src/main/resources/META-INF/application.properties @@ -0,0 +1,13 @@ +application.name=theodolite-uc3-application +application.version=0.0.1 + +kafka.bootstrap.servers=localhost:9092 +kafka.input.topic=input +kafka.output.topic=output +schema.registry.url=http://localhost:8081 +aggregation.duration.days=30 +aggregation.advance.days=1 +num.threads=1 +commit.interval.ms=100 +cache.max.bytes.buffering=-1 +time.zone=Europe/Paris \ No newline at end of file diff --git a/benchmarks/uc3-application/.settings/org.eclipse.jdt.ui.prefs b/theodolite-benchmarks/uc3-kstreams/.settings/org.eclipse.jdt.ui.prefs similarity index 100% rename from benchmarks/uc3-application/.settings/org.eclipse.jdt.ui.prefs rename to theodolite-benchmarks/uc3-kstreams/.settings/org.eclipse.jdt.ui.prefs diff --git a/theodolite-benchmarks/uc3-kstreams/.settings/qa.eclipse.plugin.checkstyle.prefs b/theodolite-benchmarks/uc3-kstreams/.settings/qa.eclipse.plugin.checkstyle.prefs new file mode 100644 index 0000000000000000000000000000000000000000..87860c815222845c1d264d7d0ce498d3397f8280 --- /dev/null +++ b/theodolite-benchmarks/uc3-kstreams/.settings/qa.eclipse.plugin.checkstyle.prefs @@ -0,0 +1,4 @@ +configFilePath=../config/checkstyle.xml +customModulesJarPaths= +eclipse.preferences.version=1 +enabled=true diff --git a/theodolite-benchmarks/uc3-kstreams/.settings/qa.eclipse.plugin.pmd.prefs b/theodolite-benchmarks/uc3-kstreams/.settings/qa.eclipse.plugin.pmd.prefs new file mode 100644 index 0000000000000000000000000000000000000000..efbcb8c9e5d449194a48ca1ea42b7d807b573db9 --- /dev/null +++ b/theodolite-benchmarks/uc3-kstreams/.settings/qa.eclipse.plugin.pmd.prefs @@ -0,0 +1,4 @@ +customRulesJars= +eclipse.preferences.version=1 +enabled=true +ruleSetFilePath=../config/pmd.xml diff --git a/benchmarks/uc3-application/Dockerfile b/theodolite-benchmarks/uc3-kstreams/Dockerfile similarity index 52% rename from benchmarks/uc3-application/Dockerfile rename to theodolite-benchmarks/uc3-kstreams/Dockerfile index 61141baaf752af4b596c8a04cd0d7cc2e6d740af..30d6994b0214c8ff3576a79781654b9018fdf93a 100644 --- a/benchmarks/uc3-application/Dockerfile +++ b/theodolite-benchmarks/uc3-kstreams/Dockerfile @@ -1,6 +1,6 @@ FROM openjdk:11-slim -ADD build/distributions/uc3-application.tar / +ADD build/distributions/uc3-kstreams.tar / CMD JAVA_OPTS="$JAVA_OPTS -Dorg.slf4j.simpleLogger.defaultLogLevel=$LOG_LEVEL" \ - /uc3-application/bin/uc3-application + /uc3-kstreams/bin/uc3-kstreams diff --git a/benchmarks/uc3-application/build.gradle b/theodolite-benchmarks/uc3-kstreams/build.gradle similarity index 57% rename from benchmarks/uc3-application/build.gradle rename to theodolite-benchmarks/uc3-kstreams/build.gradle index aa96b6dbf90c4895dfda57a51c753c9103c29414..d588d85ae88e3efd2b687e44e9eb9561a45cd8c0 100644 --- a/benchmarks/uc3-application/build.gradle +++ b/theodolite-benchmarks/uc3-kstreams/build.gradle @@ -1 +1,5 @@ +plugins { + id 'theodolite.kstreams' +} + mainClassName = 
"theodolite.uc3.application.HistoryService" diff --git a/benchmarks/uc3-application/src/main/java/theodolite/uc3/application/HistoryService.java b/theodolite-benchmarks/uc3-kstreams/src/main/java/theodolite/uc3/application/HistoryService.java similarity index 100% rename from benchmarks/uc3-application/src/main/java/theodolite/uc3/application/HistoryService.java rename to theodolite-benchmarks/uc3-kstreams/src/main/java/theodolite/uc3/application/HistoryService.java diff --git a/benchmarks/uc3-application/src/main/java/theodolite/uc3/streamprocessing/HourOfDayKey.java b/theodolite-benchmarks/uc3-kstreams/src/main/java/theodolite/uc3/streamprocessing/HourOfDayKey.java similarity index 100% rename from benchmarks/uc3-application/src/main/java/theodolite/uc3/streamprocessing/HourOfDayKey.java rename to theodolite-benchmarks/uc3-kstreams/src/main/java/theodolite/uc3/streamprocessing/HourOfDayKey.java diff --git a/benchmarks/uc3-application/src/main/java/theodolite/uc3/streamprocessing/HourOfDayKeyFactory.java b/theodolite-benchmarks/uc3-kstreams/src/main/java/theodolite/uc3/streamprocessing/HourOfDayKeyFactory.java similarity index 100% rename from benchmarks/uc3-application/src/main/java/theodolite/uc3/streamprocessing/HourOfDayKeyFactory.java rename to theodolite-benchmarks/uc3-kstreams/src/main/java/theodolite/uc3/streamprocessing/HourOfDayKeyFactory.java diff --git a/benchmarks/uc3-application/src/main/java/theodolite/uc3/streamprocessing/HourOfDayKeySerde.java b/theodolite-benchmarks/uc3-kstreams/src/main/java/theodolite/uc3/streamprocessing/HourOfDayKeySerde.java similarity index 100% rename from benchmarks/uc3-application/src/main/java/theodolite/uc3/streamprocessing/HourOfDayKeySerde.java rename to theodolite-benchmarks/uc3-kstreams/src/main/java/theodolite/uc3/streamprocessing/HourOfDayKeySerde.java diff --git a/benchmarks/uc3-application/src/main/java/theodolite/uc3/streamprocessing/HourOfDayRecordFactory.java b/theodolite-benchmarks/uc3-kstreams/src/main/java/theodolite/uc3/streamprocessing/HourOfDayRecordFactory.java similarity index 100% rename from benchmarks/uc3-application/src/main/java/theodolite/uc3/streamprocessing/HourOfDayRecordFactory.java rename to theodolite-benchmarks/uc3-kstreams/src/main/java/theodolite/uc3/streamprocessing/HourOfDayRecordFactory.java diff --git a/benchmarks/uc3-application/src/main/java/theodolite/uc3/streamprocessing/RecordDatabaseAdapter.java b/theodolite-benchmarks/uc3-kstreams/src/main/java/theodolite/uc3/streamprocessing/RecordDatabaseAdapter.java similarity index 100% rename from benchmarks/uc3-application/src/main/java/theodolite/uc3/streamprocessing/RecordDatabaseAdapter.java rename to theodolite-benchmarks/uc3-kstreams/src/main/java/theodolite/uc3/streamprocessing/RecordDatabaseAdapter.java diff --git a/benchmarks/uc3-application/src/main/java/theodolite/uc3/streamprocessing/StatsKeyFactory.java b/theodolite-benchmarks/uc3-kstreams/src/main/java/theodolite/uc3/streamprocessing/StatsKeyFactory.java similarity index 100% rename from benchmarks/uc3-application/src/main/java/theodolite/uc3/streamprocessing/StatsKeyFactory.java rename to theodolite-benchmarks/uc3-kstreams/src/main/java/theodolite/uc3/streamprocessing/StatsKeyFactory.java diff --git a/benchmarks/uc3-application/src/main/java/theodolite/uc3/streamprocessing/StatsRecordFactory.java b/theodolite-benchmarks/uc3-kstreams/src/main/java/theodolite/uc3/streamprocessing/StatsRecordFactory.java similarity index 100% rename from 
benchmarks/uc3-application/src/main/java/theodolite/uc3/streamprocessing/StatsRecordFactory.java rename to theodolite-benchmarks/uc3-kstreams/src/main/java/theodolite/uc3/streamprocessing/StatsRecordFactory.java diff --git a/benchmarks/uc3-application/src/main/java/theodolite/uc3/streamprocessing/TopologyBuilder.java b/theodolite-benchmarks/uc3-kstreams/src/main/java/theodolite/uc3/streamprocessing/TopologyBuilder.java similarity index 100% rename from benchmarks/uc3-application/src/main/java/theodolite/uc3/streamprocessing/TopologyBuilder.java rename to theodolite-benchmarks/uc3-kstreams/src/main/java/theodolite/uc3/streamprocessing/TopologyBuilder.java diff --git a/benchmarks/uc3-application/src/main/java/theodolite/uc3/streamprocessing/Uc3KafkaStreamsBuilder.java b/theodolite-benchmarks/uc3-kstreams/src/main/java/theodolite/uc3/streamprocessing/Uc3KafkaStreamsBuilder.java similarity index 100% rename from benchmarks/uc3-application/src/main/java/theodolite/uc3/streamprocessing/Uc3KafkaStreamsBuilder.java rename to theodolite-benchmarks/uc3-kstreams/src/main/java/theodolite/uc3/streamprocessing/Uc3KafkaStreamsBuilder.java diff --git a/benchmarks/uc3-application/src/main/java/theodolite/uc3/streamprocessing/util/StatsFactory.java b/theodolite-benchmarks/uc3-kstreams/src/main/java/theodolite/uc3/streamprocessing/util/StatsFactory.java similarity index 100% rename from benchmarks/uc3-application/src/main/java/theodolite/uc3/streamprocessing/util/StatsFactory.java rename to theodolite-benchmarks/uc3-kstreams/src/main/java/theodolite/uc3/streamprocessing/util/StatsFactory.java diff --git a/benchmarks/uc3-application/src/main/resources/META-INF/application.properties b/theodolite-benchmarks/uc3-kstreams/src/main/resources/META-INF/application.properties similarity index 83% rename from benchmarks/uc3-application/src/main/resources/META-INF/application.properties rename to theodolite-benchmarks/uc3-kstreams/src/main/resources/META-INF/application.properties index 1273441a61763325c812541e1af8c243f81a31a5..0ce745fb61f87016aee5cc242c03069924ceb58e 100644 --- a/benchmarks/uc3-application/src/main/resources/META-INF/application.properties +++ b/theodolite-benchmarks/uc3-kstreams/src/main/resources/META-INF/application.properties @@ -7,4 +7,4 @@ kafka.output.topic=output aggregation.duration.days=30 aggregation.advance.days=1 -schema.registry.url=http://localhost:8091 +schema.registry.url=http://localhost:8081 diff --git a/benchmarks/uc3-workload-generator/.settings/org.eclipse.jdt.ui.prefs b/theodolite-benchmarks/uc3-load-generator/.settings/org.eclipse.jdt.ui.prefs similarity index 100% rename from benchmarks/uc3-workload-generator/.settings/org.eclipse.jdt.ui.prefs rename to theodolite-benchmarks/uc3-load-generator/.settings/org.eclipse.jdt.ui.prefs diff --git a/theodolite-benchmarks/uc3-load-generator/.settings/qa.eclipse.plugin.checkstyle.prefs b/theodolite-benchmarks/uc3-load-generator/.settings/qa.eclipse.plugin.checkstyle.prefs new file mode 100644 index 0000000000000000000000000000000000000000..87860c815222845c1d264d7d0ce498d3397f8280 --- /dev/null +++ b/theodolite-benchmarks/uc3-load-generator/.settings/qa.eclipse.plugin.checkstyle.prefs @@ -0,0 +1,4 @@ +configFilePath=../config/checkstyle.xml +customModulesJarPaths= +eclipse.preferences.version=1 +enabled=true diff --git a/theodolite-benchmarks/uc3-load-generator/.settings/qa.eclipse.plugin.pmd.prefs b/theodolite-benchmarks/uc3-load-generator/.settings/qa.eclipse.plugin.pmd.prefs new file mode 100644 index 
0000000000000000000000000000000000000000..efbcb8c9e5d449194a48ca1ea42b7d807b573db9 --- /dev/null +++ b/theodolite-benchmarks/uc3-load-generator/.settings/qa.eclipse.plugin.pmd.prefs @@ -0,0 +1,4 @@ +customRulesJars= +eclipse.preferences.version=1 +enabled=true +ruleSetFilePath=../config/pmd.xml diff --git a/theodolite-benchmarks/uc3-load-generator/Dockerfile b/theodolite-benchmarks/uc3-load-generator/Dockerfile new file mode 100644 index 0000000000000000000000000000000000000000..a2ab45d350bc01b4f526912fbb39f6f7c730f290 --- /dev/null +++ b/theodolite-benchmarks/uc3-load-generator/Dockerfile @@ -0,0 +1,6 @@ +FROM openjdk:11-slim + +ADD build/distributions/uc3-load-generator.tar / + +CMD JAVA_OPTS="$JAVA_OPTS -Dorg.slf4j.simpleLogger.defaultLogLevel=$LOG_LEVEL" \ + /uc3-load-generator/bin/uc3-load-generator diff --git a/benchmarks/uc3-workload-generator/build.gradle b/theodolite-benchmarks/uc3-load-generator/build.gradle similarity index 56% rename from benchmarks/uc3-workload-generator/build.gradle rename to theodolite-benchmarks/uc3-load-generator/build.gradle index c3ca94290c8600d8482210362666efc1249b8f02..fddc53ae8273a44d178e8828a38a503196af9208 100644 --- a/benchmarks/uc3-workload-generator/build.gradle +++ b/theodolite-benchmarks/uc3-load-generator/build.gradle @@ -1 +1,5 @@ +plugins { + id 'theodolite.load-generator' +} + mainClassName = "theodolite.uc3.workloadgenerator.LoadGenerator" diff --git a/benchmarks/uc3-workload-generator/src/main/java/theodolite/uc3/workloadgenerator/LoadGenerator.java b/theodolite-benchmarks/uc3-load-generator/src/main/java/theodolite/uc3/workloadgenerator/LoadGenerator.java similarity index 100% rename from benchmarks/uc3-workload-generator/src/main/java/theodolite/uc3/workloadgenerator/LoadGenerator.java rename to theodolite-benchmarks/uc3-load-generator/src/main/java/theodolite/uc3/workloadgenerator/LoadGenerator.java diff --git a/theodolite-benchmarks/uc4-flink/.settings/org.eclipse.jdt.ui.prefs b/theodolite-benchmarks/uc4-flink/.settings/org.eclipse.jdt.ui.prefs new file mode 100644 index 0000000000000000000000000000000000000000..272e01533f6a345d53d2635c47e38c6d3c33dc8a --- /dev/null +++ b/theodolite-benchmarks/uc4-flink/.settings/org.eclipse.jdt.ui.prefs @@ -0,0 +1,128 @@ +cleanup.add_default_serial_version_id=true +cleanup.add_generated_serial_version_id=false +cleanup.add_missing_annotations=true +cleanup.add_missing_deprecated_annotations=true +cleanup.add_missing_methods=false +cleanup.add_missing_nls_tags=false +cleanup.add_missing_override_annotations=true +cleanup.add_missing_override_annotations_interface_methods=true +cleanup.add_serial_version_id=false +cleanup.always_use_blocks=true +cleanup.always_use_parentheses_in_expressions=false +cleanup.always_use_this_for_non_static_field_access=true +cleanup.always_use_this_for_non_static_method_access=true +cleanup.convert_functional_interfaces=false +cleanup.convert_to_enhanced_for_loop=true +cleanup.correct_indentation=true +cleanup.format_source_code=true +cleanup.format_source_code_changes_only=false +cleanup.insert_inferred_type_arguments=false +cleanup.make_local_variable_final=true +cleanup.make_parameters_final=true +cleanup.make_private_fields_final=true +cleanup.make_type_abstract_if_missing_method=false +cleanup.make_variable_declarations_final=true +cleanup.never_use_blocks=false +cleanup.never_use_parentheses_in_expressions=true +cleanup.organize_imports=true +cleanup.qualify_static_field_accesses_with_declaring_class=false 
+cleanup.qualify_static_member_accesses_through_instances_with_declaring_class=true +cleanup.qualify_static_member_accesses_through_subtypes_with_declaring_class=true +cleanup.qualify_static_member_accesses_with_declaring_class=true +cleanup.qualify_static_method_accesses_with_declaring_class=false +cleanup.remove_private_constructors=true +cleanup.remove_redundant_modifiers=false +cleanup.remove_redundant_semicolons=true +cleanup.remove_redundant_type_arguments=true +cleanup.remove_trailing_whitespaces=true +cleanup.remove_trailing_whitespaces_all=true +cleanup.remove_trailing_whitespaces_ignore_empty=false +cleanup.remove_unnecessary_casts=true +cleanup.remove_unnecessary_nls_tags=true +cleanup.remove_unused_imports=true +cleanup.remove_unused_local_variables=false +cleanup.remove_unused_private_fields=true +cleanup.remove_unused_private_members=false +cleanup.remove_unused_private_methods=true +cleanup.remove_unused_private_types=true +cleanup.sort_members=false +cleanup.sort_members_all=false +cleanup.use_anonymous_class_creation=false +cleanup.use_blocks=true +cleanup.use_blocks_only_for_return_and_throw=false +cleanup.use_lambda=true +cleanup.use_parentheses_in_expressions=true +cleanup.use_this_for_non_static_field_access=true +cleanup.use_this_for_non_static_field_access_only_if_necessary=false +cleanup.use_this_for_non_static_method_access=true +cleanup.use_this_for_non_static_method_access_only_if_necessary=false +cleanup_profile=_CAU-SE-Style +cleanup_settings_version=2 +eclipse.preferences.version=1 +editor_save_participant_org.eclipse.jdt.ui.postsavelistener.cleanup=true +formatter_profile=_CAU-SE-Style +formatter_settings_version=15 +org.eclipse.jdt.ui.ignorelowercasenames=true +org.eclipse.jdt.ui.importorder=; +org.eclipse.jdt.ui.ondemandthreshold=99 +org.eclipse.jdt.ui.staticondemandthreshold=99 +org.eclipse.jdt.ui.text.custom_code_templates= +sp_cleanup.add_default_serial_version_id=true +sp_cleanup.add_generated_serial_version_id=false +sp_cleanup.add_missing_annotations=true +sp_cleanup.add_missing_deprecated_annotations=true +sp_cleanup.add_missing_methods=false +sp_cleanup.add_missing_nls_tags=false +sp_cleanup.add_missing_override_annotations=true +sp_cleanup.add_missing_override_annotations_interface_methods=true +sp_cleanup.add_serial_version_id=false +sp_cleanup.always_use_blocks=true +sp_cleanup.always_use_parentheses_in_expressions=false +sp_cleanup.always_use_this_for_non_static_field_access=true +sp_cleanup.always_use_this_for_non_static_method_access=true +sp_cleanup.convert_functional_interfaces=false +sp_cleanup.convert_to_enhanced_for_loop=true +sp_cleanup.correct_indentation=true +sp_cleanup.format_source_code=true +sp_cleanup.format_source_code_changes_only=false +sp_cleanup.insert_inferred_type_arguments=false +sp_cleanup.make_local_variable_final=true +sp_cleanup.make_parameters_final=true +sp_cleanup.make_private_fields_final=true +sp_cleanup.make_type_abstract_if_missing_method=false +sp_cleanup.make_variable_declarations_final=true +sp_cleanup.never_use_blocks=false +sp_cleanup.never_use_parentheses_in_expressions=true +sp_cleanup.on_save_use_additional_actions=true +sp_cleanup.organize_imports=true +sp_cleanup.qualify_static_field_accesses_with_declaring_class=false +sp_cleanup.qualify_static_member_accesses_through_instances_with_declaring_class=true +sp_cleanup.qualify_static_member_accesses_through_subtypes_with_declaring_class=true +sp_cleanup.qualify_static_member_accesses_with_declaring_class=true 
+sp_cleanup.qualify_static_method_accesses_with_declaring_class=false +sp_cleanup.remove_private_constructors=true +sp_cleanup.remove_redundant_modifiers=true +sp_cleanup.remove_redundant_semicolons=true +sp_cleanup.remove_redundant_type_arguments=true +sp_cleanup.remove_trailing_whitespaces=true +sp_cleanup.remove_trailing_whitespaces_all=true +sp_cleanup.remove_trailing_whitespaces_ignore_empty=false +sp_cleanup.remove_unnecessary_casts=true +sp_cleanup.remove_unnecessary_nls_tags=true +sp_cleanup.remove_unused_imports=true +sp_cleanup.remove_unused_local_variables=false +sp_cleanup.remove_unused_private_fields=true +sp_cleanup.remove_unused_private_members=false +sp_cleanup.remove_unused_private_methods=true +sp_cleanup.remove_unused_private_types=true +sp_cleanup.sort_members=false +sp_cleanup.sort_members_all=false +sp_cleanup.use_anonymous_class_creation=false +sp_cleanup.use_blocks=true +sp_cleanup.use_blocks_only_for_return_and_throw=false +sp_cleanup.use_lambda=true +sp_cleanup.use_parentheses_in_expressions=true +sp_cleanup.use_this_for_non_static_field_access=true +sp_cleanup.use_this_for_non_static_field_access_only_if_necessary=false +sp_cleanup.use_this_for_non_static_method_access=true +sp_cleanup.use_this_for_non_static_method_access_only_if_necessary=false diff --git a/theodolite-benchmarks/uc4-flink/.settings/qa.eclipse.plugin.checkstyle.prefs b/theodolite-benchmarks/uc4-flink/.settings/qa.eclipse.plugin.checkstyle.prefs new file mode 100644 index 0000000000000000000000000000000000000000..87860c815222845c1d264d7d0ce498d3397f8280 --- /dev/null +++ b/theodolite-benchmarks/uc4-flink/.settings/qa.eclipse.plugin.checkstyle.prefs @@ -0,0 +1,4 @@ +configFilePath=../config/checkstyle.xml +customModulesJarPaths= +eclipse.preferences.version=1 +enabled=true diff --git a/theodolite-benchmarks/uc4-flink/.settings/qa.eclipse.plugin.pmd.prefs b/theodolite-benchmarks/uc4-flink/.settings/qa.eclipse.plugin.pmd.prefs new file mode 100644 index 0000000000000000000000000000000000000000..efbcb8c9e5d449194a48ca1ea42b7d807b573db9 --- /dev/null +++ b/theodolite-benchmarks/uc4-flink/.settings/qa.eclipse.plugin.pmd.prefs @@ -0,0 +1,4 @@ +customRulesJars= +eclipse.preferences.version=1 +enabled=true +ruleSetFilePath=../config/pmd.xml diff --git a/theodolite-benchmarks/uc4-flink/Dockerfile b/theodolite-benchmarks/uc4-flink/Dockerfile new file mode 100644 index 0000000000000000000000000000000000000000..4f51f379e5da436104bb8c914e3233d6ecb4ec1f --- /dev/null +++ b/theodolite-benchmarks/uc4-flink/Dockerfile @@ -0,0 +1,3 @@ +FROM flink:1.12-scala_2.12-java11 + +ADD build/libs/uc4-flink-all.jar /opt/flink/usrlib/artifacts/uc4-flink-all.jar \ No newline at end of file diff --git a/theodolite-benchmarks/uc4-flink/build.gradle b/theodolite-benchmarks/uc4-flink/build.gradle new file mode 100644 index 0000000000000000000000000000000000000000..ebc7ca5f30a668fd161bb22f95133452b5061441 --- /dev/null +++ b/theodolite-benchmarks/uc4-flink/build.gradle @@ -0,0 +1,5 @@ +plugins { + id 'theodolite.flink' +} + +mainClassName = "theodolite.uc4.application.AggregationServiceFlinkJob" diff --git a/theodolite-benchmarks/uc4-flink/src/main/java/theodolite/uc4/application/AggregationServiceFlinkJob.java b/theodolite-benchmarks/uc4-flink/src/main/java/theodolite/uc4/application/AggregationServiceFlinkJob.java new file mode 100644 index 0000000000000000000000000000000000000000..0db5a3d524f74fbf22304e8f9b44fa55eead321a --- /dev/null +++ 
b/theodolite-benchmarks/uc4-flink/src/main/java/theodolite/uc4/application/AggregationServiceFlinkJob.java @@ -0,0 +1,217 @@ +package theodolite.uc4.application; // NOPMD Imports required + +import java.time.Duration; +import java.util.Set; +import org.apache.commons.configuration2.Configuration; +import org.apache.flink.api.common.eventtime.WatermarkStrategy; +import org.apache.flink.api.common.typeinfo.TypeInformation; +import org.apache.flink.api.common.typeinfo.Types; +import org.apache.flink.api.java.functions.KeySelector; +import org.apache.flink.api.java.tuple.Tuple2; +import org.apache.flink.runtime.state.StateBackend; +import org.apache.flink.streaming.api.TimeCharacteristic; +import org.apache.flink.streaming.api.datastream.DataStream; +import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; +import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows; +import org.apache.flink.streaming.api.windowing.time.Time; +import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer; +import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer; +import org.apache.kafka.common.serialization.Serdes; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import theodolite.commons.flink.KafkaConnectorFactory; +import theodolite.commons.flink.StateBackends; +import theodolite.commons.flink.TupleType; +import theodolite.uc4.application.util.ImmutableSensorRegistrySerializer; +import theodolite.uc4.application.util.ImmutableSetSerializer; +import theodolite.uc4.application.util.SensorParentKey; +import theodolite.uc4.application.util.SensorParentKeySerializer; +import titan.ccp.common.configuration.ServiceConfigurations; +import titan.ccp.common.kafka.avro.SchemaRegistryAvroSerdeFactory; +import titan.ccp.configuration.events.Event; +import titan.ccp.configuration.events.EventSerde; +import titan.ccp.model.records.ActivePowerRecord; +import titan.ccp.model.records.AggregatedActivePowerRecord; +import titan.ccp.model.sensorregistry.ImmutableSensorRegistry; +import titan.ccp.model.sensorregistry.SensorRegistry; + +/** + * The Aggregation microservice implemented as a Flink job. + */ +public final class AggregationServiceFlinkJob { + + private static final Logger LOGGER = LoggerFactory.getLogger(AggregationServiceFlinkJob.class); + + private final Configuration config = ServiceConfigurations.createWithDefaults(); + private final StreamExecutionEnvironment env; + private final String applicationId; + + /** + * Create a new {@link AggregationServiceFlinkJob}. 
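+ * Reads the service configuration, configures the execution environment (checkpointing, state backend and Kryo serializers) and builds the processing pipeline.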
+ */ + public AggregationServiceFlinkJob() { + final String applicationName = this.config.getString(ConfigurationKeys.APPLICATION_NAME); + final String applicationVersion = this.config.getString(ConfigurationKeys.APPLICATION_VERSION); + this.applicationId = applicationName + "-" + applicationVersion; + + // Execution environment configuration + // org.apache.flink.configuration.Configuration conf = new + // org.apache.flink.configuration.Configuration(); + // conf.setBoolean(ConfigConstants.LOCAL_START_WEBSERVER, true); + // final StreamExecutionEnvironment env = + // StreamExecutionEnvironment.createLocalEnvironmentWithWebUI(conf); + this.env = StreamExecutionEnvironment.getExecutionEnvironment(); + + this.configureEnv(); + + this.buildPipeline(); + } + + private void configureEnv() { + this.env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime); + + final boolean checkpointing = this.config.getBoolean(ConfigurationKeys.CHECKPOINTING, true); + final int commitIntervalMs = this.config.getInt(ConfigurationKeys.COMMIT_INTERVAL_MS); + if (checkpointing) { + this.env.enableCheckpointing(commitIntervalMs); + } + + // State Backend + final StateBackend stateBackend = StateBackends.fromConfiguration(this.config); + this.env.setStateBackend(stateBackend); + + this.configureSerializers(); + } + + private void configureSerializers() { + this.env.getConfig().registerTypeWithKryoSerializer(ImmutableSensorRegistry.class, + new ImmutableSensorRegistrySerializer()); + this.env.getConfig().registerTypeWithKryoSerializer(SensorParentKey.class, + new SensorParentKeySerializer()); + + this.env.getConfig().registerTypeWithKryoSerializer(Set.of().getClass(), + new ImmutableSetSerializer()); + this.env.getConfig().registerTypeWithKryoSerializer(Set.of(1).getClass(), + new ImmutableSetSerializer()); + this.env.getConfig().registerTypeWithKryoSerializer(Set.of(1, 2, 3, 4).getClass(), // NOCS + new ImmutableSetSerializer()); + + this.env.getConfig().getRegisteredTypesWithKryoSerializers() + .forEach((c, s) -> LOGGER.info("Class " + c.getName() + " registered with serializer " + + s.getSerializer().getClass().getName())); + } + + private void buildPipeline() { + // Get configurations + final String kafkaBroker = this.config.getString(ConfigurationKeys.KAFKA_BOOTSTRAP_SERVERS); + final String schemaRegistryUrl = this.config.getString(ConfigurationKeys.SCHEMA_REGISTRY_URL); + final String inputTopic = this.config.getString(ConfigurationKeys.KAFKA_INPUT_TOPIC); + final String outputTopic = this.config.getString(ConfigurationKeys.KAFKA_OUTPUT_TOPIC); + final Time windowSize = + Time.milliseconds(this.config.getLong(ConfigurationKeys.WINDOW_SIZE_MS)); + final Duration windowGrace = + Duration.ofMillis(this.config.getLong(ConfigurationKeys.WINDOW_GRACE_MS)); + final String configurationTopic = + this.config.getString(ConfigurationKeys.CONFIGURATION_KAFKA_TOPIC); + final boolean checkpointing = this.config.getBoolean(ConfigurationKeys.CHECKPOINTING, true); + + final KafkaConnectorFactory kafkaConnector = new KafkaConnectorFactory( + this.applicationId, kafkaBroker, checkpointing, schemaRegistryUrl); + + // Source from input topic with ActivePowerRecords + final FlinkKafkaConsumer<ActivePowerRecord> kafkaInputSource = + kafkaConnector.createConsumer(inputTopic, ActivePowerRecord.class); + // TODO Watermarks? 
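+ // Illustrative sketch (not part of this change): the TODO above could be resolved by
+ // assigning watermarks directly at the source, e.g. with a bounded out-of-orderness
+ // strategy derived from the configured grace period; the timestamp accessor here is an
+ // assumption for illustration only:
+ //
+ //   kafkaInputSource.assignTimestampsAndWatermarks(
+ //       WatermarkStrategy
+ //           .<ActivePowerRecord>forBoundedOutOfOrderness(windowGrace)
+ //           .withTimestampAssigner((record, ts) -> record.getTimestamp()));
+ //
+ // The pipeline below instead assigns watermarks after the join, right before windowing.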
+ + // Source from output topic with AggregatedPowerRecords + final FlinkKafkaConsumer<AggregatedActivePowerRecord> kafkaOutputSource = + kafkaConnector.createConsumer(outputTopic, AggregatedActivePowerRecord.class); + + final FlinkKafkaConsumer<Tuple2<Event, String>> kafkaConfigSource = + kafkaConnector.createConsumer( + configurationTopic, + EventSerde::serde, + Serdes::String, + TupleType.of(TypeInformation.of(Event.class), Types.STRING)); + + // Sink to output topic with SensorId, AggregatedActivePowerRecord + final FlinkKafkaProducer<Tuple2<String, AggregatedActivePowerRecord>> kafkaAggregationSink = + kafkaConnector.createProducer( + outputTopic, + Serdes::String, + () -> new SchemaRegistryAvroSerdeFactory(schemaRegistryUrl).forValues(), + Types.TUPLE(Types.STRING, TypeInformation.of(AggregatedActivePowerRecord.class))); + + // Build input stream + final DataStream<ActivePowerRecord> inputStream = this.env.addSource(kafkaInputSource) + .name("[Kafka Consumer] Topic: " + inputTopic)// NOCS + .rebalance() + .map(r -> r) + .name("[Map] Rebalance Forward"); + + // Build aggregation stream + final DataStream<ActivePowerRecord> aggregationsInputStream = + this.env.addSource(kafkaOutputSource) + .name("[Kafka Consumer] Topic: " + outputTopic) // NOCS + .rebalance() + .map(r -> new ActivePowerRecord(r.getIdentifier(), r.getTimestamp(), r.getSumInW())) + .name("[Map] AggregatedActivePowerRecord -> ActivePowerRecord"); + + // Merge input and aggregation streams + final DataStream<ActivePowerRecord> mergedInputStream = inputStream + .union(aggregationsInputStream); + + // Build parent sensor stream from configuration stream + final DataStream<Tuple2<String, Set<String>>> configurationsStream = + this.env.addSource(kafkaConfigSource) + .name("[Kafka Consumer] Topic: " + configurationTopic) // NOCS + .filter(tuple -> tuple.f0 == Event.SENSOR_REGISTRY_CHANGED + || tuple.f0 == Event.SENSOR_REGISTRY_STATUS) + .name("[Filter] SensorRegistry changed") + .map(tuple -> SensorRegistry.fromJson(tuple.f1)).name("[Map] JSON -> SensorRegistry") + .keyBy(sr -> 1) + .flatMap(new ChildParentsFlatMapFunction()) + .name("[FlatMap] SensorRegistry -> (ChildSensor, ParentSensor[])"); + + final DataStream<Tuple2<SensorParentKey, ActivePowerRecord>> lastValueStream = + mergedInputStream.connect(configurationsStream) + .keyBy(ActivePowerRecord::getIdentifier, + (KeySelector<Tuple2<String, Set<String>>, String>) t -> t.f0) + .flatMap(new JoinAndDuplicateCoFlatMapFunction()) + .name("[CoFlatMap] Join input-config, Flatten to ((Sensor, Group), ActivePowerRecord)"); + + final DataStream<AggregatedActivePowerRecord> aggregationStream = lastValueStream + .rebalance() + .assignTimestampsAndWatermarks(WatermarkStrategy.forBoundedOutOfOrderness(windowGrace)) + .keyBy(t -> t.f0.getParent()) + .window(TumblingEventTimeWindows.of(windowSize)) + .process(new RecordAggregationProcessWindowFunction()) + .name("[Aggregate] ((Sensor, Group), ActivePowerRecord) -> AggregatedActivePowerRecord"); + + // add Kafka Sink + aggregationStream + .map(value -> new Tuple2<>(value.getIdentifier(), value)) + .name("[Map] AggregatedActivePowerRecord -> (Sensor, AggregatedActivePowerRecord)") + .returns(Types.TUPLE(Types.STRING, TypeInformation.of(AggregatedActivePowerRecord.class))) + .addSink(kafkaAggregationSink).name("[Kafka Producer] Topic: " + outputTopic); + } + + /** + * Start running this microservice. 
+ */ + public void run() { + // Execution plan + LOGGER.info("Execution plan: {}", this.env.getExecutionPlan()); + + // Execute Job + try { + this.env.execute(this.applicationId); + } catch (final Exception e) { // NOPMD Execution thrown by Flink + LOGGER.error("An error occurred while running this job.", e); + } + } + + public static void main(final String[] args) { + new AggregationServiceFlinkJob().run(); + } +} diff --git a/theodolite-benchmarks/uc4-flink/src/main/java/theodolite/uc4/application/ChildParentsFlatMapFunction.java b/theodolite-benchmarks/uc4-flink/src/main/java/theodolite/uc4/application/ChildParentsFlatMapFunction.java new file mode 100644 index 0000000000000000000000000000000000000000..910dc359fa9b5b0810f7f9b6e67bfceaa68cc798 --- /dev/null +++ b/theodolite-benchmarks/uc4-flink/src/main/java/theodolite/uc4/application/ChildParentsFlatMapFunction.java @@ -0,0 +1,102 @@ +package theodolite.uc4.application; + +import java.util.Iterator; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import org.apache.flink.api.common.functions.RichFlatMapFunction; +import org.apache.flink.api.common.state.MapState; +import org.apache.flink.api.common.state.MapStateDescriptor; +import org.apache.flink.api.common.typeinfo.TypeHint; +import org.apache.flink.api.common.typeinfo.TypeInformation; +import org.apache.flink.api.java.tuple.Tuple2; +import org.apache.flink.configuration.Configuration; +import org.apache.flink.util.Collector; +import titan.ccp.model.sensorregistry.AggregatedSensor; +import titan.ccp.model.sensorregistry.Sensor; +import titan.ccp.model.sensorregistry.SensorRegistry; + +/** + * Transforms a {@link SensorRegistry} into key value pairs of Sensor identifiers and their parents' + * sensor identifiers. All pairs whose sensor's parents have changed since last iteration are + * forwarded. A mapping of an identifier to <code>null</code> means that the corresponding sensor + * no longer exists in the sensor registry.
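+ * For example, registering a new sensor "s1" below parent group "g1" results in the pair (s1, {g1}) being forwarded; deleting "s1" from the registry later results in (s1, null).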
+ */ +public class ChildParentsFlatMapFunction + extends RichFlatMapFunction<SensorRegistry, Tuple2<String, Set<String>>> { + + private static final long serialVersionUID = 3969444219510915221L; // NOPMD + + private transient MapState<String, Set<String>> state; + + @Override + public void open(final Configuration parameters) { + final MapStateDescriptor<String, Set<String>> descriptor = + new MapStateDescriptor<>( + "child-parents-state", + TypeInformation.of(new TypeHint<String>() {}), + TypeInformation.of(new TypeHint<Set<String>>() {})); + this.state = this.getRuntimeContext().getMapState(descriptor); + } + + @Override + public void flatMap(final SensorRegistry value, final Collector<Tuple2<String, Set<String>>> out) + throws Exception { + final Map<String, Set<String>> childParentsPairs = this.constructChildParentsPairs(value); + this.updateChildParentsPairs(childParentsPairs); + this.updateState(childParentsPairs); + childParentsPairs + .entrySet() + .stream() + .map(e -> new Tuple2<>(e.getKey(), e.getValue())) + .forEach(out::collect); + } + + private Map<String, Set<String>> constructChildParentsPairs(final SensorRegistry registry) { + return this.streamAllChildren(registry.getTopLevelSensor()) + .collect(Collectors.toMap( + Sensor::getIdentifier, + child -> child.getParent() + .map(p -> Set.of(p.getIdentifier())) + .orElseGet(Set::of))); + } + + private Stream<Sensor> streamAllChildren(final AggregatedSensor sensor) { + return sensor.getChildren().stream() + .flatMap(s -> Stream.concat( + Stream.of(s), + s instanceof AggregatedSensor ? this.streamAllChildren((AggregatedSensor) s) + : Stream.empty())); + } + + private void updateChildParentsPairs(final Map<String, Set<String>> childParentsPairs) + throws Exception { // NOPMD General exception thrown by Flink + final Iterator<Map.Entry<String, Set<String>>> oldChildParentsPairs = this.state.iterator(); + while (oldChildParentsPairs.hasNext()) { + final Map.Entry<String, Set<String>> oldChildParentPair = oldChildParentsPairs.next(); + final String identifier = oldChildParentPair.getKey(); + final Set<String> oldParents = oldChildParentPair.getValue(); + final Set<String> newParents = childParentsPairs.get(identifier); // null if not exists + if (newParents == null) { + // Sensor was deleted + childParentsPairs.put(identifier, null); + } else if (newParents.equals(oldParents)) { + // No changes + childParentsPairs.remove(identifier); + } + // Else: Later Perhaps: Mark changed parents + } + } + + private void updateState(final Map<String, Set<String>> childParentsPairs) + throws Exception { // NOPMD General exception thrown by Flink + for (final Map.Entry<String, Set<String>> childParentPair : childParentsPairs.entrySet()) { + if (childParentPair.getValue() == null) { + this.state.remove(childParentPair.getKey()); + } else { + this.state.put(childParentPair.getKey(), childParentPair.getValue()); + } + } + } +} diff --git a/theodolite-benchmarks/uc4-flink/src/main/java/theodolite/uc4/application/ConfigurationKeys.java b/theodolite-benchmarks/uc4-flink/src/main/java/theodolite/uc4/application/ConfigurationKeys.java new file mode 100644 index 0000000000000000000000000000000000000000..6497f6b055ef115c4a681499c5fa38657bb5d29e --- /dev/null +++ b/theodolite-benchmarks/uc4-flink/src/main/java/theodolite/uc4/application/ConfigurationKeys.java @@ -0,0 +1,40 @@ +package theodolite.uc4.application; + +/** + * Keys to access configuration parameters.
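+ * Default values for most of these keys are provided in src/main/resources/META-INF/application.properties (added below in this change).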
+ */ +public final class ConfigurationKeys { + public static final String APPLICATION_NAME = "application.name"; + + public static final String APPLICATION_VERSION = "application.version"; + + public static final String CONFIGURATION_KAFKA_TOPIC = "configuration.kafka.topic"; + + public static final String KAFKA_BOOTSTRAP_SERVERS = "kafka.bootstrap.servers"; + + public static final String KAFKA_OUTPUT_TOPIC = "kafka.output.topic"; + + public static final String KAFKA_INPUT_TOPIC = "kafka.input.topic"; + + public static final String SCHEMA_REGISTRY_URL = "schema.registry.url"; + + public static final String WINDOW_SIZE_MS = "window.size.ms"; + + public static final String WINDOW_GRACE_MS = "window.grace.ms"; + + public static final String COMMIT_INTERVAL_MS = "commit.interval.ms"; + + public static final String FLINK_STATE_BACKEND = "flink.state.backend"; + + public static final String FLINK_STATE_BACKEND_PATH = "flink.state.backend.path"; + + public static final String FLINK_STATE_BACKEND_MEMORY_SIZE = //NOPMD + "flink.state.backend.memory.size"; + + public static final String DEBUG = "debug"; + + public static final String CHECKPOINTING = "checkpointing"; + + private ConfigurationKeys() {} + +} diff --git a/theodolite-benchmarks/uc4-flink/src/main/java/theodolite/uc4/application/JoinAndDuplicateCoFlatMapFunction.java b/theodolite-benchmarks/uc4-flink/src/main/java/theodolite/uc4/application/JoinAndDuplicateCoFlatMapFunction.java new file mode 100644 index 0000000000000000000000000000000000000000..6ef9a72e9695cfccba0bbcca1238f7ebc94fc505 --- /dev/null +++ b/theodolite-benchmarks/uc4-flink/src/main/java/theodolite/uc4/application/JoinAndDuplicateCoFlatMapFunction.java @@ -0,0 +1,66 @@ +package theodolite.uc4.application; + +import java.util.Set; +import org.apache.flink.api.common.state.MapState; +import org.apache.flink.api.common.state.MapStateDescriptor; +import org.apache.flink.api.common.typeinfo.TypeHint; +import org.apache.flink.api.common.typeinfo.TypeInformation; +import org.apache.flink.api.java.tuple.Tuple2; +import org.apache.flink.configuration.Configuration; +import org.apache.flink.streaming.api.functions.co.RichCoFlatMapFunction; +import org.apache.flink.util.Collector; +import theodolite.uc4.application.util.SensorParentKey; +import titan.ccp.model.records.ActivePowerRecord; + +/** + * A {@link RichCoFlatMapFunction} which joins each incoming {@link ActivePowerRecord} with its + * corresponding parents. The {@link ActivePowerRecord} is duplicated for each parent. When + * receiving a new set of parents for a sensor, this operator updates its internal state and + * forwards a "tombstone" record if a sensor no longer has a certain parent.
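+ * For example, if sensor "s1" is first assigned the parents {"g1", "g2"} and later only {"g1"}, the tuple (SensorParentKey(s1, g2), null) is forwarded so that the downstream aggregation can retract the value last recorded for that pair.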
+ */ +public class JoinAndDuplicateCoFlatMapFunction extends + RichCoFlatMapFunction<ActivePowerRecord, Tuple2<String, Set<String>>, Tuple2<SensorParentKey, ActivePowerRecord>> { // NOCS + + private static final long serialVersionUID = -6992783644887835979L; // NOPMD + + private transient MapState<String, Set<String>> state; + + @Override + public void open(final Configuration parameters) throws Exception { + final MapStateDescriptor<String, Set<String>> descriptor = + new MapStateDescriptor<>( + "join-and-duplicate-state", + TypeInformation.of(new TypeHint<String>() {}), + TypeInformation.of(new TypeHint<Set<String>>() {})); + this.state = this.getRuntimeContext().getMapState(descriptor); + } + + @Override + public void flatMap1(final ActivePowerRecord value, + final Collector<Tuple2<SensorParentKey, ActivePowerRecord>> out) throws Exception { + final Set<String> parents = this.state.get(value.getIdentifier()); + if (parents == null) { + return; + } + for (final String parent : parents) { + out.collect(new Tuple2<>(new SensorParentKey(value.getIdentifier(), parent), value)); + } + } + + @Override + public void flatMap2(final Tuple2<String, Set<String>> value, + final Collector<Tuple2<SensorParentKey, ActivePowerRecord>> out) throws Exception { + final String sensor = value.f0; + final Set<String> oldParents = this.state.get(sensor); + final Set<String> newParents = value.f1; + if (oldParents != null && !newParents.equals(oldParents)) { + for (final String oldParent : oldParents) { + if (!newParents.contains(oldParent)) { + // Parent was deleted, emit tombstone record + out.collect(new Tuple2<>(new SensorParentKey(sensor, oldParent), null)); + } + } + } + this.state.put(sensor, newParents); + } +} diff --git a/theodolite-benchmarks/uc4-flink/src/main/java/theodolite/uc4/application/RecordAggregationProcessWindowFunction.java b/theodolite-benchmarks/uc4-flink/src/main/java/theodolite/uc4/application/RecordAggregationProcessWindowFunction.java new file mode 100644 index 0000000000000000000000000000000000000000..45d4a09d153881572c949d2af7542f9cffb5622d --- /dev/null +++ b/theodolite-benchmarks/uc4-flink/src/main/java/theodolite/uc4/application/RecordAggregationProcessWindowFunction.java @@ -0,0 +1,102 @@ +package theodolite.uc4.application; + +import org.apache.flink.api.common.state.MapState; +import org.apache.flink.api.common.state.MapStateDescriptor; +import org.apache.flink.api.common.state.ValueState; +import org.apache.flink.api.common.state.ValueStateDescriptor; +import org.apache.flink.api.common.typeinfo.TypeHint; +import org.apache.flink.api.common.typeinfo.TypeInformation; +import org.apache.flink.api.java.tuple.Tuple2; +import org.apache.flink.configuration.Configuration; +import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction; +import org.apache.flink.streaming.api.windowing.windows.TimeWindow; +import org.apache.flink.util.Collector; +import theodolite.uc4.application.util.SensorParentKey; +import titan.ccp.model.records.ActivePowerRecord; +import titan.ccp.model.records.AggregatedActivePowerRecord; + +/** + * A {@link ProcessWindowFunction} which performs the windowed aggregation of all + * {@link ActivePowerRecord} for the same {@link SensorParentKey}. Result of this aggregation is an + * {@link AggregatedActivePowerRecord}. 
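+ * The last value per {@link SensorParentKey} is kept in a {@link MapState} and the current aggregate in a {@link ValueState}, so each incoming record only adjusts the running count, sum and average instead of recomputing the whole group.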
+ */ +public class RecordAggregationProcessWindowFunction extends + ProcessWindowFunction<Tuple2<SensorParentKey, ActivePowerRecord>, AggregatedActivePowerRecord, String, TimeWindow> { // NOCS + + private static final long serialVersionUID = 6030159552332624435L; // NOPMD + + private transient MapState<SensorParentKey, ActivePowerRecord> lastValueState; + private transient ValueState<AggregatedActivePowerRecord> aggregateState; + + @Override + public void open(final Configuration parameters) { + final MapStateDescriptor<SensorParentKey, ActivePowerRecord> lastValueStateDescriptor = + new MapStateDescriptor<>( + "last-value-state", + TypeInformation.of(new TypeHint<SensorParentKey>() {}), + TypeInformation.of(new TypeHint<ActivePowerRecord>() {})); + this.lastValueState = this.getRuntimeContext().getMapState(lastValueStateDescriptor); + + final ValueStateDescriptor<AggregatedActivePowerRecord> aggregateStateDescriptor = + new ValueStateDescriptor<>( + "aggregation-state", + TypeInformation.of(new TypeHint<AggregatedActivePowerRecord>() {})); + this.aggregateState = this.getRuntimeContext().getState(aggregateStateDescriptor); + } + + @Override + public void process( + final String key, + final Context context, + final Iterable<Tuple2<SensorParentKey, ActivePowerRecord>> elements, + final Collector<AggregatedActivePowerRecord> out) throws Exception { + for (final Tuple2<SensorParentKey, ActivePowerRecord> t : elements) { + AggregatedActivePowerRecord currentAggregate = this.aggregateState.value(); + if (currentAggregate == null) { + currentAggregate = new AggregatedActivePowerRecord(key, 0L, 0L, 0.0, 0.0); + this.aggregateState.update(currentAggregate); + } + long count = currentAggregate.getCount(); + + final SensorParentKey sensorParentKey = t.f0; + ActivePowerRecord newRecord = t.f1; + if (newRecord == null) { // sensor was deleted -> decrease count, set newRecord to zero + count--; + newRecord = new ActivePowerRecord(sensorParentKey.getSensor(), 0L, 0.0); + } + + // get last value of this record from state or create 0 valued record + ActivePowerRecord previousRecord = this.lastValueState.get(sensorParentKey); + if (previousRecord == null) { // sensor was added -> increase count + count++; + previousRecord = new ActivePowerRecord(sensorParentKey.getSensor(), 0L, 0.0); + } + + // if incoming record is older than the last saved record, skip the record + if (newRecord.getTimestamp() < previousRecord.getTimestamp()) { + continue; + } + + // prefer newer timestamp, but use previous if 0 -> sensor was deleted + final long timestamp = + newRecord.getTimestamp() == 0 ? previousRecord.getTimestamp() : newRecord.getTimestamp(); + final double sumInW = + currentAggregate.getSumInW() - previousRecord.getValueInW() + newRecord.getValueInW(); + final double avgInW = count == 0 ? 
0 : sumInW / count; + + final AggregatedActivePowerRecord newAggregate = new AggregatedActivePowerRecord( + sensorParentKey.getParent(), + timestamp, + count, + sumInW, + avgInW); + + // update state and aggregateState + this.lastValueState.put(sensorParentKey, newRecord); + this.aggregateState.update(newAggregate); + } + + // emit aggregated record + out.collect(this.aggregateState.value()); + } +} diff --git a/theodolite-benchmarks/uc4-flink/src/main/java/theodolite/uc4/application/util/ImmutableSensorRegistrySerializer.java b/theodolite-benchmarks/uc4-flink/src/main/java/theodolite/uc4/application/util/ImmutableSensorRegistrySerializer.java new file mode 100644 index 0000000000000000000000000000000000000000..e157f35c8a052d2d4a28526a0d98d56515d586d6 --- /dev/null +++ b/theodolite-benchmarks/uc4-flink/src/main/java/theodolite/uc4/application/util/ImmutableSensorRegistrySerializer.java @@ -0,0 +1,28 @@ +package theodolite.uc4.application.util; + +import com.esotericsoftware.kryo.Kryo; +import com.esotericsoftware.kryo.Serializer; +import com.esotericsoftware.kryo.io.Input; +import com.esotericsoftware.kryo.io.Output; +import java.io.Serializable; +import titan.ccp.model.sensorregistry.ImmutableSensorRegistry; + +/** + * A {@link Serializer} for {@link ImmutableSensorRegistry}s. + */ +public class ImmutableSensorRegistrySerializer extends Serializer<ImmutableSensorRegistry> + implements Serializable { + + private static final long serialVersionUID = 1806411056006113017L; // NOPMD + + @Override + public void write(final Kryo kryo, final Output output, final ImmutableSensorRegistry object) { + output.writeString(object.toJson()); + } + + @Override + public ImmutableSensorRegistry read(final Kryo kryo, final Input input, + final Class<ImmutableSensorRegistry> type) { + return (ImmutableSensorRegistry) ImmutableSensorRegistry.fromJson(input.readString()); + } +} diff --git a/theodolite-benchmarks/uc4-flink/src/main/java/theodolite/uc4/application/util/ImmutableSetSerializer.java b/theodolite-benchmarks/uc4-flink/src/main/java/theodolite/uc4/application/util/ImmutableSetSerializer.java new file mode 100644 index 0000000000000000000000000000000000000000..6b2dbcdfb403705b39815dd31112deab7947d83d --- /dev/null +++ b/theodolite-benchmarks/uc4-flink/src/main/java/theodolite/uc4/application/util/ImmutableSetSerializer.java @@ -0,0 +1,51 @@ +package theodolite.uc4.application.util; + +import com.esotericsoftware.kryo.Kryo; +import com.esotericsoftware.kryo.Serializer; +import com.esotericsoftware.kryo.io.Input; +import com.esotericsoftware.kryo.io.Output; +import java.io.Serializable; +import java.util.Set; + +/** + * A {@link Serializer} for serializing arbitrary {@link Set}s of {@link Object}s. 
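+ * Since {@code Set.of(...)} returns different implementation classes depending on the number of elements, the serializer is registered separately for the empty, single-element and multi-element variants (see registerSerializers below).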
+ */ +public final class ImmutableSetSerializer extends Serializer<Set<Object>> implements Serializable { + + private static final long serialVersionUID = 6919877826110724620L; // NOPMD + + public ImmutableSetSerializer() { + super(false, true); + } + + @Override + public void write(final Kryo kryo, final Output output, final Set<Object> object) { + output.writeInt(object.size(), true); + for (final Object elm : object) { + kryo.writeClassAndObject(output, elm); + } + } + + @Override + public Set<Object> read(final Kryo kryo, final Input input, final Class<Set<Object>> type) { + final int size = input.readInt(true); + final Object[] list = new Object[size]; + for (int i = 0; i < size; ++i) { + list[i] = kryo.readClassAndObject(input); + } + return Set.of(list); + } + + /** + * Creates a new {@link ImmutableSetSerializer} and registers its serializer for the several + * related classes. + * + * @param kryo the {@link Kryo} instance to set the serializer on + */ + public static void registerSerializers(final Kryo kryo) { + final ImmutableSetSerializer serializer = new ImmutableSetSerializer(); + kryo.register(Set.of().getClass(), serializer); + kryo.register(Set.of(1).getClass(), serializer); + kryo.register(Set.of(1, 2, 3, 4).getClass(), serializer); // NOCS + } +} diff --git a/theodolite-benchmarks/uc4-flink/src/main/java/theodolite/uc4/application/util/SensorParentKey.java b/theodolite-benchmarks/uc4-flink/src/main/java/theodolite/uc4/application/util/SensorParentKey.java new file mode 100644 index 0000000000000000000000000000000000000000..903b66dd12a2864d522fde7eb7cf3fdc2ec73bcd --- /dev/null +++ b/theodolite-benchmarks/uc4-flink/src/main/java/theodolite/uc4/application/util/SensorParentKey.java @@ -0,0 +1,51 @@ +package theodolite.uc4.application.util; + +import java.util.Objects; + +/** + * A key consisting of the identifier of a sensor and an identifier of parent sensor. 
+ */ +public class SensorParentKey { + + private final String sensorIdentifier; + + private final String parentIdentifier; + + public SensorParentKey(final String sensorIdentifier, final String parentIdentifier) { + this.sensorIdentifier = sensorIdentifier; + this.parentIdentifier = parentIdentifier; + } + + public String getSensor() { + return this.sensorIdentifier; + } + + public String getParent() { + return this.parentIdentifier; + } + + @Override + public String toString() { + return "{" + this.sensorIdentifier + ", " + this.parentIdentifier + "}"; + } + + @Override + public int hashCode() { + return Objects.hash(this.sensorIdentifier, this.parentIdentifier); + } + + @Override + public boolean equals(final Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof SensorParentKey)) { + return false; + } + final SensorParentKey k = (SensorParentKey) obj; + return this.sensorIdentifier.equals(k.sensorIdentifier) + && this.parentIdentifier.equals(k.parentIdentifier); + } + + +} diff --git a/theodolite-benchmarks/uc4-flink/src/main/java/theodolite/uc4/application/util/SensorParentKeySerializer.java b/theodolite-benchmarks/uc4-flink/src/main/java/theodolite/uc4/application/util/SensorParentKeySerializer.java new file mode 100644 index 0000000000000000000000000000000000000000..bdd403a05de8f54f636568e839f5f48effd43d58 --- /dev/null +++ b/theodolite-benchmarks/uc4-flink/src/main/java/theodolite/uc4/application/util/SensorParentKeySerializer.java @@ -0,0 +1,30 @@ +package theodolite.uc4.application.util; + +import com.esotericsoftware.kryo.Kryo; +import com.esotericsoftware.kryo.Serializer; +import com.esotericsoftware.kryo.io.Input; +import com.esotericsoftware.kryo.io.Output; +import java.io.Serializable; + +/** + * Kryo serializer for {@link SensorParentKey}. 
+ */ +public final class SensorParentKeySerializer extends Serializer<SensorParentKey> + implements Serializable { + + private static final long serialVersionUID = -867781963471414857L; // NOPMD + + @Override + public void write(final Kryo kryo, final Output output, final SensorParentKey object) { + output.writeString(object.getSensor()); + output.writeString(object.getParent()); + } + + @Override + public SensorParentKey read(final Kryo kryo, final Input input, + final Class<SensorParentKey> type) { + final String sensor = input.readString(); + final String parent = input.readString(); + return new SensorParentKey(sensor, parent); + } +} diff --git a/theodolite-benchmarks/uc4-flink/src/main/resources/META-INF/application.properties b/theodolite-benchmarks/uc4-flink/src/main/resources/META-INF/application.properties new file mode 100644 index 0000000000000000000000000000000000000000..de85fdb88c0462edc9fba58409918470fcb8cb6c --- /dev/null +++ b/theodolite-benchmarks/uc4-flink/src/main/resources/META-INF/application.properties @@ -0,0 +1,16 @@ +application.name=theodolite-uc2-application +application.version=0.0.1 + +configuration.host=localhost +configuration.port=8082 +configuration.kafka.topic=configuration + +kafka.bootstrap.servers=localhost:9092 +kafka.input.topic=input +kafka.output.topic=output +schema.registry.url=http://localhost:8081 +window.size.ms=1000 +window.grace.ms=0 +num.threads=1 +commit.interval.ms=1000 +cache.max.bytes.buffering=-1 diff --git a/benchmarks/uc4-application/.settings/org.eclipse.jdt.ui.prefs b/theodolite-benchmarks/uc4-kstreams/.settings/org.eclipse.jdt.ui.prefs similarity index 100% rename from benchmarks/uc4-application/.settings/org.eclipse.jdt.ui.prefs rename to theodolite-benchmarks/uc4-kstreams/.settings/org.eclipse.jdt.ui.prefs diff --git a/theodolite-benchmarks/uc4-kstreams/.settings/qa.eclipse.plugin.checkstyle.prefs b/theodolite-benchmarks/uc4-kstreams/.settings/qa.eclipse.plugin.checkstyle.prefs new file mode 100644 index 0000000000000000000000000000000000000000..87860c815222845c1d264d7d0ce498d3397f8280 --- /dev/null +++ b/theodolite-benchmarks/uc4-kstreams/.settings/qa.eclipse.plugin.checkstyle.prefs @@ -0,0 +1,4 @@ +configFilePath=../config/checkstyle.xml +customModulesJarPaths= +eclipse.preferences.version=1 +enabled=true diff --git a/theodolite-benchmarks/uc4-kstreams/.settings/qa.eclipse.plugin.pmd.prefs b/theodolite-benchmarks/uc4-kstreams/.settings/qa.eclipse.plugin.pmd.prefs new file mode 100644 index 0000000000000000000000000000000000000000..efbcb8c9e5d449194a48ca1ea42b7d807b573db9 --- /dev/null +++ b/theodolite-benchmarks/uc4-kstreams/.settings/qa.eclipse.plugin.pmd.prefs @@ -0,0 +1,4 @@ +customRulesJars= +eclipse.preferences.version=1 +enabled=true +ruleSetFilePath=../config/pmd.xml diff --git a/benchmarks/uc4-application/Dockerfile b/theodolite-benchmarks/uc4-kstreams/Dockerfile similarity index 52% rename from benchmarks/uc4-application/Dockerfile rename to theodolite-benchmarks/uc4-kstreams/Dockerfile index add251c0ef11324830bcada9174fbbdecc18d532..f50f09995a0479de0d7deb8c68184c2dd3ed1461 100644 --- a/benchmarks/uc4-application/Dockerfile +++ b/theodolite-benchmarks/uc4-kstreams/Dockerfile @@ -1,6 +1,6 @@ FROM openjdk:11-slim -ADD build/distributions/uc4-application.tar / +ADD build/distributions/uc4-kstreams.tar / CMD JAVA_OPTS="$JAVA_OPTS -Dorg.slf4j.simpleLogger.defaultLogLevel=$LOG_LEVEL" \ - /uc4-application/bin/uc4-application + /uc4-kstreams/bin/uc4-kstreams diff --git a/benchmarks/uc4-application/README.md 
b/theodolite-benchmarks/uc4-kstreams/README.md similarity index 100% rename from benchmarks/uc4-application/README.md rename to theodolite-benchmarks/uc4-kstreams/README.md diff --git a/benchmarks/uc4-application/build.gradle b/theodolite-benchmarks/uc4-kstreams/build.gradle similarity index 58% rename from benchmarks/uc4-application/build.gradle rename to theodolite-benchmarks/uc4-kstreams/build.gradle index 9cb1b311d8f50769d371952db886e4a00a454591..83212a499ae344ea44beb3c2b98aec147dda8488 100644 --- a/benchmarks/uc4-application/build.gradle +++ b/theodolite-benchmarks/uc4-kstreams/build.gradle @@ -1 +1,5 @@ +plugins { + id 'theodolite.kstreams' +} + mainClassName = "theodolite.uc4.application.AggregationService" diff --git a/benchmarks/uc4-application/src/main/java/theodolite/uc4/application/AggregationService.java b/theodolite-benchmarks/uc4-kstreams/src/main/java/theodolite/uc4/application/AggregationService.java similarity index 100% rename from benchmarks/uc4-application/src/main/java/theodolite/uc4/application/AggregationService.java rename to theodolite-benchmarks/uc4-kstreams/src/main/java/theodolite/uc4/application/AggregationService.java diff --git a/benchmarks/uc4-application/src/main/java/theodolite/uc4/streamprocessing/ChildParentsTransformer.java b/theodolite-benchmarks/uc4-kstreams/src/main/java/theodolite/uc4/streamprocessing/ChildParentsTransformer.java similarity index 100% rename from benchmarks/uc4-application/src/main/java/theodolite/uc4/streamprocessing/ChildParentsTransformer.java rename to theodolite-benchmarks/uc4-kstreams/src/main/java/theodolite/uc4/streamprocessing/ChildParentsTransformer.java diff --git a/benchmarks/uc4-application/src/main/java/theodolite/uc4/streamprocessing/ChildParentsTransformerSupplier.java b/theodolite-benchmarks/uc4-kstreams/src/main/java/theodolite/uc4/streamprocessing/ChildParentsTransformerSupplier.java similarity index 100% rename from benchmarks/uc4-application/src/main/java/theodolite/uc4/streamprocessing/ChildParentsTransformerSupplier.java rename to theodolite-benchmarks/uc4-kstreams/src/main/java/theodolite/uc4/streamprocessing/ChildParentsTransformerSupplier.java diff --git a/benchmarks/uc4-application/src/main/java/theodolite/uc4/streamprocessing/JointFlatTransformer.java b/theodolite-benchmarks/uc4-kstreams/src/main/java/theodolite/uc4/streamprocessing/JointFlatTransformer.java similarity index 100% rename from benchmarks/uc4-application/src/main/java/theodolite/uc4/streamprocessing/JointFlatTransformer.java rename to theodolite-benchmarks/uc4-kstreams/src/main/java/theodolite/uc4/streamprocessing/JointFlatTransformer.java diff --git a/benchmarks/uc4-application/src/main/java/theodolite/uc4/streamprocessing/JointFlatTransformerSupplier.java b/theodolite-benchmarks/uc4-kstreams/src/main/java/theodolite/uc4/streamprocessing/JointFlatTransformerSupplier.java similarity index 100% rename from benchmarks/uc4-application/src/main/java/theodolite/uc4/streamprocessing/JointFlatTransformerSupplier.java rename to theodolite-benchmarks/uc4-kstreams/src/main/java/theodolite/uc4/streamprocessing/JointFlatTransformerSupplier.java diff --git a/benchmarks/uc4-application/src/main/java/theodolite/uc4/streamprocessing/JointRecordParents.java b/theodolite-benchmarks/uc4-kstreams/src/main/java/theodolite/uc4/streamprocessing/JointRecordParents.java similarity index 100% rename from benchmarks/uc4-application/src/main/java/theodolite/uc4/streamprocessing/JointRecordParents.java rename to 
theodolite-benchmarks/uc4-kstreams/src/main/java/theodolite/uc4/streamprocessing/JointRecordParents.java diff --git a/benchmarks/uc4-application/src/main/java/theodolite/uc4/streamprocessing/OptionalParentsSerde.java b/theodolite-benchmarks/uc4-kstreams/src/main/java/theodolite/uc4/streamprocessing/OptionalParentsSerde.java similarity index 100% rename from benchmarks/uc4-application/src/main/java/theodolite/uc4/streamprocessing/OptionalParentsSerde.java rename to theodolite-benchmarks/uc4-kstreams/src/main/java/theodolite/uc4/streamprocessing/OptionalParentsSerde.java diff --git a/benchmarks/uc4-application/src/main/java/theodolite/uc4/streamprocessing/ParentsSerde.java b/theodolite-benchmarks/uc4-kstreams/src/main/java/theodolite/uc4/streamprocessing/ParentsSerde.java similarity index 100% rename from benchmarks/uc4-application/src/main/java/theodolite/uc4/streamprocessing/ParentsSerde.java rename to theodolite-benchmarks/uc4-kstreams/src/main/java/theodolite/uc4/streamprocessing/ParentsSerde.java diff --git a/benchmarks/uc4-application/src/main/java/theodolite/uc4/streamprocessing/RecordAggregator.java b/theodolite-benchmarks/uc4-kstreams/src/main/java/theodolite/uc4/streamprocessing/RecordAggregator.java similarity index 100% rename from benchmarks/uc4-application/src/main/java/theodolite/uc4/streamprocessing/RecordAggregator.java rename to theodolite-benchmarks/uc4-kstreams/src/main/java/theodolite/uc4/streamprocessing/RecordAggregator.java diff --git a/benchmarks/uc4-application/src/main/java/theodolite/uc4/streamprocessing/SensorParentKey.java b/theodolite-benchmarks/uc4-kstreams/src/main/java/theodolite/uc4/streamprocessing/SensorParentKey.java similarity index 100% rename from benchmarks/uc4-application/src/main/java/theodolite/uc4/streamprocessing/SensorParentKey.java rename to theodolite-benchmarks/uc4-kstreams/src/main/java/theodolite/uc4/streamprocessing/SensorParentKey.java diff --git a/benchmarks/uc4-application/src/main/java/theodolite/uc4/streamprocessing/SensorParentKeySerde.java b/theodolite-benchmarks/uc4-kstreams/src/main/java/theodolite/uc4/streamprocessing/SensorParentKeySerde.java similarity index 100% rename from benchmarks/uc4-application/src/main/java/theodolite/uc4/streamprocessing/SensorParentKeySerde.java rename to theodolite-benchmarks/uc4-kstreams/src/main/java/theodolite/uc4/streamprocessing/SensorParentKeySerde.java diff --git a/benchmarks/uc4-application/src/main/java/theodolite/uc4/streamprocessing/TopologyBuilder.java b/theodolite-benchmarks/uc4-kstreams/src/main/java/theodolite/uc4/streamprocessing/TopologyBuilder.java similarity index 100% rename from benchmarks/uc4-application/src/main/java/theodolite/uc4/streamprocessing/TopologyBuilder.java rename to theodolite-benchmarks/uc4-kstreams/src/main/java/theodolite/uc4/streamprocessing/TopologyBuilder.java diff --git a/benchmarks/uc4-application/src/main/java/theodolite/uc4/streamprocessing/Uc4KafkaStreamsBuilder.java b/theodolite-benchmarks/uc4-kstreams/src/main/java/theodolite/uc4/streamprocessing/Uc4KafkaStreamsBuilder.java similarity index 100% rename from benchmarks/uc4-application/src/main/java/theodolite/uc4/streamprocessing/Uc4KafkaStreamsBuilder.java rename to theodolite-benchmarks/uc4-kstreams/src/main/java/theodolite/uc4/streamprocessing/Uc4KafkaStreamsBuilder.java diff --git a/benchmarks/uc4-application/src/main/resources/META-INF/application.properties b/theodolite-benchmarks/uc4-kstreams/src/main/resources/META-INF/application.properties similarity index 86% rename from 
benchmarks/uc4-application/src/main/resources/META-INF/application.properties rename to theodolite-benchmarks/uc4-kstreams/src/main/resources/META-INF/application.properties index ce06091076e6ff7f9ede355c7f54c12b3d872119..a21f7e917e3ce4a0762261ca90444613c82ab650 100644 --- a/benchmarks/uc4-application/src/main/resources/META-INF/application.properties +++ b/theodolite-benchmarks/uc4-kstreams/src/main/resources/META-INF/application.properties @@ -7,7 +7,7 @@ kafka.configuration.topic=configuration kafka.feedback.topic=aggregation-feedback kafka.output.topic=output -schema.registry.url=http://localhost:8091 +schema.registry.url=http://localhost:8081 emit.period.ms=5000 grace.period.ms=0 \ No newline at end of file diff --git a/benchmarks/uc4-application/src/test/java/theodolite/uc4/streamprocessing/OptionalParentsSerdeTest.java b/theodolite-benchmarks/uc4-kstreams/src/test/java/theodolite/uc4/streamprocessing/OptionalParentsSerdeTest.java similarity index 100% rename from benchmarks/uc4-application/src/test/java/theodolite/uc4/streamprocessing/OptionalParentsSerdeTest.java rename to theodolite-benchmarks/uc4-kstreams/src/test/java/theodolite/uc4/streamprocessing/OptionalParentsSerdeTest.java diff --git a/benchmarks/uc4-application/src/test/java/theodolite/uc4/streamprocessing/ParentsSerdeTest.java b/theodolite-benchmarks/uc4-kstreams/src/test/java/theodolite/uc4/streamprocessing/ParentsSerdeTest.java similarity index 100% rename from benchmarks/uc4-application/src/test/java/theodolite/uc4/streamprocessing/ParentsSerdeTest.java rename to theodolite-benchmarks/uc4-kstreams/src/test/java/theodolite/uc4/streamprocessing/ParentsSerdeTest.java diff --git a/benchmarks/uc4-application/src/test/java/theodolite/uc4/streamprocessing/SensorParentKeySerdeTest.java b/theodolite-benchmarks/uc4-kstreams/src/test/java/theodolite/uc4/streamprocessing/SensorParentKeySerdeTest.java similarity index 100% rename from benchmarks/uc4-application/src/test/java/theodolite/uc4/streamprocessing/SensorParentKeySerdeTest.java rename to theodolite-benchmarks/uc4-kstreams/src/test/java/theodolite/uc4/streamprocessing/SensorParentKeySerdeTest.java diff --git a/benchmarks/uc4-application/src/test/java/theodolite/uc4/streamprocessing/SerdeTester.java b/theodolite-benchmarks/uc4-kstreams/src/test/java/theodolite/uc4/streamprocessing/SerdeTester.java similarity index 100% rename from benchmarks/uc4-application/src/test/java/theodolite/uc4/streamprocessing/SerdeTester.java rename to theodolite-benchmarks/uc4-kstreams/src/test/java/theodolite/uc4/streamprocessing/SerdeTester.java diff --git a/benchmarks/uc4-application/src/test/java/theodolite/uc4/streamprocessing/SerdeTesterFactory.java b/theodolite-benchmarks/uc4-kstreams/src/test/java/theodolite/uc4/streamprocessing/SerdeTesterFactory.java similarity index 100% rename from benchmarks/uc4-application/src/test/java/theodolite/uc4/streamprocessing/SerdeTesterFactory.java rename to theodolite-benchmarks/uc4-kstreams/src/test/java/theodolite/uc4/streamprocessing/SerdeTesterFactory.java diff --git a/theodolite-benchmarks/uc4-load-generator/.settings/org.eclipse.jdt.ui.prefs b/theodolite-benchmarks/uc4-load-generator/.settings/org.eclipse.jdt.ui.prefs new file mode 100644 index 0000000000000000000000000000000000000000..4d01df75552c562406705858b6368ecf59d6e82f --- /dev/null +++ b/theodolite-benchmarks/uc4-load-generator/.settings/org.eclipse.jdt.ui.prefs @@ -0,0 +1,128 @@ +cleanup.add_default_serial_version_id=true +cleanup.add_generated_serial_version_id=false 
+cleanup.add_missing_annotations=true +cleanup.add_missing_deprecated_annotations=true +cleanup.add_missing_methods=false +cleanup.add_missing_nls_tags=false +cleanup.add_missing_override_annotations=true +cleanup.add_missing_override_annotations_interface_methods=true +cleanup.add_serial_version_id=false +cleanup.always_use_blocks=true +cleanup.always_use_parentheses_in_expressions=false +cleanup.always_use_this_for_non_static_field_access=true +cleanup.always_use_this_for_non_static_method_access=true +cleanup.convert_functional_interfaces=false +cleanup.convert_to_enhanced_for_loop=true +cleanup.correct_indentation=true +cleanup.format_source_code=true +cleanup.format_source_code_changes_only=false +cleanup.insert_inferred_type_arguments=false +cleanup.make_local_variable_final=true +cleanup.make_parameters_final=true +cleanup.make_private_fields_final=true +cleanup.make_type_abstract_if_missing_method=false +cleanup.make_variable_declarations_final=true +cleanup.never_use_blocks=false +cleanup.never_use_parentheses_in_expressions=true +cleanup.organize_imports=true +cleanup.qualify_static_field_accesses_with_declaring_class=false +cleanup.qualify_static_member_accesses_through_instances_with_declaring_class=true +cleanup.qualify_static_member_accesses_through_subtypes_with_declaring_class=true +cleanup.qualify_static_member_accesses_with_declaring_class=true +cleanup.qualify_static_method_accesses_with_declaring_class=false +cleanup.remove_private_constructors=true +cleanup.remove_redundant_modifiers=false +cleanup.remove_redundant_semicolons=true +cleanup.remove_redundant_type_arguments=true +cleanup.remove_trailing_whitespaces=true +cleanup.remove_trailing_whitespaces_all=true +cleanup.remove_trailing_whitespaces_ignore_empty=false +cleanup.remove_unnecessary_casts=true +cleanup.remove_unnecessary_nls_tags=true +cleanup.remove_unused_imports=true +cleanup.remove_unused_local_variables=false +cleanup.remove_unused_private_fields=true +cleanup.remove_unused_private_members=false +cleanup.remove_unused_private_methods=true +cleanup.remove_unused_private_types=true +cleanup.sort_members=false +cleanup.sort_members_all=false +cleanup.use_anonymous_class_creation=false +cleanup.use_blocks=true +cleanup.use_blocks_only_for_return_and_throw=false +cleanup.use_lambda=true +cleanup.use_parentheses_in_expressions=true +cleanup.use_this_for_non_static_field_access=true +cleanup.use_this_for_non_static_field_access_only_if_necessary=false +cleanup.use_this_for_non_static_method_access=true +cleanup.use_this_for_non_static_method_access_only_if_necessary=false +cleanup_profile=_CAU-SE-Style +cleanup_settings_version=2 +eclipse.preferences.version=1 +editor_save_participant_org.eclipse.jdt.ui.postsavelistener.cleanup=true +formatter_profile=_CAU-SE-Style +formatter_settings_version=15 +org.eclipse.jdt.ui.ignorelowercasenames=true +org.eclipse.jdt.ui.importorder=; +org.eclipse.jdt.ui.ondemandthreshold=99 +org.eclipse.jdt.ui.staticondemandthreshold=99 +org.eclipse.jdt.ui.text.custom_code_templates= +sp_cleanup.add_default_serial_version_id=true +sp_cleanup.add_generated_serial_version_id=false +sp_cleanup.add_missing_annotations=true +sp_cleanup.add_missing_deprecated_annotations=true +sp_cleanup.add_missing_methods=false +sp_cleanup.add_missing_nls_tags=false +sp_cleanup.add_missing_override_annotations=true +sp_cleanup.add_missing_override_annotations_interface_methods=true +sp_cleanup.add_serial_version_id=false +sp_cleanup.always_use_blocks=true 
+sp_cleanup.always_use_parentheses_in_expressions=false +sp_cleanup.always_use_this_for_non_static_field_access=true +sp_cleanup.always_use_this_for_non_static_method_access=true +sp_cleanup.convert_functional_interfaces=false +sp_cleanup.convert_to_enhanced_for_loop=true +sp_cleanup.correct_indentation=true +sp_cleanup.format_source_code=true +sp_cleanup.format_source_code_changes_only=false +sp_cleanup.insert_inferred_type_arguments=false +sp_cleanup.make_local_variable_final=true +sp_cleanup.make_parameters_final=true +sp_cleanup.make_private_fields_final=true +sp_cleanup.make_type_abstract_if_missing_method=false +sp_cleanup.make_variable_declarations_final=true +sp_cleanup.never_use_blocks=false +sp_cleanup.never_use_parentheses_in_expressions=true +sp_cleanup.on_save_use_additional_actions=true +sp_cleanup.organize_imports=true +sp_cleanup.qualify_static_field_accesses_with_declaring_class=false +sp_cleanup.qualify_static_member_accesses_through_instances_with_declaring_class=true +sp_cleanup.qualify_static_member_accesses_through_subtypes_with_declaring_class=true +sp_cleanup.qualify_static_member_accesses_with_declaring_class=true +sp_cleanup.qualify_static_method_accesses_with_declaring_class=false +sp_cleanup.remove_private_constructors=true +sp_cleanup.remove_redundant_modifiers=false +sp_cleanup.remove_redundant_semicolons=true +sp_cleanup.remove_redundant_type_arguments=true +sp_cleanup.remove_trailing_whitespaces=true +sp_cleanup.remove_trailing_whitespaces_all=true +sp_cleanup.remove_trailing_whitespaces_ignore_empty=false +sp_cleanup.remove_unnecessary_casts=true +sp_cleanup.remove_unnecessary_nls_tags=true +sp_cleanup.remove_unused_imports=true +sp_cleanup.remove_unused_local_variables=false +sp_cleanup.remove_unused_private_fields=true +sp_cleanup.remove_unused_private_members=false +sp_cleanup.remove_unused_private_methods=true +sp_cleanup.remove_unused_private_types=true +sp_cleanup.sort_members=false +sp_cleanup.sort_members_all=false +sp_cleanup.use_anonymous_class_creation=false +sp_cleanup.use_blocks=true +sp_cleanup.use_blocks_only_for_return_and_throw=false +sp_cleanup.use_lambda=true +sp_cleanup.use_parentheses_in_expressions=true +sp_cleanup.use_this_for_non_static_field_access=true +sp_cleanup.use_this_for_non_static_field_access_only_if_necessary=false +sp_cleanup.use_this_for_non_static_method_access=true +sp_cleanup.use_this_for_non_static_method_access_only_if_necessary=false diff --git a/theodolite-benchmarks/uc4-load-generator/.settings/qa.eclipse.plugin.checkstyle.prefs b/theodolite-benchmarks/uc4-load-generator/.settings/qa.eclipse.plugin.checkstyle.prefs new file mode 100644 index 0000000000000000000000000000000000000000..87860c815222845c1d264d7d0ce498d3397f8280 --- /dev/null +++ b/theodolite-benchmarks/uc4-load-generator/.settings/qa.eclipse.plugin.checkstyle.prefs @@ -0,0 +1,4 @@ +configFilePath=../config/checkstyle.xml +customModulesJarPaths= +eclipse.preferences.version=1 +enabled=true diff --git a/theodolite-benchmarks/uc4-load-generator/.settings/qa.eclipse.plugin.pmd.prefs b/theodolite-benchmarks/uc4-load-generator/.settings/qa.eclipse.plugin.pmd.prefs new file mode 100644 index 0000000000000000000000000000000000000000..efbcb8c9e5d449194a48ca1ea42b7d807b573db9 --- /dev/null +++ b/theodolite-benchmarks/uc4-load-generator/.settings/qa.eclipse.plugin.pmd.prefs @@ -0,0 +1,4 @@ +customRulesJars= +eclipse.preferences.version=1 +enabled=true +ruleSetFilePath=../config/pmd.xml diff --git a/theodolite-benchmarks/uc4-load-generator/Dockerfile 
b/theodolite-benchmarks/uc4-load-generator/Dockerfile new file mode 100644 index 0000000000000000000000000000000000000000..7723121ed337492af5a2e5b3ca3e026b1a7a3478 --- /dev/null +++ b/theodolite-benchmarks/uc4-load-generator/Dockerfile @@ -0,0 +1,6 @@ +FROM openjdk:11-slim + +ADD build/distributions/uc4-load-generator.tar / + +CMD JAVA_OPTS="$JAVA_OPTS -Dorg.slf4j.simpleLogger.defaultLogLevel=$LOG_LEVEL" \ + /uc4-load-generator/bin/uc4-load-generator diff --git a/benchmarks/uc4-workload-generator/build.gradle b/theodolite-benchmarks/uc4-load-generator/build.gradle similarity index 58% rename from benchmarks/uc4-workload-generator/build.gradle rename to theodolite-benchmarks/uc4-load-generator/build.gradle index 8865ec9391213f3d8c52be2366573dee09652087..9785718056fa1a14d687a75237cd23b941ce7365 100644 --- a/benchmarks/uc4-workload-generator/build.gradle +++ b/theodolite-benchmarks/uc4-load-generator/build.gradle @@ -1 +1,5 @@ +plugins { + id 'theodolite.load-generator' +} + mainClassName = "theodolite.uc4.workloadgenerator.LoadGenerator" diff --git a/benchmarks/uc4-workload-generator/src/main/java/theodolite/uc4/workloadgenerator/ConfigPublisher.java b/theodolite-benchmarks/uc4-load-generator/src/main/java/theodolite/uc4/workloadgenerator/ConfigPublisher.java similarity index 100% rename from benchmarks/uc4-workload-generator/src/main/java/theodolite/uc4/workloadgenerator/ConfigPublisher.java rename to theodolite-benchmarks/uc4-load-generator/src/main/java/theodolite/uc4/workloadgenerator/ConfigPublisher.java diff --git a/benchmarks/uc4-workload-generator/src/main/java/theodolite/uc4/workloadgenerator/LoadGenerator.java b/theodolite-benchmarks/uc4-load-generator/src/main/java/theodolite/uc4/workloadgenerator/LoadGenerator.java similarity index 100% rename from benchmarks/uc4-workload-generator/src/main/java/theodolite/uc4/workloadgenerator/LoadGenerator.java rename to theodolite-benchmarks/uc4-load-generator/src/main/java/theodolite/uc4/workloadgenerator/LoadGenerator.java diff --git a/benchmarks/uc4-workload-generator/src/main/java/theodolite/uc4/workloadgenerator/SensorRegistryBuilder.java b/theodolite-benchmarks/uc4-load-generator/src/main/java/theodolite/uc4/workloadgenerator/SensorRegistryBuilder.java similarity index 100% rename from benchmarks/uc4-workload-generator/src/main/java/theodolite/uc4/workloadgenerator/SensorRegistryBuilder.java rename to theodolite-benchmarks/uc4-load-generator/src/main/java/theodolite/uc4/workloadgenerator/SensorRegistryBuilder.java diff --git a/benchmarks/uc4-workload-generator/src/main/resources/META-INF/application.properties b/theodolite-benchmarks/uc4-load-generator/src/main/resources/META-INF/application.properties similarity index 100% rename from benchmarks/uc4-workload-generator/src/main/resources/META-INF/application.properties rename to theodolite-benchmarks/uc4-load-generator/src/main/resources/META-INF/application.properties diff --git a/benchmarks/uc4-workload-generator/src/test/java/theodolite/uc4/workloadgenerator/SensorRegistryBuilderTest.java b/theodolite-benchmarks/uc4-load-generator/src/test/java/theodolite/uc4/workloadgenerator/SensorRegistryBuilderTest.java similarity index 100% rename from benchmarks/uc4-workload-generator/src/test/java/theodolite/uc4/workloadgenerator/SensorRegistryBuilderTest.java rename to theodolite-benchmarks/uc4-load-generator/src/test/java/theodolite/uc4/workloadgenerator/SensorRegistryBuilderTest.java diff --git a/theodolite-quarkus/src/main/kotlin/theodolite/benchmark/KafkaLagExporterRemover.kt 
b/theodolite-quarkus/src/main/kotlin/theodolite/benchmark/KafkaLagExporterRemover.kt
new file mode 100644
index 0000000000000000000000000000000000000000..9e241cabfa208a0632635a30c658590faec2c1a8
--- /dev/null
+++ b/theodolite-quarkus/src/main/kotlin/theodolite/benchmark/KafkaLagExporterRemover.kt
@@ -0,0 +1,14 @@
+package theodolite.benchmark
+
+import io.fabric8.kubernetes.client.NamespacedKubernetesClient
+import mu.KotlinLogging
+
+private val logger = KotlinLogging.logger {}
+
+class KafkaLagExporterRemover(private val client: NamespacedKubernetesClient) {
+
+    fun remove(label: String) {
+        this.client.pods().withLabel(label).delete()
+        logger.info { "Pod with label: $label deleted" }
+    }
+}
diff --git a/theodolite-quarkus/src/main/kotlin/theodolite/benchmark/KubernetesBenchmark.kt b/theodolite-quarkus/src/main/kotlin/theodolite/benchmark/KubernetesBenchmark.kt
index 1e2d9a2e675bee3817c493e5afc89333ca508ed8..0110e1d7cdbbe150fc6d76bc303770b989f5d739 100644
--- a/theodolite-quarkus/src/main/kotlin/theodolite/benchmark/KubernetesBenchmark.kt
+++ b/theodolite-quarkus/src/main/kotlin/theodolite/benchmark/KubernetesBenchmark.kt
@@ -4,7 +4,7 @@ import io.fabric8.kubernetes.api.model.KubernetesResource
 import io.fabric8.kubernetes.client.DefaultKubernetesClient
 import mu.KotlinLogging
 import theodolite.k8s.K8sResourceLoader
-import theodolite.patcher.PatcherManager
+import theodolite.patcher.PatcherFactory
 import theodolite.util.*
 
 private val logger = KotlinLogging.logger {}
@@ -43,20 +43,15 @@
         configurationOverrides: List<ConfigurationOverride>
     ): BenchmarkDeployment {
         val resources = loadKubernetesResources(this.appResource + this.loadGenResource)
-        val patcherManager = PatcherManager()
+        val patcherFactory = PatcherFactory()
 
-        // patch res and load
-        patcherManager.createAndApplyPatcher(res.getType(), this.resourceTypes, resources, res.get())
-        patcherManager.createAndApplyPatcher(load.getType(), this.loadTypes, resources, load.get().toString())
+        // patch the load dimension and the resources
+        load.getType().forEach { patcherDefinition -> patcherFactory.createPatcher(patcherDefinition, resources).patch(load.get().toString()) }
+        res.getType().forEach { patcherDefinition -> patcherFactory.createPatcher(patcherDefinition, resources).patch(res.get().toString()) }
+
+        // Patch the given overrides
+        configurationOverrides.forEach { override -> patcherFactory.createPatcher(override.patcher, resources).patch(override.value) }
 
-        // patch overrides
-        configurationOverrides.forEach { override ->
-            patcherManager.applyPatcher(
-                listOf(override.patcher),
-                resources,
-                override.value
-            )
-        }
         return KubernetesBenchmarkDeployment(
             namespace = namespace,
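For orientation, the reworked KubernetesBenchmark above no longer delegates to a PatcherManager; it resolves one Patcher per PatcherDefinition and applies it directly. A minimal sketch of that pattern, with a hypothetical applyAll helper and simplified types (the helper is not part of the change):

// Sketch only: turn a list of PatcherDefinitions into patch operations.
// applyAll is a hypothetical helper; PatcherFactory and Patcher are introduced further down in this diff.
fun applyAll(
    definitions: List<PatcherDefinition>,
    resources: List<Pair<String, KubernetesResource>>,
    value: String
) {
    val factory = PatcherFactory()
    definitions.forEach { definition ->
        factory.createPatcher(definition, resources).patch(value)
    }
}
// e.g. applyAll(load.getType(), resources, load.get().toString()), mirroring the loop above.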
diff --git a/theodolite-quarkus/src/main/kotlin/theodolite/benchmark/KubernetesBenchmarkDeployment.kt b/theodolite-quarkus/src/main/kotlin/theodolite/benchmark/KubernetesBenchmarkDeployment.kt
index 342ceebb89a2ccd04a84ba40fb49a1e4872fa24d..35d7cc79ba8aafa17584d392d94443e59a7fa590 100644
--- a/theodolite-quarkus/src/main/kotlin/theodolite/benchmark/KubernetesBenchmarkDeployment.kt
+++ b/theodolite-quarkus/src/main/kotlin/theodolite/benchmark/KubernetesBenchmarkDeployment.kt
@@ -15,6 +15,9 @@ class KubernetesBenchmarkDeployment(
     private val kafkaController = TopicManager(this.kafkaConfig)
     private val kubernetesManager = K8sManager(DefaultKubernetesClient().inNamespace(namespace))
 
+    private val LABEL = "app.kubernetes.io/name=kafka-lag-exporter"
+    private val client = DefaultKubernetesClient().inNamespace(namespace)
+
     override fun setup() {
         kafkaController.createTopics(this.topics)
         resources.forEach {
@@ -23,6 +26,7 @@ class KubernetesBenchmarkDeployment(
     }
 
     override fun teardown() {
+        KafkaLagExporterRemover(client).remove(LABEL)
         kafkaController.removeTopics(this.topics.map { topic -> topic.name() })
         resources.forEach {
             kubernetesManager.remove(it)
diff --git a/theodolite-quarkus/src/main/kotlin/theodolite/execution/TheodoliteExecutor.kt b/theodolite-quarkus/src/main/kotlin/theodolite/execution/TheodoliteExecutor.kt
index d1d66af6afdf2f207742b86c89e0771cd2467012..689c07fc6419f8d8a63c2c0fe5f2d5961e15e374 100644
--- a/theodolite-quarkus/src/main/kotlin/theodolite/execution/TheodoliteExecutor.kt
+++ b/theodolite-quarkus/src/main/kotlin/theodolite/execution/TheodoliteExecutor.kt
@@ -2,6 +2,7 @@ package theodolite.execution
 
 import theodolite.benchmark.BenchmarkExecution
 import theodolite.benchmark.KubernetesBenchmark
+import theodolite.patcher.PatcherDefinitionFactory
 import theodolite.strategies.StrategyFactory
 import theodolite.strategies.searchstrategy.CompositeStrategy
 import theodolite.util.Config
@@ -20,6 +21,14 @@ class TheodoliteExecutor(
         val strategyFactory = StrategyFactory()
         val executionDuration = Duration.ofSeconds(config.execution.duration)
 
+        val resourcePatcherDefinition = PatcherDefinitionFactory().createPatcherDefinition(
+            config.resources.resourceType,
+            this.kubernetesBenchmark.resourceTypes
+        )
+        val loadDimensionPatcherDefinition =
+            PatcherDefinitionFactory().createPatcherDefinition(config.load.loadType, this.kubernetesBenchmark.loadTypes)
+
+
         val executor =
             BenchmarkExecutorImpl(
                 kubernetesBenchmark,
@@ -30,9 +39,13 @@ class TheodoliteExecutor(
             )
 
         return Config(
-            loads = config.load.loadValues.map { load -> LoadDimension(load, config.load.loadType) },
-            resources = config.resources.resourceValues.map
-            { resource -> Resource(resource, config.resources.resourceType) },
+            loads = config.load.loadValues.map { load -> LoadDimension(load, loadDimensionPatcherDefinition) },
+            resources = config.resources.resourceValues.map { resource ->
+                Resource(
+                    resource,
+                    resourcePatcherDefinition
+                )
+            },
             compositeStrategy = CompositeStrategy(
                 benchmarkExecutor = executor,
                 searchStrategy = strategyFactory.createSearchStrategy(executor, config.execution.strategy),
diff --git a/theodolite-quarkus/src/main/kotlin/theodolite/patcher/PatcherDefinitionFactory.kt b/theodolite-quarkus/src/main/kotlin/theodolite/patcher/PatcherDefinitionFactory.kt
new file mode 100644
index 0000000000000000000000000000000000000000..096d19e7c54ce3ac308ca59edee7861a7041dde0
--- /dev/null
+++ b/theodolite-quarkus/src/main/kotlin/theodolite/patcher/PatcherDefinitionFactory.kt
@@ -0,0 +1,12 @@
+package theodolite.patcher
+
+import theodolite.util.PatcherDefinition
+import theodolite.util.TypeName
+
+class PatcherDefinitionFactory {
+    fun createPatcherDefinition(requiredType: String, patcherTypes: List<TypeName>): List<PatcherDefinition> {
+        return patcherTypes
+            .filter { type -> type.typeName == requiredType }
+            .flatMap { type -> type.patchers }
+    }
+}
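PatcherDefinitionFactory simply filters the benchmark's declared type names for the requested load or resource type and returns their patcher definitions. A hedged usage sketch; the loadTypes value is assumed to be the List<TypeName> parsed from a benchmark definition such as BenchmarkType.yaml:

// Sketch only: select the PatcherDefinitions for the "NumSensors" load type.
val numSensorsPatchers: List<PatcherDefinition> =
    PatcherDefinitionFactory().createPatcherDefinition("NumSensors", loadTypes)
// Each returned definition carries the type, resource, container and variableName
// that PatcherFactory (next in this diff) needs to build the concrete Patcher.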
diff --git a/theodolite-quarkus/src/main/kotlin/theodolite/patcher/PatcherFactory.kt b/theodolite-quarkus/src/main/kotlin/theodolite/patcher/PatcherFactory.kt
new file mode 100644
index 0000000000000000000000000000000000000000..dd391c599ad33fa0f6990ecc86ed1af5430cb695
--- /dev/null
+++ b/theodolite-quarkus/src/main/kotlin/theodolite/patcher/PatcherFactory.kt
@@ -0,0 +1,29 @@
+package theodolite.patcher
+
+import io.fabric8.kubernetes.api.model.KubernetesResource
+import theodolite.util.PatcherDefinition
+
+class PatcherFactory {
+    fun createPatcher(patcherDefinition: PatcherDefinition,
+                      k8sResources: List<Pair<String, KubernetesResource>>): Patcher {
+        val resource =
+            k8sResources.filter { it.first == patcherDefinition.resource }.map { resource -> resource.second }[0]
+        return when (patcherDefinition.type) {
+            "ReplicaPatcher" -> ReplicaPatcher(resource)
+            "EnvVarPatcher" -> EnvVarPatcher(resource, patcherDefinition.container, patcherDefinition.variableName)
+            "NodeSelectorPatcher" -> NodeSelectorPatcher(resource, patcherDefinition.variableName)
+            "ResourceLimitPatcher" -> ResourceLimitPatcher(
+                resource,
+                patcherDefinition.container,
+                patcherDefinition.variableName
+            )
+            "ResourceRequestPatcher" -> ResourceRequestPatcher(
+                resource,
+                patcherDefinition.container,
+                patcherDefinition.variableName
+            )
+            "SchedulerNamePatcher" -> SchedulerNamePatcher(resource)
+            else -> throw IllegalArgumentException("Patcher type ${patcherDefinition.type} not found")
+        }
+    }
+}
\ No newline at end of file
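Together with PatcherDefinitionFactory, the factory above replaces the deleted PatcherManager (next in this diff): definition lookup and patcher construction are now two separate steps. A minimal end-to-end sketch, assuming PatcherDefinition is a simple mutable holder (as the tests later in this diff suggest) and that a Deployment has already been loaded under the name referenced by the definition:

// Sketch only: build a definition by hand and apply the resulting patcher.
val definition = PatcherDefinition()
definition.type = "ReplicaPatcher"
definition.resource = "uc1-kstreams-deployment.yaml"

val resources = listOf(Pair("uc1-kstreams-deployment.yaml", deployment))  // deployment loaded elsewhere
PatcherFactory().createPatcher(definition, resources).patch("3")  // values are passed as strings, as in KubernetesBenchmark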
diff --git a/theodolite-quarkus/src/main/kotlin/theodolite/patcher/PatcherManager.kt b/theodolite-quarkus/src/main/kotlin/theodolite/patcher/PatcherManager.kt
deleted file mode 100644
index 5557eb4b98d5da3bbc8b8d82227de29335c5da67..0000000000000000000000000000000000000000
--- a/theodolite-quarkus/src/main/kotlin/theodolite/patcher/PatcherManager.kt
+++ /dev/null
@@ -1,73 +0,0 @@
-package theodolite.patcher
-
-import io.fabric8.kubernetes.api.model.KubernetesResource
-import theodolite.util.PatcherDefinition
-import theodolite.util.TypeName
-
-class PatcherManager {
-    private fun createK8sPatcher(
-        patcherDefinition: PatcherDefinition,
-        k8sResources: List<Pair<String, KubernetesResource>>
-    ): Patcher {
-        val resource =
-            k8sResources.filter { it.first == patcherDefinition.resource }.map { resource -> resource.second }[0]
-        return when (patcherDefinition.type) {
-            "ReplicaPatcher" -> ReplicaPatcher(resource)
-            "EnvVarPatcher" -> EnvVarPatcher(resource, patcherDefinition.container, patcherDefinition.variableName)
-            "NodeSelectorPatcher" -> NodeSelectorPatcher(resource, patcherDefinition.variableName)
-            "ResourceLimitPatcher" -> ResourceLimitPatcher(
-                resource,
-                patcherDefinition.container,
-                patcherDefinition.variableName
-            )
-            "ResourceRequestPatcher" -> ResourceRequestPatcher(
-                resource,
-                patcherDefinition.container,
-                patcherDefinition.variableName
-            )
-            else -> throw IllegalArgumentException("Patcher type ${patcherDefinition.type} not found")
-        }
-    }
-
-    private fun getPatcherDef(requiredType: String, patcherTypes: List<TypeName>): List<PatcherDefinition> {
-        return patcherTypes
-            .filter { type -> type.typeName == requiredType }
-            .flatMap { type -> type.patchers }
-    }
-
-    /**
-     * This function first creates a patcher definition and
-     * then patches the list of resources based on this patcher definition
-     *
-     * @param type Patcher type, for example "EnvVarPatcher"
-     * @param patcherTypes List of patcher types definitions, for example for resources and threads
-     * @param resources List of K8s resources, a patcher takes the resources that are needed
-     * @param value The value to patch
-     */
-    fun createAndApplyPatcher(
-        type: String,
-        patcherTypes: List<TypeName>,
-        resources: List<Pair<String, KubernetesResource>>,
-        value: Any
-    ) {
-        this.getPatcherDef(type, patcherTypes)
-            .forEach { patcherDef ->
-                createK8sPatcher(patcherDef, resources).patch(value)
-            }
-    }
-
-    /**
-     * Patch a resource based on the given patcher definition, a list of resources and a value to patch
-     *
-     * @param patcherDefinition The patcher definition
-     * @param resources List of patcher types definitions, for example for resources and threads
-     * @param value The value to patch
-     */
-    fun applyPatcher(
-        patcherDefinition: List<PatcherDefinition>,
-        resources: List<Pair<String, KubernetesResource>>,
-        value: Any
-    ) {
-        patcherDefinition.forEach { def -> this.createK8sPatcher(def, resources).patch(value) }
-    }
-}
diff --git a/theodolite-quarkus/src/main/kotlin/theodolite/patcher/SchedulerNamePatcher.kt b/theodolite-quarkus/src/main/kotlin/theodolite/patcher/SchedulerNamePatcher.kt
new file mode 100644
index 0000000000000000000000000000000000000000..88d9a978f23c8c5612d2ad46df795b7e3ba8cd19
--- /dev/null
+++ b/theodolite-quarkus/src/main/kotlin/theodolite/patcher/SchedulerNamePatcher.kt
@@ -0,0 +1,12 @@
+package theodolite.patcher
+
+import io.fabric8.kubernetes.api.model.KubernetesResource
+import io.fabric8.kubernetes.api.model.apps.Deployment
+
+class SchedulerNamePatcher(private val k8sResource: KubernetesResource) : Patcher {
+    override fun <String> patch(value: String) {
+        if (k8sResource is Deployment) {
+            k8sResource.spec.template.spec.schedulerName = value as kotlin.String
+        }
+    }
+}
\ No newline at end of file
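The new SchedulerNamePatcher above only touches Deployments and writes the scheduler name into spec.template.spec. A small sketch of its effect using fabric8's builder API; the Deployment here is a stand-in, not one of the benchmark resources:

import io.fabric8.kubernetes.api.model.apps.DeploymentBuilder

// Sketch only: a throwaway Deployment with an empty pod spec.
val deployment = DeploymentBuilder()
    .withNewMetadata().withName("uc1-kstreams").endMetadata()
    .withNewSpec().withNewTemplate().withNewSpec().endSpec().endTemplate().endSpec()
    .build()

SchedulerNamePatcher(deployment).patch("random-scheduler")
// deployment.spec.template.spec.schedulerName is now "random-scheduler",
// matching the new SchedulerNamePatcher override added to BenchmarkExecution.yaml further down.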
diff --git a/theodolite-quarkus/src/main/kotlin/theodolite/strategies/restriction/LowerBoundRestriction.kt b/theodolite-quarkus/src/main/kotlin/theodolite/strategies/restriction/LowerBoundRestriction.kt
index dfd6bc8052b8ca44ac8a9220fbf1e3c8df43b93d..6fed9b5d808405b42ad374346862f050ce192141 100644
--- a/theodolite-quarkus/src/main/kotlin/theodolite/strategies/restriction/LowerBoundRestriction.kt
+++ b/theodolite-quarkus/src/main/kotlin/theodolite/strategies/restriction/LowerBoundRestriction.kt
@@ -13,7 +13,7 @@ import theodolite.util.Results
 class LowerBoundRestriction(results: Results) : RestrictionStrategy(results) {
     override fun next(load: LoadDimension, resources: List<Resource>): List<Resource> {
         val maxLoad: LoadDimension? = this.results.getMaxBenchmarkedLoad(load)
-        var lowerBound: Resource? = this.results.getMinRequiredInstances(maxLoad, resources[0].getType())
+        var lowerBound: Resource? = this.results.getMinRequiredInstances(maxLoad)
         if (lowerBound == null) {
             lowerBound = resources[0]
         }
diff --git a/theodolite-quarkus/src/main/kotlin/theodolite/util/LoadDimension.kt b/theodolite-quarkus/src/main/kotlin/theodolite/util/LoadDimension.kt
index 29d47460bc49ec44e9a46a129e3dab3246f305b6..43cb861b2d6bbbe457a61d6f98f42487aad1d216 100644
--- a/theodolite-quarkus/src/main/kotlin/theodolite/util/LoadDimension.kt
+++ b/theodolite-quarkus/src/main/kotlin/theodolite/util/LoadDimension.kt
@@ -1,11 +1,11 @@
 package theodolite.util
 
-data class LoadDimension(private val number: Int, private val type: String) {
+data class LoadDimension(private val number: Int, private val type: List<PatcherDefinition>) {
     fun get(): Int {
         return this.number
     }
 
-    fun getType(): String {
+    fun getType(): List<PatcherDefinition> {
         return this.type
     }
 }
diff --git a/theodolite-quarkus/src/main/kotlin/theodolite/util/Resource.kt b/theodolite-quarkus/src/main/kotlin/theodolite/util/Resource.kt
index cb172e0b8de4cff5fc08828a177f3dd9d58bbb53..094e89ebb0d4566499068331ca2fc890f3335597 100644
--- a/theodolite-quarkus/src/main/kotlin/theodolite/util/Resource.kt
+++ b/theodolite-quarkus/src/main/kotlin/theodolite/util/Resource.kt
@@ -1,11 +1,11 @@
 package theodolite.util
 
-data class Resource(private val number: Int, private val type: String) {
+data class Resource(private val number: Int, private val type: List<PatcherDefinition>) {
     fun get(): Int {
         return this.number
     }
 
-    fun getType(): String {
+    fun getType(): List<PatcherDefinition> {
         return this.type
     }
 }
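With the change above, LoadDimension and Resource bundle their numeric value with the PatcherDefinitions that realize it, so callers no longer pass type strings around. A brief sketch of how these are now constructed; the definition names come from the TheodoliteExecutor change earlier in this diff, and the numbers are arbitrary examples:

// Sketch only: dimension values paired with the definitions selected by PatcherDefinitionFactory.
val load = LoadDimension(100000, loadDimensionPatcherDefinition)
val resource = Resource(3, resourcePatcherDefinition)
// load.getType() / resource.getType() now return List<PatcherDefinition>,
// which KubernetesBenchmark applies via PatcherFactory when the deployment is built.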
diff --git a/theodolite-quarkus/src/main/kotlin/theodolite/util/Results.kt b/theodolite-quarkus/src/main/kotlin/theodolite/util/Results.kt
index 91bde71792fdca383fc9511658bab39aa58d12ce..c827e8303f5c08f4f612476a1069ecefc0a7308b 100644
--- a/theodolite-quarkus/src/main/kotlin/theodolite/util/Results.kt
+++ b/theodolite-quarkus/src/main/kotlin/theodolite/util/Results.kt
@@ -11,10 +11,10 @@
         return this.results[experiment]
     }
 
-    fun getMinRequiredInstances(load: LoadDimension?, resourceTyp: String): Resource? {
-        if (this.results.isEmpty()) return Resource(Int.MIN_VALUE, resourceTyp)
+    fun getMinRequiredInstances(load: LoadDimension?): Resource? {
+        if (this.results.isEmpty()) return Resource(Int.MIN_VALUE, emptyList())
 
-        var requiredInstances: Resource? = Resource(Int.MAX_VALUE, resourceTyp)
+        var requiredInstances: Resource? = Resource(Int.MAX_VALUE, emptyList())
         for (experiment in results) {
             if (experiment.key.first == load && experiment.value) {
                 if (requiredInstances == null) {
diff --git a/theodolite-quarkus/src/main/resources/yaml/BenchmarkExecution.yaml b/theodolite-quarkus/src/main/resources/yaml/BenchmarkExecution.yaml
index a91d123628a03bb7fd82821d6f34d7bf1239c154..0e16d655d0a8f083e021c5c6bf2e35466563f75b 100644
--- a/theodolite-quarkus/src/main/resources/yaml/BenchmarkExecution.yaml
+++ b/theodolite-quarkus/src/main/resources/yaml/BenchmarkExecution.yaml
@@ -1,5 +1,5 @@
 name: "Theodolite Test Context"
-benchmark: "benchmarkType"
+benchmark: "uc1-kstreams"
 load:
   loadType: "NumSensors"
   loadValues:
@@ -12,6 +12,7 @@ slos:
   - sloType: "lag trend"
     threshold: 1000
     prometheusUrl: "http://localhost:32656"
+    # prometheusUrl: "http://prometheus-operated:9090"
    externalSloUrl: "http://localhost:80/evaluate-slope"
     offset: 0
     warmup: 0
@@ -43,4 +44,8 @@ configOverrides:
       resource: "uc1-kstreams-deployment.yaml"
       container: "uc-application"
       variableName: "memory"
-      value: "2Gi"
\ No newline at end of file
+      value: "2Gi"
+  - patcher:
+      type: "SchedulerNamePatcher"
+      resource: "uc1-kstreams-deployment.yaml"
+      value: "random-scheduler"
\ No newline at end of file
diff --git a/theodolite-quarkus/src/main/resources/yaml/BenchmarkType.yaml b/theodolite-quarkus/src/main/resources/yaml/BenchmarkType.yaml
index 8f6ee0e05efd4cbb8f6a5cb08d6fc048f1a8ee8a..957314cd776e51c6a32e28489c1a6c956214f42e 100644
--- a/theodolite-quarkus/src/main/resources/yaml/BenchmarkType.yaml
+++ b/theodolite-quarkus/src/main/resources/yaml/BenchmarkType.yaml
@@ -1,4 +1,4 @@
-name: "theodolite ist cool"
+name: "uc1-kstreams"
 appResource:
   - "uc1-kstreams-deployment.yaml"
   - "aggregation-service.yaml"
@@ -19,7 +19,7 @@ loadTypes:
       container: "workload-generator"
       variableName: "NUM_SENSORS"
 kafkaConfig:
-  bootstrapServer: "localhost:31290"
+  bootstrapServer: "theodolite-cp-kafka:9092"
   topics:
     - name: "input"
       numPartitions: 40
diff --git a/theodolite-quarkus/src/test/kotlin/theodolite/CompositeStrategyTest.kt b/theodolite-quarkus/src/test/kotlin/theodolite/CompositeStrategyTest.kt
index f269768cbcebfb778ee41f367ee5cc9b6fee1f19..67c9857220a0d419183644ffaf8c6a6e16a6ce9b 100644
--- a/theodolite-quarkus/src/test/kotlin/theodolite/CompositeStrategyTest.kt
+++ b/theodolite-quarkus/src/test/kotlin/theodolite/CompositeStrategyTest.kt
@@ -26,8 +26,8 @@ class CompositeStrategyTest {
             arrayOf(false, false, false, false, false, false, true),
             arrayOf(false, false, false, false, false, false, false)
         )
-        val mockLoads: List<LoadDimension> = (0..6).map { number -> LoadDimension(number, "NumSensors") }
-        val mockResources: List<Resource> = (0..6).map { number -> Resource(number, "Instances") }
+        val mockLoads: List<LoadDimension> = (0..6).map { number -> LoadDimension(number, emptyList()) }
+        val mockResources: List<Resource> = (0..6).map { number -> Resource(number, emptyList()) }
         val results = Results()
         val benchmark = TestBenchmark()
         val sloChecker: BenchmarkExecution.Slo = BenchmarkExecution.Slo()
@@ -38,7 +38,7 @@ class CompositeStrategyTest {
             CompositeStrategy(benchmarkExecutor, linearSearch, setOf(lowerBoundRestriction))
 
         val actual: ArrayList<Resource?> = ArrayList()
-        val expected: ArrayList<Resource?> = ArrayList(listOf(0, 2, 2, 3, 4, 6).map { x -> Resource(x, "Instances") })
+        val expected: ArrayList<Resource?> = ArrayList(listOf(0, 2, 2, 3, 4, 6).map { x -> Resource(x, emptyList()) })
         expected.add(null)
 
         for (load in mockLoads) {
@@ -59,8 +59,8 @@ class
CompositeStrategyTest { arrayOf(false, false, false, false, false, false, true), arrayOf(false, false, false, false, false, false, false) ) - val mockLoads: List<LoadDimension> = (0..6).map { number -> LoadDimension(number, "NumSensors") } - val mockResources: List<Resource> = (0..6).map { number -> Resource(number, "Instances") } + val mockLoads: List<LoadDimension> = (0..6).map { number -> LoadDimension(number, emptyList()) } + val mockResources: List<Resource> = (0..6).map { number -> Resource(number, emptyList()) } val results = Results() val benchmark = TestBenchmark() val sloChecker: BenchmarkExecution.Slo = BenchmarkExecution.Slo() @@ -72,7 +72,7 @@ class CompositeStrategyTest { CompositeStrategy(benchmarkExecutorImpl, binarySearch, setOf(lowerBoundRestriction)) val actual: ArrayList<Resource?> = ArrayList() - val expected: ArrayList<Resource?> = ArrayList(listOf(0, 2, 2, 3, 4, 6).map { x -> Resource(x, "Instances") }) + val expected: ArrayList<Resource?> = ArrayList(listOf(0, 2, 2, 3, 4, 6).map { x -> Resource(x, emptyList()) }) expected.add(null) for (load in mockLoads) { @@ -93,8 +93,8 @@ class CompositeStrategyTest { arrayOf(false, false, false, false, false, false, true, true), arrayOf(false, false, false, false, false, false, false, true) ) - val mockLoads: List<LoadDimension> = (0..6).map { number -> LoadDimension(number, "NumSensors") } - val mockResources: List<Resource> = (0..7).map { number -> Resource(number, "Instances") } + val mockLoads: List<LoadDimension> = (0..6).map { number -> LoadDimension(number, emptyList()) } + val mockResources: List<Resource> = (0..7).map { number -> Resource(number, emptyList()) } val results = Results() val benchmark = TestBenchmark() val sloChecker: BenchmarkExecution.Slo = BenchmarkExecution.Slo() @@ -106,7 +106,7 @@ class CompositeStrategyTest { val actual: ArrayList<Resource?> = ArrayList() val expected: ArrayList<Resource?> = - ArrayList(listOf(0, 2, 2, 3, 4, 6, 7).map { x -> Resource(x, "Instances") }) + ArrayList(listOf(0, 2, 2, 3, 4, 6, 7).map { x -> Resource(x, emptyList()) }) for (load in mockLoads) { actual.add(strategy.findSuitableResource(load, mockResources)) diff --git a/theodolite-quarkus/src/test/kotlin/theodolite/ResourceLimitPatcherTest.kt b/theodolite-quarkus/src/test/kotlin/theodolite/ResourceLimitPatcherTest.kt index 2170a6a54cf12433b18cc621d78a8608f3f71d63..82e4bc5d77f3f35d217c56a377513c0e7d329170 100644 --- a/theodolite-quarkus/src/test/kotlin/theodolite/ResourceLimitPatcherTest.kt +++ b/theodolite-quarkus/src/test/kotlin/theodolite/ResourceLimitPatcherTest.kt @@ -6,7 +6,7 @@ import io.quarkus.test.junit.QuarkusTest import io.smallrye.common.constraint.Assert.assertTrue import org.junit.jupiter.api.Test import theodolite.k8s.K8sResourceLoader -import theodolite.patcher.PatcherManager +import theodolite.patcher.PatcherFactory import theodolite.util.PatcherDefinition /** @@ -23,7 +23,7 @@ import theodolite.util.PatcherDefinition class ResourceLimitPatcherTest { val testPath = "./src/main/resources/testYaml/" val loader = K8sResourceLoader(DefaultKubernetesClient().inNamespace("")) - val manager = PatcherManager() + val patcherFactory = PatcherFactory() fun applyTest(fileName: String) { val cpuValue = "50m" @@ -42,20 +42,17 @@ class ResourceLimitPatcherTest { defMEM.container = "uc-application" defMEM.type = "ResourceLimitPatcher" - manager.applyPatcher( - patcherDefinition = listOf(defCPU), - resources = listOf(Pair("cpu-memory-deployment.yaml", k8sResource)), - value = cpuValue - ) - manager.applyPatcher( - 
patcherDefinition = listOf(defMEM), - resources = listOf(Pair("cpu-memory-deployment.yaml", k8sResource)), - value = memValue - ) + patcherFactory.createPatcher( + patcherDefinition = defCPU, + k8sResources = listOf(Pair("cpu-memory-deployment.yaml", k8sResource)) + ).patch(value = cpuValue) + patcherFactory.createPatcher( + patcherDefinition = defMEM, + k8sResources = listOf(Pair("cpu-memory-deployment.yaml", k8sResource)) + ).patch(value = memValue) k8sResource.spec.template.spec.containers.filter { it.name == defCPU.container } .forEach { - println(it) assertTrue(it.resources.limits["cpu"].toString() == cpuValue) assertTrue(it.resources.limits["memory"].toString() == memValue) } diff --git a/theodolite-quarkus/src/test/kotlin/theodolite/ResourceRequestPatcherTest.kt b/theodolite-quarkus/src/test/kotlin/theodolite/ResourceRequestPatcherTest.kt index 108142843949913eb6db34bb268eab2e91fda3cf..3cd6b012f09c5471b1b011b5cd03e61a0fab1c4e 100644 --- a/theodolite-quarkus/src/test/kotlin/theodolite/ResourceRequestPatcherTest.kt +++ b/theodolite-quarkus/src/test/kotlin/theodolite/ResourceRequestPatcherTest.kt @@ -6,7 +6,7 @@ import io.quarkus.test.junit.QuarkusTest import io.smallrye.common.constraint.Assert.assertTrue import org.junit.jupiter.api.Test import theodolite.k8s.K8sResourceLoader -import theodolite.patcher.PatcherManager +import theodolite.patcher.PatcherFactory import theodolite.util.PatcherDefinition /** @@ -23,7 +23,7 @@ import theodolite.util.PatcherDefinition class ResourceRequestPatcherTest { val testPath = "./src/main/resources/testYaml/" val loader = K8sResourceLoader(DefaultKubernetesClient().inNamespace("")) - val manager = PatcherManager() + val patcherFactory = PatcherFactory() fun applyTest(fileName: String) { val cpuValue = "50m" @@ -42,20 +42,17 @@ class ResourceRequestPatcherTest { defMEM.container = "uc-application" defMEM.type = "ResourceRequestPatcher" - manager.applyPatcher( - patcherDefinition = listOf(defCPU), - resources = listOf(Pair("cpu-memory-deployment.yaml", k8sResource)), - value = cpuValue - ) - manager.applyPatcher( - patcherDefinition = listOf(defMEM), - resources = listOf(Pair("cpu-memory-deployment.yaml", k8sResource)), - value = memValue - ) + patcherFactory.createPatcher( + patcherDefinition = defCPU, + k8sResources = listOf(Pair("cpu-memory-deployment.yaml", k8sResource)) + ).patch(value = cpuValue) + patcherFactory.createPatcher( + patcherDefinition = defMEM, + k8sResources = listOf(Pair("cpu-memory-deployment.yaml", k8sResource)) + ).patch(value = memValue) k8sResource.spec.template.spec.containers.filter { it.name == defCPU.container } .forEach { - println(it) assertTrue(it.resources.requests["cpu"].toString() == cpuValue) assertTrue(it.resources.requests["memory"].toString() == memValue) }