Commit 48fdb3a5 authored by Sören Henning

Merge branch 'upgrade-beam' into upgrade-flink

Parents: abff412c 9776f823
Merge request: !226 Upgrade Flink benchmarks to Flink 1.13

Showing changes with 359 additions and 29 deletions
@@ -10,6 +10,7 @@ stages:
   - test
   - check
   - deploy
+  - smoketest

 default:
   tags:
@@ -20,9 +21,9 @@ default:
     - exec-dind
   # see https://docs.gitlab.com/ee/ci/docker/using_docker_build.html#tls-enabled
   # for image usage and settings for building with TLS and docker in docker
-  image: docker:19.03.1
+  image: docker:20.10.12
   services:
-    - docker:19.03.1-dind
+    - docker:20.10.12-dind
   variables:
     DOCKER_TLS_CERTDIR: "/certs"
@@ -45,6 +46,10 @@ default:
        fi
    - "[ $DOCKERFILE ] && KANIKO_DOCKERFILE=\"--dockerfile $DOCKERFILE\""
    - /kaniko/executor --context `pwd`/$CONTEXT $KANIKO_DOCKERFILE $KANIKO_D
+   - echo "PUBLISHED_IMAGE_TAG=${CI_COMMIT_TAG-$CI_COMMIT_SHORT_SHA}" >> build.env
+  artifacts:
+    reports:
+      dotenv: build.env

 # Theodolite Docs
@@ -373,6 +378,166 @@ deploy-uc4-load-generator:
     JAVA_PROJECT_NAME: "uc4-load-generator"
     JAVA_PROJECT_DEPS: "load-generator-commons"

.smoketest-benchmarks:
stage: smoketest
extends:
- .dind
image: ghcr.io/cau-se/theodolite-build-docker-compose-jq:20.10.12
before_script:
- cd theodolite-benchmarks/docker-test
# variables:
# TEST_LOG_FILE: "test.log"
script:
- export THEODOLITE_TAG=$PUBLISHED_IMAGE_TAG
- ./smoketest-runner.sh ./$DOCKER_COMPOSE_DIR
# - cat test.log
after_script:
- cd ./$DOCKER_COMPOSE_DIR
- docker-compose down
rules:
- changes:
- theodolite-benchmarks/*
- theodolite-benchmarks/{$JAVA_PROJECT_DEPS}/**/*
if: "$CR_HOST && $CR_ORG && $CR_USER && $CR_PW && $DOCKER_COMPOSE_DIR && $JAVA_PROJECT_DEPS"
- if: "$CR_HOST && $CR_ORG && $CR_USER && $CR_PW && $DOCKER_COMPOSE_DIR && $JAVA_PROJECT_DEPS"
when: manual
allow_failure: true
smoketest-uc1-kstreams:
extends: .smoketest-benchmarks
needs:
- deploy-uc1-kstreams
- deploy-uc1-load-generator
variables:
DOCKER_COMPOSE_DIR: "uc1-kstreams"
JAVA_PROJECT_DEPS: "uc1-kstreams,kstreams-commons,uc1-load-generator,load-generator-commons"
smoketest-uc1-flink:
extends: .smoketest-benchmarks
needs:
- deploy-uc1-flink
- deploy-uc1-load-generator
variables:
DOCKER_COMPOSE_DIR: "uc1-flink"
JAVA_PROJECT_DEPS: "uc1-flink,flink-commons,uc1-load-generator,load-generator-commons"
smoketest-uc1-beam-flink:
extends: .smoketest-benchmarks
needs:
- deploy-uc1-beam-flink
- deploy-uc1-load-generator
variables:
DOCKER_COMPOSE_DIR: "uc1-beam-flink"
JAVA_PROJECT_DEPS: "uc1-beam-flink,uc1-beam,beam-commons,uc1-load-generator,load-generator-commons"
smoketest-uc1-beam-samza:
extends: .smoketest-benchmarks
needs:
- deploy-uc1-beam-samza
- deploy-uc1-load-generator
variables:
DOCKER_COMPOSE_DIR: "uc1-beam-samza"
JAVA_PROJECT_DEPS: "uc1-beam-samza,uc1-beam,beam-commons,uc1-load-generator,load-generator-commons"
smoketest-uc2-kstreams:
extends: .smoketest-benchmarks
needs:
- deploy-uc2-kstreams
- deploy-uc2-load-generator
variables:
DOCKER_COMPOSE_DIR: "uc2-kstreams"
JAVA_PROJECT_DEPS: "uc2-kstreams,kstreams-commons,uc2-load-generator,load-generator-commons"
smoketest-uc2-flink:
extends: .smoketest-benchmarks
needs:
- deploy-uc2-flink
- deploy-uc2-load-generator
variables:
DOCKER_COMPOSE_DIR: "uc2-flink"
JAVA_PROJECT_DEPS: "uc2-flink,flink-commons,uc2-load-generator,load-generator-commons"
smoketest-uc2-beam-flink:
extends: .smoketest-benchmarks
needs:
- deploy-uc2-beam-flink
- deploy-uc2-load-generator
variables:
DOCKER_COMPOSE_DIR: "uc2-beam-flink"
JAVA_PROJECT_DEPS: "uc2-beam-flink,uc2-beam,beam-commons,uc2-load-generator,load-generator-commons"
smoketest-uc2-beam-samza:
extends: .smoketest-benchmarks
needs:
- deploy-uc2-beam-samza
- deploy-uc2-load-generator
variables:
DOCKER_COMPOSE_DIR: "uc2-beam-samza"
JAVA_PROJECT_DEPS: "uc2-beam-samza,uc2-beam,beam-commons,uc2-load-generator,load-generator-commons"
smoketest-uc3-kstreams:
extends: .smoketest-benchmarks
needs:
- deploy-uc3-kstreams
- deploy-uc3-load-generator
variables:
DOCKER_COMPOSE_DIR: "uc3-kstreams"
JAVA_PROJECT_DEPS: "uc3-kstreams,kstreams-commons,uc3-load-generator,load-generator-commons"
smoketest-uc3-beam-flink:
extends: .smoketest-benchmarks
needs:
- deploy-uc3-beam-flink
- deploy-uc3-load-generator
variables:
DOCKER_COMPOSE_DIR: "uc3-beam-flink"
JAVA_PROJECT_DEPS: "uc3-beam-flink,uc3-beam,beam-commons,uc3-load-generator,load-generator-commons"
smoketest-uc3-beam-samza:
extends: .smoketest-benchmarks
needs:
- deploy-uc3-beam-samza
- deploy-uc3-load-generator
variables:
DOCKER_COMPOSE_DIR: "uc3-beam-samza"
JAVA_PROJECT_DEPS: "uc3-beam-samza,uc3-beam,beam-commons,uc3-load-generator,load-generator-commons"
smoketest-uc4-kstreams:
extends: .smoketest-benchmarks
needs:
- deploy-uc4-kstreams
- deploy-uc4-load-generator
variables:
DOCKER_COMPOSE_DIR: "uc4-kstreams"
JAVA_PROJECT_DEPS: "uc4-kstreams,kstreams-commons,uc4-load-generator,load-generator-commons"
smoketest-uc4-flink:
extends: .smoketest-benchmarks
needs:
- deploy-uc4-flink
- deploy-uc4-load-generator
variables:
DOCKER_COMPOSE_DIR: "uc4-flink"
JAVA_PROJECT_DEPS: "uc4-flink,flink-commons,uc4-load-generator,load-generator-commons"
smoketest-uc4-beam-flink:
extends: .smoketest-benchmarks
needs:
- deploy-uc4-beam-flink
- deploy-uc4-load-generator
variables:
DOCKER_COMPOSE_DIR: "uc4-beam-flink"
JAVA_PROJECT_DEPS: "uc4-beam-flink,uc4-beam,beam-commons,uc4-load-generator,load-generator-commons"
smoketest-uc4-beam-samza:
extends: .smoketest-benchmarks
needs:
- deploy-uc4-beam-samza
- deploy-uc4-load-generator
variables:
DOCKER_COMPOSE_DIR: "uc4-beam-samza"
JAVA_PROJECT_DEPS: "uc4-beam-samza,uc4-beam,beam-commons,uc4-load-generator,load-generator-commons"

 # Theodolite Framework
...
@@ -17,7 +17,7 @@ For each benchmark, we provide a [load generator as OCI container image](https:/
 You can simply run a load generator container, for example, for benchmark UC1 with:

 ```sh
-docker run ghcr.io/cau-se/theodolite-uc1-workload-generator
+docker run -it ghcr.io/cau-se/theodolite-uc1-workload-generator
 ```

 ### Message format
...
@@ -36,3 +36,13 @@ the host, for example, from the IDE or Gradle. In such cases, the following adju
 You can now connect to Kafka from your host system with bootstrap server `localhost:19092` and contact the Schema
 Registry via `localhost:8081`. **Pay attention to the Kafka port, which is *19092* instead of the default one *9092*.**
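As a quick sanity check, assuming `kcat` (the same tool used by the smoke tests below) is installed on your host, the following sketch should reach the cluster through the mapped port; the second command additionally assumes a kcat build with Avro/Schema Registry support:

```sh
# List brokers and topics via the host-mapped port (19092 instead of 9092)
kcat -L -b localhost:19092

# Consume five records from the input topic, decoding Avro values via the Schema Registry
kcat -C -b localhost:19092 -t input -s value=avro -r http://localhost:8081 -c 5
```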
## Running Smoke Tests
The `smoketest-runner.sh` script can be used to run a simple test for a specific Docker Compose file. You can call it with
```sh
./smoketest-runner.sh <docker-compose-dir>
```
where `<docker-compose-dir>` is the directory of a Docker Compose file, for example, `uc2-beam-samza`. The script exits with a zero exit code in case of success and a non-zero exit code otherwise.
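For example, a run for the `uc2-beam-samza` directory could be combined with a simple exit-code check:

```sh
./smoketest-runner.sh uc2-beam-samza && echo "smoke test passed" || echo "smoke test failed"
```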
#!/bin/sh
# Run the smoke test for every directory that contains a test.sh and summarize the results.
find . -name 'test.sh' -type f -exec dirname {} \; |
    sort |
    xargs -I %s sh -c "./smoketest-runner.sh %s 1>&2; echo \$?" |
    sort |
    awk 'BEGIN {count[0]=0; count[1]=0} {count[$1!=0]++} END {print count[0] " tests successful, " count[1] " tests failed."; exit count[1]}'
#!/bin/sh
# Run the smoke test for a single Docker Compose directory (passed as the first argument).
COMPOSE_FILE_PATH=$1
echo "Run test for '$COMPOSE_FILE_PATH'."

cd $COMPOSE_FILE_PATH
docker-compose pull -q
# Start the infrastructure services first and give them time to become ready.
docker-compose up -d kafka zookeeper schema-registry
sleep 30s
docker-compose up -d
sleep 5s
docker-compose ps

if test -f "./test.sh"; then
    #timeout --foreground 3m ./test.sh
    ./test.sh
    RETURN=$?
else
    RETURN=$? # non-zero, since the test -f check above failed
    echo "test.sh does not exist for '$COMPOSE_FILE_PATH'."
fi

if [ $RETURN -eq 0 ]; then
    echo "Test for '$COMPOSE_FILE_PATH' has passed."
else
    echo "Test for '$COMPOSE_FILE_PATH' has failed."
fi

docker-compose down
exit $RETURN
@@ -20,18 +20,23 @@ services:
       KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 30000
       KAFKA_AUTO_CREATE_TOPICS_ENABLE: "false"
       KAFKA_CREATE_TOPICS: "input:3:1"
+  kcat:
+    image: edenhill/kcat:1.7.1
+    entrypoint: "sh"
+    tty: true
   schema-registry:
-    image: confluentinc/cp-schema-registry:5.3.1
+    image: confluentinc/cp-schema-registry:7.0.1
     depends_on:
       - zookeeper
       - kafka
+    restart: "on-failure"
     expose:
       - "8081"
     #ports:
     #  - 8081:8081
     environment:
       SCHEMA_REGISTRY_HOST_NAME: schema-registry
-      SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
+      SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: kafka:9092
   load-generator:
     image: ghcr.io/cau-se/theodolite-uc1-workload-generator:${THEODOLITE_TAG:-latest}
     depends_on:
...
#!/bin/sh
sleep 55s # to let the benchmark start and produce some output
docker-compose logs --tail 100 benchmark-taskmanager |
sed -n "s/^.*Key:\s\(\S*\), Value:\s\(\S*\).*$/\2/p" |
tee /dev/stderr |
jq .identifier |
sort |
uniq |
wc -l |
grep "\b10\b"
@@ -22,18 +22,23 @@ services:
       KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 30000
       KAFKA_AUTO_CREATE_TOPICS_ENABLE: "false"
       KAFKA_CREATE_TOPICS: "input:3:1"
+  kcat:
+    image: edenhill/kcat:1.7.1
+    entrypoint: "sh"
+    tty: true
   schema-registry:
-    image: confluentinc/cp-schema-registry:5.3.1
+    image: confluentinc/cp-schema-registry:7.0.1
     depends_on:
       - zookeeper
       - kafka
+    restart: "on-failure"
     expose:
       - "8081"
     #ports:
     #  - 8081:8081
     environment:
       SCHEMA_REGISTRY_HOST_NAME: schema-registry
-      SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
+      SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: kafka:9092
   benchmark:
     image: ghcr.io/cau-se/theodolite-uc1-beam-samza:${THEODOLITE_TAG:-latest}
     scale: 1
...
#!/bin/sh
sleep 55s # to let the benchmark start and produce some output
docker-compose logs --tail 100 benchmark |
sed -n "s/^.*Key:\s\(\S*\), Value:\s\(\S*\).*$/\2/p" |
tee /dev/stderr |
jq .identifier |
sort |
uniq |
wc -l |
grep "\b10\b"
-version: '2'
+version: '2.2'
 services:
   zookeeper:
     image: confluentinc/cp-zookeeper
@@ -20,18 +20,23 @@ services:
       KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 30000
       KAFKA_AUTO_CREATE_TOPICS_ENABLE: "false"
       KAFKA_CREATE_TOPICS: "input:3:1"
+  kcat:
+    image: edenhill/kcat:1.7.1
+    entrypoint: "sh"
+    tty: true
   schema-registry:
-    image: confluentinc/cp-schema-registry:5.3.1
+    image: confluentinc/cp-schema-registry:7.0.1
     depends_on:
       - zookeeper
       - kafka
+    restart: "on-failure"
     expose:
       - "8081"
     #ports:
     #  - 8081:8081
     environment:
       SCHEMA_REGISTRY_HOST_NAME: schema-registry
-      SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
+      SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: kafka:9092
   load-generator:
     image: ghcr.io/cau-se/theodolite-uc1-workload-generator:${THEODOLITE_TAG:-latest}
     depends_on:
...
#!/bin/sh
sleep 55s # to let the benchmark start and produce some output
docker-compose logs --tail 100 benchmark-taskmanager |
sed -n "s/^.*Record:\s\(\S*\)$/\1/p" |
tee /dev/stderr |
jq .identifier |
sort |
uniq |
wc -l |
grep "\b10\b"
-version: '2'
+version: '2.2'
 services:
   zookeeper:
     image: confluentinc/cp-zookeeper
@@ -20,20 +20,25 @@ services:
       KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 30000
       KAFKA_AUTO_CREATE_TOPICS_ENABLE: "false"
       KAFKA_CREATE_TOPICS: "input:3:1"
+  kcat:
+    image: edenhill/kcat:1.7.1
+    entrypoint: "sh"
+    tty: true
   schema-registry:
-    image: confluentinc/cp-schema-registry:5.3.1
+    image: confluentinc/cp-schema-registry:7.0.1
     depends_on:
       - zookeeper
       - kafka
+    restart: "on-failure"
     expose:
       - "8081"
     #ports:
     #  - 8081:8081
     environment:
       SCHEMA_REGISTRY_HOST_NAME: schema-registry
-      SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
+      SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: kafka:9092
   benchmark:
-    image: ghcr.io/cau-se/theodolite-uc1-kstreams-app:${THEODOLITE_TAG:-latest}
+    image: ghcr.io/cau-se/theodolite-uc1-kstreams-app:latest
     depends_on:
       - schema-registry
       - kafka
...
#!/bin/sh
sleep 55s # to let the benchmark start and produce some output
docker-compose logs --tail 100 benchmark |
sed -n "s/^.*Record:\s\(\S*\)$/\1/p" |
tee /dev/stderr |
jq .identifier |
sort |
uniq |
wc -l |
grep "\b10\b"
@@ -20,18 +20,23 @@ services:
       KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 30000
       KAFKA_AUTO_CREATE_TOPICS_ENABLE: "false"
       KAFKA_CREATE_TOPICS: "input:3:1,output:3:1"
+  kcat:
+    image: edenhill/kcat:1.7.1
+    entrypoint: "sh"
+    tty: true
   schema-registry:
-    image: confluentinc/cp-schema-registry:5.3.1
+    image: confluentinc/cp-schema-registry:7.0.1
     depends_on:
       - zookeeper
       - kafka
+    restart: "on-failure"
     expose:
       - "8081"
     #ports:
     #  - 8081:8081
     environment:
       SCHEMA_REGISTRY_HOST_NAME: schema-registry
-      SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
+      SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: kafka:9092
   load-generator:
     image: ghcr.io/cau-se/theodolite-uc2-workload-generator:${THEODOLITE_TAG:-latest}
     depends_on:
...
#!/bin/sh
until docker-compose exec -T kcat kcat -L -b kafka:9092 -t output -J | jq -r '.topics[0].partitions | length' | grep "\b3\b"; do sleep 5s; done
docker-compose exec -T kcat kcat -C -b kafka:9092 -t output -s key=s -s value=s -r http://schema-registry:8081 -f '%k:%s\n' -c 20 |
tee /dev/stderr |
awk -F ':' '!/^%/ {print $1}' |
sort |
uniq |
wc -l |
grep "\b10\b"
@@ -22,18 +22,23 @@ services:
       KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 30000
       KAFKA_AUTO_CREATE_TOPICS_ENABLE: "false"
       KAFKA_CREATE_TOPICS: "input:3:1,output:3:1"
+  kcat:
+    image: edenhill/kcat:1.7.1
+    entrypoint: "sh"
+    tty: true
   schema-registry:
-    image: confluentinc/cp-schema-registry:5.3.1
+    image: confluentinc/cp-schema-registry:7.0.1
     depends_on:
       - zookeeper
       - kafka
+    restart: "on-failure"
     expose:
       - "8081"
     #ports:
     #  - 8081:8081
     environment:
       SCHEMA_REGISTRY_HOST_NAME: schema-registry
-      SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
+      SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: kafka:9092
   benchmark:
     image: ghcr.io/cau-se/theodolite-uc2-beam-samza:${THEODOLITE_TAG:-latest}
     scale: 1
...
#!/bin/sh
until docker-compose exec -T kcat kcat -L -b kafka:9092 -t output -J | jq -r '.topics[0].partitions | length' | grep "\b3\b"; do sleep 5s; done
docker-compose exec -T kcat kcat -C -b kafka:9092 -t output -s key=s -s value=s -r http://schema-registry:8081 -f '%k:%s\n' -c 20 |
tee /dev/stderr |
awk -F ':' '!/^%/ {print $1}' |
sort |
uniq |
wc -l |
grep "\b10\b"
-version: '2'
+version: '2.2'
 services:
   zookeeper:
     image: confluentinc/cp-zookeeper
@@ -20,18 +20,23 @@ services:
       KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 30000
       KAFKA_AUTO_CREATE_TOPICS_ENABLE: "false"
       KAFKA_CREATE_TOPICS: "input:3:1,output:3:1"
+  kcat:
+    image: edenhill/kcat:1.7.1
+    entrypoint: "sh"
+    tty: true
   schema-registry:
-    image: confluentinc/cp-schema-registry:5.3.1
+    image: confluentinc/cp-schema-registry:7.0.1
     depends_on:
       - zookeeper
       - kafka
-    #ports:
-    #  - "8081:8081"
+    restart: "on-failure"
     expose:
       - "8081"
+    #ports:
+    #  - 8081:8081
     environment:
       SCHEMA_REGISTRY_HOST_NAME: schema-registry
-      SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
+      SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: kafka:9092
   load-generator:
     image: ghcr.io/cau-se/theodolite-uc2-workload-generator:${THEODOLITE_TAG:-latest}
     depends_on:
...
#!/bin/sh
until docker-compose exec -T kcat kcat -L -b kafka:9092 -t output -J | jq -r '.topics[0].partitions | length' | grep "\b3\b"; do sleep 5s; done
docker-compose exec -T kcat kcat -C -b kafka:9092 -t output -s key=s -s value=s -r http://schema-registry:8081 -f '%k:%s\n' -c 20 |
tee /dev/stderr |
awk -F ':' '!/^%/ {print $1}' |
sort |
uniq |
wc -l |
grep "\b10\b"
-version: '2'
+version: '2.2'
 services:
   zookeeper:
     image: confluentinc/cp-zookeeper
@@ -20,18 +20,23 @@ services:
       KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 30000
       KAFKA_AUTO_CREATE_TOPICS_ENABLE: "false"
       KAFKA_CREATE_TOPICS: "input:3:1,output:3:1"
+  kcat:
+    image: edenhill/kcat:1.7.1
+    entrypoint: "sh"
+    tty: true
   schema-registry:
-    image: confluentinc/cp-schema-registry:5.3.1
+    image: confluentinc/cp-schema-registry:7.0.1
     depends_on:
       - zookeeper
       - kafka
-    #ports:
-    #  - "8081:8081"
+    restart: "on-failure"
     expose:
       - "8081"
+    #ports:
+    #  - 8081:8081
     environment:
       SCHEMA_REGISTRY_HOST_NAME: schema-registry
-      SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
+      SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: kafka:9092
   benchmark:
     image: ghcr.io/cau-se/theodolite-uc2-kstreams-app:${THEODOLITE_TAG:-latest}
     depends_on:
...