Commit e944be2e authored by Sören Henning

Merge branch 'master' into pubsub-load-generator

parents bd9af6e8 47ec6dbc
1 merge request: !225 Add option to generate load via Google PubSub
Showing changed files with 275 additions and 159 deletions
FROM tiangolo/uvicorn-gunicorn-fastapi:python3.7
FROM python:3.8
COPY requirements.txt requirements.txt
RUN pip install -r requirements.txt
WORKDIR /code
COPY ./app /app
\ No newline at end of file
COPY ./requirements.txt /code/requirements.txt
RUN pip install --no-cache-dir --upgrade -r /code/requirements.txt
COPY ./app /code/app
WORKDIR /code/app
ENV HOST 0.0.0.0
ENV PORT 80
CMD ["sh", "-c", "uvicorn main:app --host $HOST --port $PORT"]
fastapi==0.65.2
scikit-learn==0.20.3
pandas==1.0.3
uvicorn
requests
fastapi>=0.68.0,<0.69.0
uvicorn>=0.15.0,<0.16.0
#pydantic>=1.8.0,<2.0.0
scikit-learn==0.22.2
pandas==1.0.3
cleanup.add_default_serial_version_id=true
cleanup.add_generated_serial_version_id=false
cleanup.add_missing_annotations=true
cleanup.add_missing_deprecated_annotations=true
cleanup.add_missing_methods=false
cleanup.add_missing_nls_tags=false
cleanup.add_missing_override_annotations=true
cleanup.add_missing_override_annotations_interface_methods=true
cleanup.add_serial_version_id=false
cleanup.always_use_blocks=true
cleanup.always_use_parentheses_in_expressions=false
cleanup.always_use_this_for_non_static_field_access=true
cleanup.always_use_this_for_non_static_method_access=true
cleanup.convert_functional_interfaces=false
cleanup.convert_to_enhanced_for_loop=true
cleanup.correct_indentation=true
cleanup.format_source_code=true
cleanup.format_source_code_changes_only=false
cleanup.insert_inferred_type_arguments=false
cleanup.make_local_variable_final=true
cleanup.make_parameters_final=true
cleanup.make_private_fields_final=true
cleanup.make_type_abstract_if_missing_method=false
cleanup.make_variable_declarations_final=true
cleanup.never_use_blocks=false
cleanup.never_use_parentheses_in_expressions=true
cleanup.organize_imports=true
cleanup.qualify_static_field_accesses_with_declaring_class=false
cleanup.qualify_static_member_accesses_through_instances_with_declaring_class=true
cleanup.qualify_static_member_accesses_through_subtypes_with_declaring_class=true
cleanup.qualify_static_member_accesses_with_declaring_class=true
cleanup.qualify_static_method_accesses_with_declaring_class=false
cleanup.remove_private_constructors=true
cleanup.remove_redundant_modifiers=false
cleanup.remove_redundant_semicolons=true
cleanup.remove_redundant_type_arguments=true
cleanup.remove_trailing_whitespaces=true
cleanup.remove_trailing_whitespaces_all=true
cleanup.remove_trailing_whitespaces_ignore_empty=false
cleanup.remove_unnecessary_casts=true
cleanup.remove_unnecessary_nls_tags=true
cleanup.remove_unused_imports=true
cleanup.remove_unused_local_variables=false
cleanup.remove_unused_private_fields=true
cleanup.remove_unused_private_members=false
cleanup.remove_unused_private_methods=true
cleanup.remove_unused_private_types=true
cleanup.sort_members=false
cleanup.sort_members_all=false
cleanup.use_anonymous_class_creation=false
cleanup.use_blocks=true
cleanup.use_blocks_only_for_return_and_throw=false
cleanup.use_lambda=true
cleanup.use_parentheses_in_expressions=true
cleanup.use_this_for_non_static_field_access=true
cleanup.use_this_for_non_static_field_access_only_if_necessary=false
cleanup.use_this_for_non_static_method_access=true
cleanup.use_this_for_non_static_method_access_only_if_necessary=false
cleanup_profile=_CAU-SE-Style
cleanup_settings_version=2
eclipse.preferences.version=1
editor_save_participant_org.eclipse.jdt.ui.postsavelistener.cleanup=true
formatter_profile=_CAU-SE-Style
formatter_settings_version=21
org.eclipse.jdt.ui.ignorelowercasenames=true
org.eclipse.jdt.ui.importorder=;
org.eclipse.jdt.ui.ondemandthreshold=99
org.eclipse.jdt.ui.staticondemandthreshold=99
sp_cleanup.add_default_serial_version_id=true
sp_cleanup.add_generated_serial_version_id=false
sp_cleanup.add_missing_annotations=true
sp_cleanup.add_missing_deprecated_annotations=true
sp_cleanup.add_missing_methods=false
sp_cleanup.add_missing_nls_tags=false
sp_cleanup.add_missing_override_annotations=true
sp_cleanup.add_missing_override_annotations_interface_methods=true
sp_cleanup.add_serial_version_id=false
sp_cleanup.always_use_blocks=true
sp_cleanup.always_use_parentheses_in_expressions=false
sp_cleanup.always_use_this_for_non_static_field_access=true
sp_cleanup.always_use_this_for_non_static_method_access=true
sp_cleanup.convert_functional_interfaces=false
sp_cleanup.convert_to_enhanced_for_loop=true
sp_cleanup.correct_indentation=true
sp_cleanup.format_source_code=true
sp_cleanup.format_source_code_changes_only=false
sp_cleanup.insert_inferred_type_arguments=false
sp_cleanup.make_local_variable_final=true
sp_cleanup.make_parameters_final=true
sp_cleanup.make_private_fields_final=true
sp_cleanup.make_type_abstract_if_missing_method=false
sp_cleanup.make_variable_declarations_final=true
sp_cleanup.never_use_blocks=false
sp_cleanup.never_use_parentheses_in_expressions=true
sp_cleanup.on_save_use_additional_actions=true
sp_cleanup.organize_imports=true
sp_cleanup.qualify_static_field_accesses_with_declaring_class=false
sp_cleanup.qualify_static_member_accesses_through_instances_with_declaring_class=true
sp_cleanup.qualify_static_member_accesses_through_subtypes_with_declaring_class=true
sp_cleanup.qualify_static_member_accesses_with_declaring_class=true
sp_cleanup.qualify_static_method_accesses_with_declaring_class=false
sp_cleanup.remove_private_constructors=true
sp_cleanup.remove_redundant_modifiers=false
sp_cleanup.remove_redundant_semicolons=true
sp_cleanup.remove_redundant_type_arguments=true
sp_cleanup.remove_trailing_whitespaces=true
sp_cleanup.remove_trailing_whitespaces_all=true
sp_cleanup.remove_trailing_whitespaces_ignore_empty=false
sp_cleanup.remove_unnecessary_casts=true
sp_cleanup.remove_unnecessary_nls_tags=true
sp_cleanup.remove_unused_imports=true
sp_cleanup.remove_unused_local_variables=false
sp_cleanup.remove_unused_private_fields=true
sp_cleanup.remove_unused_private_members=false
sp_cleanup.remove_unused_private_methods=true
sp_cleanup.remove_unused_private_types=true
sp_cleanup.sort_members=false
sp_cleanup.sort_members_all=false
sp_cleanup.use_anonymous_class_creation=false
sp_cleanup.use_blocks=true
sp_cleanup.use_blocks_only_for_return_and_throw=false
sp_cleanup.use_lambda=true
sp_cleanup.use_parentheses_in_expressions=true
sp_cleanup.use_this_for_non_static_field_access=true
sp_cleanup.use_this_for_non_static_field_access_only_if_necessary=false
sp_cleanup.use_this_for_non_static_method_access=true
sp_cleanup.use_this_for_non_static_method_access_only_if_necessary=false
\ No newline at end of file
@@ -13,21 +13,19 @@ repositories {
}
dependencies {
// These dependencies are used internally, and not exposed to consumers on their own compile classpath.
implementation('org.industrial-devops:titan-ccp-common:0.1.0-SNAPSHOT') { changing = true }
implementation('org.industrial-devops:titan-ccp-common-kafka:0.1.0-SNAPSHOT') { changing = true }
implementation 'com.google.code.gson:gson:2.8.2'
implementation 'com.google.guava:guava:24.1-jre'
implementation group: 'org.apache.beam', name: 'beam-sdks-java-core', version: '2.22.0'
implementation('org.apache.beam:beam-sdks-java-io-kafka:2.22.0'){
exclude group: 'org.apache.kafka', module: 'kafka-clients'
}
implementation ('io.confluent:kafka-streams-avro-serde:5.3.2')
implementation group: 'org.slf4j', name: 'slf4j-simple', version: '1.7.30'
implementation group: 'org.apache.beam', name: 'beam-sdks-java-core', version: '2.22.0'
runtimeOnly 'org.slf4j:slf4j-api:1.7.32'
runtimeOnly 'org.slf4j:slf4j-jdk14:1.7.32'
// Use JUnit test framework
testImplementation 'junit:junit:4.12'
}
@@ -12,6 +12,9 @@ import org.apache.kafka.clients.consumer.ConsumerConfig;
*/
public class AbstractPipeline extends Pipeline {
private static final String KAFKA_CONFIG_SPECIFIC_AVRO_READER = "specific.avro.reader"; // NOPMD
private static final String KAFKA_CONFIG_SCHEMA_REGISTRY_URL = "schema.registry.url"; // NOPMD
protected final String inputTopic;
protected final String bootstrapServer;
// Application Configurations
@@ -21,8 +24,8 @@ public class AbstractPipeline extends Pipeline {
super(options);
this.config = config;
inputTopic = config.getString(ConfigurationKeys.KAFKA_INPUT_TOPIC);
bootstrapServer = config.getString(ConfigurationKeys.KAFKA_BOOTSTRAP_SERVERS);
this.inputTopic = config.getString(ConfigurationKeys.KAFKA_INPUT_TOPIC);
this.bootstrapServer = config.getString(ConfigurationKeys.KAFKA_BOOTSTRAP_SERVERS);
}
/**
@@ -32,19 +35,37 @@ public class AbstractPipeline extends Pipeline {
*/
public Map<String, Object> buildConsumerConfig() {
final Map<String, Object> consumerConfig = new HashMap<>();
consumerConfig.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG,
config.getString(ConfigurationKeys.ENABLE_AUTO_COMMIT_CONFIG));
consumerConfig.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG,
config
.getString(ConfigurationKeys.AUTO_OFFSET_RESET_CONFIG));
consumerConfig.put("schema.registry.url",
config.getString(ConfigurationKeys.SCHEMA_REGISTRY_URL));
consumerConfig.put("specific.avro.reader",
config.getString(ConfigurationKeys.SPECIFIC_AVRO_READER));
final String applicationName = config.getString(ConfigurationKeys.APPLICATION_NAME);
consumerConfig.put(ConsumerConfig.GROUP_ID_CONFIG, applicationName);
consumerConfig.put(
ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG,
this.config.getString(ConfigurationKeys.ENABLE_AUTO_COMMIT_CONFIG));
consumerConfig.put(
ConsumerConfig.AUTO_OFFSET_RESET_CONFIG,
this.config.getString(ConfigurationKeys.AUTO_OFFSET_RESET_CONFIG));
consumerConfig.put(
KAFKA_CONFIG_SCHEMA_REGISTRY_URL,
this.config.getString(ConfigurationKeys.SCHEMA_REGISTRY_URL));
consumerConfig.put(
KAFKA_CONFIG_SPECIFIC_AVRO_READER,
this.config.getString(ConfigurationKeys.SPECIFIC_AVRO_READER));
consumerConfig.put(
ConsumerConfig.GROUP_ID_CONFIG,
this.config.getString(ConfigurationKeys.APPLICATION_NAME));
return consumerConfig;
}
/**
* Builds a simple configuration for a Kafka producer transformation.
*
* @return the built configuration.
*/
public Map<String, Object> buildProducerConfig() {
final Map<String, Object> config = new HashMap<>();
config.put(
KAFKA_CONFIG_SCHEMA_REGISTRY_URL,
this.config.getString(ConfigurationKeys.SCHEMA_REGISTRY_URL));
config.put(
KAFKA_CONFIG_SPECIFIC_AVRO_READER,
this.config.getString(ConfigurationKeys.SPECIFIC_AVRO_READER));
return config;
}
}
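For illustration, a concrete pipeline could wire these two helpers into the Kafka read and write transforms shown below. The following is a minimal sketch under stated assumptions, not code from this commit: the AbstractPipeline constructor signature, the commons-configuration2 Configuration type, the output topic name, and the string-mapping step are assumed for the example.
package theodolite.commons.beam;
import java.util.Map;
import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.transforms.MapElements;
import org.apache.beam.sdk.values.KV;
import org.apache.beam.sdk.values.TypeDescriptors;
import org.apache.commons.configuration2.Configuration;
import org.apache.kafka.common.serialization.StringSerializer;
import theodolite.commons.beam.kafka.KafkaActivePowerTimestampReader;
import theodolite.commons.beam.kafka.KafkaWriterTransformation;
import titan.ccp.model.records.ActivePowerRecord;
/**
 * Hypothetical example pipeline showing how buildConsumerConfig() and
 * buildProducerConfig() are intended to be used (illustrative sketch only).
 */
public class ExamplePipeline extends AbstractPipeline {
  protected ExamplePipeline(final PipelineOptions options, final Configuration config) {
    super(options, config); // assumed constructor signature of AbstractPipeline
    // Derive Kafka consumer and producer settings once from the service configuration.
    final Map<String, Object> consumerConfig = this.buildConsumerConfig();
    final Map<String, Object> producerConfig = this.buildProducerConfig();
    this
        // Read keyed ActivePowerRecords from Kafka with event-time timestamps.
        .apply(new KafkaActivePowerTimestampReader(
            this.bootstrapServer, this.inputTopic, consumerConfig))
        // Placeholder processing step: render every record as a string.
        .apply(MapElements
            .into(TypeDescriptors.kvs(TypeDescriptors.strings(), TypeDescriptors.strings()))
            .via((KV<String, ActivePowerRecord> kv) -> KV.of(kv.getKey(), kv.getValue().toString())))
        // Write the results back to Kafka, forwarding the producer configuration
        // (e.g. the schema registry URL) to the writer transformation.
        .apply(new KafkaWriterTransformation<>(
            this.bootstrapServer, "output", StringSerializer.class, producerConfig));
  }
}
Building both maps once in the constructor keeps all Kafka and Schema Registry settings in a single place, which is what the added buildProducerConfig() method enables.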
package theodolite.commons.beam.kafka;
import io.confluent.kafka.streams.serdes.avro.SpecificAvroDeserializer;
import org.apache.kafka.common.serialization.Deserializer;
import titan.ccp.model.records.ActivePowerRecord;
/**
* A Kafka {@link Deserializer} for typed Schema Registry {@link ActivePowerRecord}.
*/
public class ActivePowerRecordDeserializer extends SpecificAvroDeserializer<ActivePowerRecord> {
}
package theodolite.commons.beam.kafka;
import io.confluent.kafka.serializers.KafkaAvroDeserializer;
import java.util.Map;
import org.apache.beam.sdk.coders.AvroCoder;
import org.apache.beam.sdk.io.kafka.KafkaIO;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.values.KV;
import org.apache.beam.sdk.values.PBegin;
import org.apache.beam.sdk.values.PCollection;
import org.apache.kafka.common.serialization.StringDeserializer;
import titan.ccp.model.records.ActivePowerRecord;
/**
* Simple {@link PTransform} that reads from Kafka using {@link KafkaIO}.
*/
public class KafkaActivePowerRecordReader extends
PTransform<PBegin, PCollection<KV<String, ActivePowerRecord>>> {
private static final long serialVersionUID = 2603286150183186115L;
private final PTransform<PBegin, PCollection<KV<String, ActivePowerRecord>>> reader;
/**
* Instantiates a {@link PTransform} that reads from Kafka with the given Configuration.
*/
public KafkaActivePowerRecordReader(final String bootstrapServer, final String inputTopic,
final Map<String, Object> consumerConfig) {
super();
if (bootstrapServer == null) {
throw new IllegalArgumentException("bootstrapServer is null");
}
if (inputTopic == null) {
throw new IllegalArgumentException("inputTopic is null");
}
// Check if bootstrap server and inputTopic are defined
if (bootstrapServer.isEmpty() || inputTopic.isEmpty()) {
throw new IllegalArgumentException("bootstrapServer or inputTopic missing");
}
reader =
KafkaIO.<String, ActivePowerRecord>read()
.withBootstrapServers(bootstrapServer)
.withTopic(inputTopic)
.withKeyDeserializer(StringDeserializer.class)
.withValueDeserializerAndCoder((Class) KafkaAvroDeserializer.class,
AvroCoder.of(ActivePowerRecord.class))
.withConsumerConfigUpdates(consumerConfig)
.withoutMetadata();
}
@Override
public PCollection<KV<String, ActivePowerRecord>> expand(final PBegin input) {
return input.apply(this.reader);
}
}
package theodolite.commons.beam.kafka;
import io.confluent.kafka.serializers.KafkaAvroDeserializer;
import java.util.Map;
import org.apache.beam.sdk.coders.AvroCoder;
import org.apache.beam.sdk.io.kafka.KafkaIO;
@@ -12,39 +11,36 @@ import org.apache.kafka.common.serialization.StringDeserializer;
import titan.ccp.model.records.ActivePowerRecord;
/**
* Simple {@link PTransform} that read from Kafka using {@link KafkaIO}.
* Has additional a TimestampPolicy.
* Simple {@link PTransform} that reads from Kafka using {@link KafkaIO} with event time.
*/
public class KafkaActivePowerTimestampReader extends
PTransform<PBegin, PCollection<KV<String, ActivePowerRecord>>> {
public class KafkaActivePowerTimestampReader
extends PTransform<PBegin, PCollection<KV<String, ActivePowerRecord>>> {
private static final long serialVersionUID = 2603286150183186115L;
private final PTransform<PBegin, PCollection<KV<String, ActivePowerRecord>>> reader;
/**
* Instantiates a {@link PTransform} that reads from Kafka with the given Configuration.
*/
public KafkaActivePowerTimestampReader(final String bootstrapServer, final String inputTopic,
public KafkaActivePowerTimestampReader(
final String bootstrapServer,
final String inputTopic,
final Map<String, Object> consumerConfig) {
super();
// Check if boostrap server and inputTopic are defined
// Check if bootstrap server and inputTopic are defined
if (bootstrapServer.isEmpty() || inputTopic.isEmpty()) {
throw new IllegalArgumentException("bootstrapServer or inputTopic missing");
}
reader =
KafkaIO.<String, ActivePowerRecord>read()
.withBootstrapServers(bootstrapServer)
.withTopic(inputTopic)
.withKeyDeserializer(StringDeserializer.class)
.withValueDeserializerAndCoder((Class) KafkaAvroDeserializer.class,
this.reader = KafkaIO.<String, ActivePowerRecord>read().withBootstrapServers(bootstrapServer)
.withTopic(inputTopic).withKeyDeserializer(StringDeserializer.class)
.withValueDeserializerAndCoder(
ActivePowerRecordDeserializer.class,
AvroCoder.of(ActivePowerRecord.class))
.withConsumerConfigUpdates(consumerConfig)
// Set TimestampPolicy for event time
.withTimestampPolicyFactory(
(tp, previousWaterMark) -> new EventTimePolicy(previousWaterMark))
(tp, previousWatermark) -> new EventTimePolicy(previousWatermark))
.withoutMetadata();
}
......
package theodolite.commons.beam.kafka;
import java.util.Map;
import org.apache.beam.sdk.io.kafka.KafkaIO;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.values.KV;
@@ -9,23 +10,35 @@ import org.apache.kafka.common.serialization.Serializer;
import org.apache.kafka.common.serialization.StringSerializer;
/**
* Wrapper for a Kafka writing Transformation
* where the value type can be generic.
* Wrapper for a Kafka writing Transformation where the value type can be generic.
*
* @param <T> type of the value.
*/
public class KafkaWriterTransformation<T> extends
PTransform<PCollection<KV<String, T>>, PDone> {
public class KafkaWriterTransformation<T> extends PTransform<PCollection<KV<String, T>>, PDone> {
private static final long serialVersionUID = 3171423303843174723L;
private final PTransform<PCollection<KV<String, T>>, PDone> writer;
/**
* Creates a new kafka writer transformation.
* Creates a new Kafka writer transformation.
*/
public KafkaWriterTransformation(final String bootstrapServer, final String outputTopic,
public KafkaWriterTransformation(
final String bootstrapServer,
final String outputTopic,
final Class<? extends Serializer<T>> valueSerializer) {
this(bootstrapServer, outputTopic, valueSerializer, Map.of());
}
/**
* Creates a new Kafka writer transformation.
*/
public KafkaWriterTransformation(
final String bootstrapServer,
final String outputTopic,
final Class<? extends Serializer<T>> valueSerializer,
final Map<String, Object> producerConfig) {
super();
// Check if boostrap server and outputTopic are defined
// Check if bootstrap server and outputTopic are defined
if (bootstrapServer.isEmpty() || outputTopic.isEmpty()) {
throw new IllegalArgumentException("bootstrapServer or outputTopic missing");
}
@@ -34,7 +47,8 @@ public class KafkaWriterTransformation<T> extends
.withBootstrapServers(bootstrapServer)
.withTopic(outputTopic)
.withKeySerializer(StringSerializer.class)
.withValueSerializer(valueSerializer);
.withValueSerializer(valueSerializer)
.withProducerConfigUpdates(producerConfig);
}
......
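The new producer-config parameter is useful mainly when the value serializer talks to the Schema Registry, since an Avro serializer needs the schema.registry.url setting. A hypothetical typed serializer, mirroring the ActivePowerRecordDeserializer introduced above, could then be used as follows; the class, topic, and addresses are illustrative assumptions, not part of this commit.
package theodolite.commons.beam.kafka;
import io.confluent.kafka.streams.serdes.avro.SpecificAvroSerializer;
import titan.ccp.model.records.ActivePowerRecord;
/**
 * A Kafka Serializer for typed Schema Registry ActivePowerRecords
 * (hypothetical counterpart to ActivePowerRecordDeserializer above).
 */
public class ActivePowerRecordSerializer extends SpecificAvroSerializer<ActivePowerRecord> {
}
// With such a serializer, the new overload can forward the registry settings, e.g.:
//
//   new KafkaWriterTransformation<>(
//       "localhost:9092",                  // bootstrap server (placeholder)
//       "output",                          // output topic (placeholder)
//       ActivePowerRecordSerializer.class,
//       Map.of("schema.registry.url", "http://localhost:8081"));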
@@ -19,7 +19,7 @@ services:
KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 30000
KAFKA_AUTO_CREATE_TOPICS_ENABLE: "false"
KAFKA_CREATE_TOPICS: "input:3:1,output:3:1,configuration:3:1,aggregation-feedback:3:1"
KAFKA_CREATE_TOPICS: "input:3:1"
schema-registry:
image: confluentinc/cp-schema-registry:5.3.1
depends_on:
@@ -33,7 +33,7 @@ services:
SCHEMA_REGISTRY_HOST_NAME: schema-registry
SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
load-generator:
image: ghcr.io/cau-se/theodolite-uc1-workload-generator:latest
image: ghcr.io/cau-se/theodolite-uc1-workload-generator:${THEODOLITE_TAG:-latest}
depends_on:
- schema-registry
- kafka
@@ -44,7 +44,7 @@ services:
SCHEMA_REGISTRY_URL: http://schema-registry:8081
NUM_SENSORS: 10
benchmark-jobmanager:
image: ghcr.io/cau-se/theodolite-uc1-beam-flink:latest
image: ghcr.io/cau-se/theodolite-uc1-beam-flink:${THEODOLITE_TAG:-latest}
#ports:
# - "8080:8081"
command: >
@@ -62,7 +62,7 @@ services:
- schema-registry
- kafka
benchmark-taskmanager:
image: ghcr.io/cau-se/theodolite-uc1-beam-flink:latest
image: ghcr.io/cau-se/theodolite-uc1-beam-flink:${THEODOLITE_TAG:-latest}
scale: 1
command: taskmanager
environment:
......
@@ -21,7 +21,7 @@ services:
KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 30000
KAFKA_AUTO_CREATE_TOPICS_ENABLE: "false"
KAFKA_CREATE_TOPICS: "input:3:1,output:3:1,configuration:3:1,aggregation-feedback:3:1"
KAFKA_CREATE_TOPICS: "input:3:1"
schema-registry:
image: confluentinc/cp-schema-registry:5.3.1
depends_on:
@@ -35,7 +35,7 @@ services:
SCHEMA_REGISTRY_HOST_NAME: schema-registry
SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
benchmark:
image: ghcr.io/cau-se/theodolite-uc1-beam-samza:latest
image: ghcr.io/cau-se/theodolite-uc1-beam-samza:${THEODOLITE_TAG:-latest}
scale: 1
depends_on:
- schema-registry
@@ -47,7 +47,7 @@ services:
KAFKA_BOOTSTRAP_SERVERS: kafka:9092
SCHEMA_REGISTRY_URL: http://schema-registry:8081
load-generator:
image: ghcr.io/cau-se/theodolite-uc1-workload-generator:latest
image: ghcr.io/cau-se/theodolite-uc1-workload-generator:${THEODOLITE_TAG:-latest}
depends_on:
- schema-registry
- kafka
......
@@ -19,7 +19,7 @@ services:
KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 30000
KAFKA_AUTO_CREATE_TOPICS_ENABLE: "false"
KAFKA_CREATE_TOPICS: "input:3:1,output:3:1,configuration:3:1,aggregation-feedback:3:1"
KAFKA_CREATE_TOPICS: "input:3:1"
schema-registry:
image: confluentinc/cp-schema-registry:5.3.1
depends_on:
@@ -33,7 +33,7 @@ services:
SCHEMA_REGISTRY_HOST_NAME: schema-registry
SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
load-generator:
image: ghcr.io/cau-se/theodolite-uc1-workload-generator:latest
image: ghcr.io/cau-se/theodolite-uc1-workload-generator:${THEODOLITE_TAG:-latest}
depends_on:
- schema-registry
- kafka
@@ -44,7 +44,7 @@ services:
SCHEMA_REGISTRY_URL: http://schema-registry:8081
NUM_SENSORS: 10
benchmark-jobmanager:
image: ghcr.io/cau-se/theodolite-uc1-flink:latest
image: ghcr.io/cau-se/theodolite-uc1-flink:${THEODOLITE_TAG:-latest}
#ports:
# - "8080:8081"
command: standalone-job --job-classname theodolite.uc1.application.HistoryServiceFlinkJob
@@ -59,7 +59,7 @@ services:
- schema-registry
- kafka
benchmark-taskmanager:
image: ghcr.io/cau-se/theodolite-uc1-flink:latest
image: ghcr.io/cau-se/theodolite-uc1-flink:${THEODOLITE_TAG:-latest}
command: taskmanager
environment:
- |
......
@@ -19,7 +19,7 @@ services:
KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 30000
KAFKA_AUTO_CREATE_TOPICS_ENABLE: "false"
KAFKA_CREATE_TOPICS: "input:3:1,output:3:1,configuration:3:1,aggregation-feedback:3:1"
KAFKA_CREATE_TOPICS: "input:3:1"
schema-registry:
image: confluentinc/cp-schema-registry:5.3.1
depends_on:
@@ -33,7 +33,7 @@ services:
SCHEMA_REGISTRY_HOST_NAME: schema-registry
SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
benchmark:
image: ghcr.io/cau-se/theodolite-uc1-kstreams-app:latest
image: ghcr.io/cau-se/theodolite-uc1-kstreams-app:${THEODOLITE_TAG:-latest}
depends_on:
- schema-registry
- kafka
@@ -41,7 +41,7 @@ services:
KAFKA_BOOTSTRAP_SERVERS: kafka:9092
SCHEMA_REGISTRY_URL: http://schema-registry:8081
load-generator:
image: ghcr.io/cau-se/theodolite-uc1-workload-generator:latest
image: ghcr.io/cau-se/theodolite-uc1-workload-generator:${THEODOLITE_TAG:-latest}
depends_on:
- schema-registry
- kafka
......
@@ -19,7 +19,7 @@ services:
KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 30000
KAFKA_AUTO_CREATE_TOPICS_ENABLE: "false"
KAFKA_CREATE_TOPICS: "input:3:1,output:3:1,configuration:3:1,aggregation-feedback:3:1"
KAFKA_CREATE_TOPICS: "input:3:1,output:3:1"
schema-registry:
image: confluentinc/cp-schema-registry:5.3.1
depends_on:
@@ -33,7 +33,7 @@ services:
SCHEMA_REGISTRY_HOST_NAME: schema-registry
SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
load-generator:
image: ghcr.io/cau-se/theodolite-uc2-workload-generator:latest
image: ghcr.io/cau-se/theodolite-uc2-workload-generator:${THEODOLITE_TAG:-latest}
depends_on:
- schema-registry
- kafka
@@ -44,7 +44,7 @@ services:
SCHEMA_REGISTRY_URL: http://schema-registry:8081
NUM_SENSORS: 10
benchmark-jobmanager:
image: ghcr.io/cau-se/theodolite-uc2-beam-flink:latest
image: ghcr.io/cau-se/theodolite-uc2-beam-flink:${THEODOLITE_TAG:-latest}
#ports:
# - "8080:8081"
command: >
@@ -62,7 +62,7 @@ services:
- schema-registry
- kafka
benchmark-taskmanager:
image: ghcr.io/cau-se/theodolite-uc2-beam-flink:latest
image: ghcr.io/cau-se/theodolite-uc2-beam-flink:${THEODOLITE_TAG:-latest}
scale: 1
command: taskmanager
environment:
......
@@ -21,7 +21,7 @@ services:
KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 30000
KAFKA_AUTO_CREATE_TOPICS_ENABLE: "false"
KAFKA_CREATE_TOPICS: "input:3:1,output:3:1,configuration:3:1,aggregation-feedback:3:1"
KAFKA_CREATE_TOPICS: "input:3:1,output:3:1"
schema-registry:
image: confluentinc/cp-schema-registry:5.3.1
depends_on:
@@ -35,7 +35,7 @@ services:
SCHEMA_REGISTRY_HOST_NAME: schema-registry
SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
benchmark:
image: ghcr.io/cau-se/theodolite-uc2-beam-samza:latest
image: ghcr.io/cau-se/theodolite-uc2-beam-samza:${THEODOLITE_TAG:-latest}
scale: 1
depends_on:
- schema-registry
@@ -47,7 +47,7 @@ services:
KAFKA_BOOTSTRAP_SERVERS: kafka:9092
SCHEMA_REGISTRY_URL: http://schema-registry:8081
load-generator:
image: ghcr.io/cau-se/theodolite-uc2-workload-generator:latest
image: ghcr.io/cau-se/theodolite-uc2-workload-generator:${THEODOLITE_TAG:-latest}
depends_on:
- schema-registry
- kafka
......
@@ -19,7 +19,7 @@ services:
KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 30000
KAFKA_AUTO_CREATE_TOPICS_ENABLE: "false"
KAFKA_CREATE_TOPICS: "input:3:1,output:3:1,configuration:3:1,aggregation-feedback:3:1"
KAFKA_CREATE_TOPICS: "input:3:1,output:3:1"
schema-registry:
image: confluentinc/cp-schema-registry:5.3.1
depends_on:
@@ -33,7 +33,7 @@ services:
SCHEMA_REGISTRY_HOST_NAME: schema-registry
SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
load-generator:
image: ghcr.io/cau-se/theodolite-uc2-workload-generator:latest
image: ghcr.io/cau-se/theodolite-uc2-workload-generator:${THEODOLITE_TAG:-latest}
depends_on:
- schema-registry
- kafka
@@ -44,7 +44,7 @@ services:
SCHEMA_REGISTRY_URL: http://schema-registry:8081
NUM_SENSORS: 10
benchmark-jobmanager:
image: ghcr.io/cau-se/theodolite-uc2-flink:latest
image: ghcr.io/cau-se/theodolite-uc2-flink:${THEODOLITE_TAG:-latest}
#ports:
# - "8080:8081"
command: standalone-job --job-classname theodolite.uc2.application.HistoryServiceFlinkJob
@@ -59,7 +59,7 @@ services:
- schema-registry
- kafka
benchmark-taskmanager:
image: ghcr.io/cau-se/theodolite-uc2-flink:latest
image: ghcr.io/cau-se/theodolite-uc2-flink:${THEODOLITE_TAG:-latest}
command: taskmanager
environment:
- |
......
@@ -19,7 +19,7 @@ services:
KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 30000
KAFKA_AUTO_CREATE_TOPICS_ENABLE: "false"
KAFKA_CREATE_TOPICS: "input:3:1,output:3:1,configuration:3:1,aggregation-feedback:3:1"
KAFKA_CREATE_TOPICS: "input:3:1,output:3:1"
schema-registry:
image: confluentinc/cp-schema-registry:5.3.1
depends_on:
@@ -33,7 +33,7 @@ services:
SCHEMA_REGISTRY_HOST_NAME: schema-registry
SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
benchmark:
image: ghcr.io/cau-se/theodolite-uc2-kstreams-app:latest
image: ghcr.io/cau-se/theodolite-uc2-kstreams-app:${THEODOLITE_TAG:-latest}
depends_on:
- schema-registry
- kafka
@@ -42,7 +42,7 @@ services:
SCHEMA_REGISTRY_URL: http://schema-registry:8081
KAFKA_WINDOW_DURATION_MINUTES: 60
load-generator:
image: ghcr.io/cau-se/theodolite-uc2-workload-generator:latest
image: ghcr.io/cau-se/theodolite-uc2-workload-generator:${THEODOLITE_TAG:-latest}
depends_on:
- schema-registry
- kafka
......
@@ -19,7 +19,7 @@ services:
KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 30000
KAFKA_AUTO_CREATE_TOPICS_ENABLE: "false"
KAFKA_CREATE_TOPICS: "input:3:1,output:3:1,configuration:3:1,aggregation-feedback:3:1"
KAFKA_CREATE_TOPICS: "input:3:1,output:3:1"
schema-registry:
image: confluentinc/cp-schema-registry:5.3.1
depends_on:
@@ -33,7 +33,7 @@ services:
SCHEMA_REGISTRY_HOST_NAME: schema-registry
SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
load-generator:
image: ghcr.io/cau-se/theodolite-uc3-workload-generator:latest
image: ghcr.io/cau-se/theodolite-uc3-workload-generator:${THEODOLITE_TAG:-latest}
depends_on:
- schema-registry
- kafka
@@ -44,7 +44,7 @@ services:
SCHEMA_REGISTRY_URL: http://schema-registry:8081
NUM_SENSORS: 10
benchmark-jobmanager:
image: ghcr.io/cau-se/theodolite-uc3-beam-flink:latest
image: ghcr.io/cau-se/theodolite-uc3-beam-flink:${THEODOLITE_TAG:-latest}
#ports:
# - "8080:8081"
command: >
@@ -64,7 +64,7 @@ services:
- schema-registry
- kafka
benchmark-taskmanager:
image: ghcr.io/cau-se/theodolite-uc3-beam-flink:latest
image: ghcr.io/cau-se/theodolite-uc3-beam-flink:${THEODOLITE_TAG:-latest}
scale: 1
command: taskmanager
environment:
......
@@ -21,7 +21,7 @@ services:
KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 30000
KAFKA_AUTO_CREATE_TOPICS_ENABLE: "false"
KAFKA_CREATE_TOPICS: "input:3:1,output:3:1,configuration:3:1,aggregation-feedback:3:1"
KAFKA_CREATE_TOPICS: "input:3:1,output:3:1"
schema-registry:
image: confluentinc/cp-schema-registry:5.3.1
depends_on:
@@ -35,7 +35,7 @@ services:
SCHEMA_REGISTRY_HOST_NAME: schema-registry
SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
benchmark:
image: ghcr.io/cau-se/theodolite-uc3-beam-samza:latest
image: ghcr.io/cau-se/theodolite-uc3-beam-samza:${THEODOLITE_TAG:-latest}
scale: 1
depends_on:
- schema-registry
@@ -47,7 +47,7 @@ services:
KAFKA_BOOTSTRAP_SERVERS: kafka:9092
SCHEMA_REGISTRY_URL: http://schema-registry:8081
load-generator:
image: ghcr.io/cau-se/theodolite-uc3-workload-generator:latest
image: ghcr.io/cau-se/theodolite-uc3-workload-generator:${THEODOLITE_TAG:-latest}
depends_on:
- schema-registry
- kafka
......
@@ -19,7 +19,7 @@ services:
KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 30000
KAFKA_AUTO_CREATE_TOPICS_ENABLE: "false"
KAFKA_CREATE_TOPICS: "input:3:1,output:3:1,configuration:3:1,aggregation-feedback:3:1"
KAFKA_CREATE_TOPICS: "input:3:1,output:3:1"
schema-registry:
image: confluentinc/cp-schema-registry:5.3.1
depends_on:
@@ -33,7 +33,7 @@ services:
SCHEMA_REGISTRY_HOST_NAME: schema-registry
SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
load-generator:
image: ghcr.io/cau-se/theodolite-uc3-workload-generator:latest
image: ghcr.io/cau-se/theodolite-uc3-workload-generator:${THEODOLITE_TAG:-latest}
depends_on:
- schema-registry
- kafka
@@ -44,7 +44,7 @@ services:
SCHEMA_REGISTRY_URL: http://schema-registry:8081
NUM_SENSORS: 10
benchmark-jobmanager:
image: ghcr.io/cau-se/theodolite-uc3-flink:latest
image: ghcr.io/cau-se/theodolite-uc3-flink:${THEODOLITE_TAG:-latest}
#ports:
# - "8080:8081"
command: standalone-job --job-classname theodolite.uc3.application.HistoryServiceFlinkJob
@@ -59,7 +59,7 @@ services:
- schema-registry
- kafka
benchmark-taskmanager:
image: ghcr.io/cau-se/theodolite-uc3-flink:latest
image: ghcr.io/cau-se/theodolite-uc3-flink:${THEODOLITE_TAG:-latest}
command: taskmanager
environment:
- |
......