Skip to content
GitLab
Explore
Sign in
Primary navigation
Search or go to…
Project
T
theodolite
Manage
Activity
Members
Labels
Plan
Issues
Issue boards
Milestones
Code
Merge requests
Repository
Branches
Commits
Tags
Repository graph
Compare revisions
Build
Pipelines
Jobs
Pipeline schedules
Artifacts
Deploy
Releases
Model registry
Analyze
Contributor analytics
Model experiments
Help
Help
Support
GitLab documentation
Compare GitLab plans
Community forum
Contribute to GitLab
Provide feedback
Terms and privacy
Keyboard shortcuts
?
Snippets
Groups
Projects
Show more breadcrumbs
Sören Henning
theodolite
Commits
99313bef
Commit
99313bef
authored
3 years ago
by
Lorenz Boguhn
Browse files
Options
Downloads
Patches
Plain Diff
Remove Uc1ApplicationBeam introduced through merging
parent
0c707a65
No related branches found
Branches containing commit
No related tags found
Tags containing commit
1 merge request
!187
Migrate Beam benchmark implementation
Changes
1
Hide whitespace changes
Inline
Side-by-side
Showing
1 changed file
theodolite-benchmarks/uc1-beam-flink/src/main/java/application/Uc1ApplicationBeam.java
+0
-115
0 additions, 115 deletions
...m-flink/src/main/java/application/Uc1ApplicationBeam.java
with
0 additions
and
115 deletions
theodolite-benchmarks/uc1-beam-flink/src/main/java/application/Uc1ApplicationBeam.java
deleted
100644 → 0
+
0
−
115
View file @
0c707a65
package
application
;
import
com.google.gson.Gson
;
import
java.util.Properties
;
import
org.apache.beam.runners.flink.FlinkRunner
;
import
org.apache.beam.sdk.Pipeline
;
import
org.apache.beam.sdk.coders.AvroCoder
;
import
org.apache.beam.sdk.coders.CoderRegistry
;
import
org.apache.beam.sdk.transforms.DoFn
;
import
org.apache.beam.sdk.transforms.MapElements
;
import
org.apache.beam.sdk.transforms.PTransform
;
import
org.apache.beam.sdk.transforms.ParDo
;
import
org.apache.beam.sdk.transforms.SimpleFunction
;
import
org.apache.beam.sdk.values.KV
;
import
org.apache.beam.sdk.values.PBegin
;
import
org.apache.beam.sdk.values.PCollection
;
import
org.slf4j.Logger
;
import
org.slf4j.LoggerFactory
;
import
theodolite.commons.beam.AbstractBeamService
;
import
theodolite.commons.beam.ConfigurationKeys
;
import
theodolite.commons.beam.kafka.KafkaActivePowerRecordReader
;
import
titan.ccp.model.records.ActivePowerRecord
;
/**
* Implementation of the use case Database Storage using Apache Beam with the Flink Runner. To
* execute locally in standalone start Kafka, Zookeeper, the schema-registry and the workload
* generator using the delayed_startup.sh script. Start a Flink cluster and pass its REST adress
* using--flinkMaster as run parameter. To persist logs add
* ${workspace_loc:/uc1-application-samza/eclipseConsoleLogs.log} as Output File under Standard
* Input Output in Common in the Run Configuration Start via Eclipse Run.
*/
public
final
class
Uc1ApplicationBeam
extends
AbstractBeamService
{
private
static
final
Logger
LOGGER
=
LoggerFactory
.
getLogger
(
Uc1ApplicationBeam
.
class
);
private
final
String
inputTopic
=
CONFIG
.
getString
(
ConfigurationKeys
.
KAFKA_INPUT_TOPIC
);
private
final
String
bootstrapServer
=
CONFIG
.
getString
(
ConfigurationKeys
.
KAFKA_BOOTSTRAP_SERVERS
);
/**
* Private constructor setting specific options for this use case.
*/
private
Uc1ApplicationBeam
(
final
String
[]
args
)
{
//NOPMD
super
(
args
);
LOGGER
.
info
(
this
.
options
.
toString
());
this
.
options
.
setRunner
(
FlinkRunner
.
class
);
}
/**
* Main method.
*/
@SuppressWarnings
({
"unchecked"
,
"rawtypes"
,
"unused"
})
public
static
void
main
(
final
String
[]
args
)
{
final
Uc1ApplicationBeam
uc1
=
new
Uc1ApplicationBeam
(
args
);
// create pipeline
final
Pipeline
pipeline
=
Pipeline
.
create
(
uc1
.
options
);
// Set Coders for Classes that will be distributed
final
CoderRegistry
cr
=
pipeline
.
getCoderRegistry
();
cr
.
registerCoderForClass
(
ActivePowerRecord
.
class
,
AvroCoder
.
of
(
ActivePowerRecord
.
SCHEMA
$
));
// build KafkaConsumerConfig
final
Properties
consumerConfig
=
uc1
.
buildConsumerConfig
();
// Create Pipeline transformations
final
PTransform
<
PBegin
,
PCollection
<
KV
<
String
,
ActivePowerRecord
>>>
kafka
=
new
KafkaActivePowerRecordReader
(
uc1
.
bootstrapServer
,
uc1
.
inputTopic
,
consumerConfig
);
final
LogKeyValue
logKeyValue
=
new
LogKeyValue
();
// Apply pipeline transformations
// Read from Kafka
pipeline
.
apply
(
kafka
)
// Map to Gson
.
apply
(
MapElements
.
via
(
new
SimpleFunction
<
KV
<
String
,
ActivePowerRecord
>,
KV
<
String
,
String
>>()
{
private
transient
Gson
gsonObj
=
new
Gson
();
@Override
public
KV
<
String
,
String
>
apply
(
final
KV
<
String
,
ActivePowerRecord
>
kv
)
{
if
(
this
.
gsonObj
==
null
)
{
this
.
gsonObj
=
new
Gson
();
}
final
String
gson
=
this
.
gsonObj
.
toJson
(
kv
.
getValue
());
return
KV
.
of
(
kv
.
getKey
(),
gson
);
}
}))
// Print to console
.
apply
(
ParDo
.
of
(
logKeyValue
));
// Submit job and start execution
pipeline
.
run
().
waitUntilFinish
();
}
/**
* Logs all Key Value pairs.
*/
@SuppressWarnings
({
"unused"
})
private
static
class
LogKeyValue
extends
DoFn
<
KV
<
String
,
String
>,
KV
<
String
,
String
>>
{
private
static
final
long
serialVersionUID
=
4328743
;
@ProcessElement
public
void
processElement
(
@Element
final
KV
<
String
,
String
>
kv
,
final
OutputReceiver
<
KV
<
String
,
String
>>
out
)
{
if
(
LOGGER
.
isInfoEnabled
())
{
LOGGER
.
info
(
"Key: "
+
kv
.
getKey
()
+
"Value: "
+
kv
.
getValue
());
}
}
}
}
This diff is collapsed.
Click to expand it.
Preview
0%
Loading
Try again
or
attach a new file
.
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Save comment
Cancel
Please
register
or
sign in
to comment