-
Notifications
You must be signed in to change notification settings - Fork 115
/
StreamsApp.java
112 lines (87 loc) · 3.51 KB
/
StreamsApp.java
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
package io.confluent.parallelconsumer.examples.streams;
/*-
* Copyright (C) 2020 Confluent, Inc.
*/
import io.confluent.parallelconsumer.ParallelConsumerOptions;
import io.confluent.parallelconsumer.ParallelStreamProcessor;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.RandomUtils;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.Topology;
import pl.tlinkowski.unij.api.UniLists;
import java.util.Properties;
import java.util.concurrent.atomic.AtomicInteger;
@Slf4j
public class StreamsApp {
    /** Topic the Kafka Streams stage consumes from; randomized suffix avoids collisions across runs. */
    static String inputTopic = "input-topic-" + RandomUtils.nextInt();
    /** Topic the Streams stage writes to and the parallel consumer then reads from. */
    static String outputTopicName = "my-output-topic-" + RandomUtils.nextInt();

    // NOTE(review): empty Properties means no bootstrap servers / deserializers are configured here;
    // presumably these factory methods are overridden in tests — confirm against callers.
    Consumer<String, String> getKafkaConsumer() {
        return new KafkaConsumer<>(new Properties());
    }

    Producer<String, String> getKafkaProducer() {
        return new KafkaProducer<>(new Properties());
    }

    KafkaStreams streams;
    ParallelStreamProcessor<String, String> parallelConsumer;
    // Counts records handled by the parallel-consumer stage (e.g. for test assertions).
    AtomicInteger messageCount = new AtomicInteger();

    // tag::example[]
    void run() {
        preprocess(); // <1>
        concurrentProcess(); // <2>
    }

    void preprocess() {
        StreamsBuilder builder = new StreamsBuilder();
        builder.<String, String>stream(inputTopic)
                .mapValues((key, value) -> {
                    log.info("Streams preprocessing key: {} value: {}", key, value);
                    return String.valueOf(value.length());
                })
                .to(outputTopicName);
        startStreams(builder.build());
    }

    void startStreams(Topology topology) {
        streams = new KafkaStreams(topology, getStreamsProperties());
        streams.start();
    }

    void concurrentProcess() {
        setupParallelConsumer();
        parallelConsumer.poll(record -> {
            log.info("Concurrently processing a record: {}", record);
            messageCount.getAndIncrement();
        });
    }
    // end::example[]

    private void setupParallelConsumer() {
        Consumer<String, String> kafkaConsumer = getKafkaConsumer();
        Producer<String, String> kafkaProducer = getKafkaProducer();
        // KEY ordering: records sharing a key are processed in order, while distinct keys run concurrently.
        var options = ParallelConsumerOptions.<String, String>builder()
                .ordering(ParallelConsumerOptions.ProcessingOrder.KEY)
                .consumer(kafkaConsumer)
                .producer(kafkaProducer)
                .build();
        parallelConsumer = ParallelStreamProcessor.createEosStreamProcessor(options);
        parallelConsumer.subscribe(UniLists.of(outputTopicName));
    }

    Properties getStreamsProperties() {
        Properties props = new Properties();
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, this.getClass().getName());
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, getServerConfig());
        props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
        props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());
        return props;
    }

    String getServerConfig() {
        return "add your server here";
    }

    /**
     * Shuts down whichever stages have been started. Both fields are assigned lazily
     * ({@code streams} in {@link #startStreams}, {@code parallelConsumer} in
     * {@code setupParallelConsumer}), so each is null-checked — the original
     * unconditional calls threw {@link NullPointerException} when {@code close()}
     * was invoked before {@link #run()} or after a partial startup failure.
     */
    void close() {
        if (streams != null) {
            streams.close();
        }
        if (parallelConsumer != null) {
            parallelConsumer.close();
        }
    }
}