|
package com.baeldung.kafkastreams;

import java.util.Arrays;
import java.util.Locale;
import java.util.Properties;
import java.util.regex.Pattern;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.Topology;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.KTable;
import org.apache.kafka.streams.kstream.Produced;
import org.junit.Ignore;
import org.junit.Test;
| 19 | +public class KafkaStreamsLiveTest { |
| 20 | + private String bootstrapServers = "localhost:9092"; |
| 21 | + |
| 22 | + @Test |
| 23 | + @Ignore("it needs to have kafka broker running on local") |
| 24 | + public void shouldTestKafkaStreams() throws InterruptedException { |
| 25 | + //given |
| 26 | + String inputTopic = "inputTopic"; |
| 27 | + |
| 28 | + Properties streamsConfiguration = new Properties(); |
| 29 | + streamsConfiguration.put(StreamsConfig.APPLICATION_ID_CONFIG, "wordcount-live-test"); |
| 30 | + streamsConfiguration.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers); |
| 31 | + streamsConfiguration.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName()); |
| 32 | + streamsConfiguration.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName()); |
| 33 | + streamsConfiguration.put(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG, 1000); |
| 34 | + streamsConfiguration.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); |
| 35 | + // Use a temporary directory for storing state, which will be automatically removed after the test. |
| 36 | + // streamsConfiguration.put(StreamsConfig.STATE_DIR_CONFIG, TestUtils.tempDirectory().getAbsolutePath()); |
| 37 | + |
| 38 | + /* |
| 39 | + * final StreamsBuilder builder = new StreamsBuilder(); |
| 40 | + KStream<String, String> textLines = builder.stream(wordCountTopic, |
| 41 | + Consumed.with(Serdes.String(), Serdes.String())); |
| 42 | +
|
| 43 | + KTable<String, Long> wordCounts = textLines |
| 44 | + .flatMapValues(value -> Arrays.asList(value.toLowerCase(Locale.ROOT) |
| 45 | + .split("\\W+"))) |
| 46 | + .groupBy((key, word) -> word) |
| 47 | + .count(Materialized.<String, Long, KeyValueStore<Bytes, byte[]>> as("counts-store")); |
| 48 | + */ |
| 49 | + //when |
| 50 | + final StreamsBuilder builder = new StreamsBuilder(); |
| 51 | + KStream<String, String> textLines = builder.stream(inputTopic); |
| 52 | + Pattern pattern = Pattern.compile("\\W+", Pattern.UNICODE_CHARACTER_CLASS); |
| 53 | + |
| 54 | + KTable<String, Long> wordCounts = textLines |
| 55 | + .flatMapValues(value -> Arrays.asList(pattern.split(value.toLowerCase()))) |
| 56 | + .groupBy((key, word) -> word) |
| 57 | + .count(); |
| 58 | + |
| 59 | + wordCounts.toStream().foreach((word, count) -> System.out.println("word: " + word + " -> " + count)); |
| 60 | + |
| 61 | + String outputTopic = "outputTopic"; |
| 62 | + //final Serde<String> stringSerde = Serdes.String(); |
| 63 | + //final Serde<Long> longSerde = Serdes.Long(); |
| 64 | + //wordCounts.toStream().to(stringSerde, longSerde, outputTopic); |
| 65 | + |
| 66 | + wordCounts.toStream().to("outputTopic", |
| 67 | + Produced.with(Serdes.String(), Serdes.Long())); |
| 68 | + |
| 69 | + final Topology topology = builder.build(); |
| 70 | + KafkaStreams streams = new KafkaStreams(topology, streamsConfiguration); |
| 71 | + streams.start(); |
| 72 | + |
| 73 | + //then |
| 74 | + Thread.sleep(30000); |
| 75 | + streams.close(); |
| 76 | + } |
| 77 | +} |
0 commit comments