BAEL1426 - Kafka Transactional API. Updating the kafka-clients version to 2.0.0

Chirag Dewan 2018-09-06 23:19:26 +05:30
parent 99b800c5b9
commit 443df74d09
2 changed files with 140 additions and 3 deletions


@@ -795,8 +795,8 @@
        <url>http://dl.bintray.com/cuba-platform/main</url>
    </repository>
    <repository>
-       <id>Apache Staging</id>
-       <url>https://repository.apache.org/content/groups/staging</url>
+       <id>Maven Central</id>
+       <url>https://repo.maven.apache.org/maven2/</url>
    </repository>
</repositories>
@@ -912,6 +912,41 @@
</executions>
</plugin>
</plugins>
<pluginManagement>
    <plugins>
        <!-- This plugin's configuration is used to store Eclipse m2e settings only. It has no influence on the Maven build itself. -->
        <plugin>
            <groupId>org.eclipse.m2e</groupId>
            <artifactId>lifecycle-mapping</artifactId>
            <version>1.0.0</version>
            <configuration>
                <lifecycleMappingMetadata>
                    <pluginExecutions>
                        <pluginExecution>
                            <pluginExecutionFilter>
                                <groupId>org.apache.maven.plugins</groupId>
                                <artifactId>maven-pmd-plugin</artifactId>
                                <versionRange>[3.8,)</versionRange>
                                <goals>
                                    <goal>check</goal>
                                </goals>
                            </pluginExecutionFilter>
                            <action>
                                <ignore />
                            </action>
                        </pluginExecution>
                    </pluginExecutions>
                </lifecycleMappingMetadata>
            </configuration>
        </plugin>
    </plugins>
</pluginManagement>
</build>
<properties>
@@ -974,7 +1009,7 @@
    <caffeine.version>2.5.5</caffeine.version>
    <google-api.version>1.23.0</google-api.version>
    <google-sheets.version>v4-rev493-1.21.0</google-sheets.version>
-   <kafka.version>1.0.0</kafka.version>
+   <kafka.version>2.0.0</kafka.version>
    <smooks.version>1.7.0</smooks.version>
    <docker.version>3.0.14</docker.version>
    <tomcat.version>8.5.24</tomcat.version>
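
The bumped `kafka.version` property is presumably referenced by a `kafka-clients` dependency declared elsewhere in this pom; that dependency block is outside this diff, so the following is only a sketch of what such a declaration would look like:

<dependency>
    <groupId>org.apache.kafka</groupId>
    <artifactId>kafka-clients</artifactId>
    <version>${kafka.version}</version>
</dependency>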


@@ -0,0 +1,102 @@
package com.baeldung.kafka;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.consumer.OffsetAndMetadata;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.KafkaException;
import org.apache.kafka.common.TopicPartition;

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;

import static java.time.Duration.ofSeconds;
import static java.util.Collections.singleton;
import static org.apache.kafka.clients.consumer.ConsumerConfig.*;
// This single-type import is not redundant: both ConsumerConfig and ProducerConfig declare
// BOOTSTRAP_SERVERS_CONFIG, and an explicit static import takes precedence over the two
// wildcard imports, so the unqualified name below resolves to the consumer constant.
import static org.apache.kafka.clients.consumer.ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG;
import static org.apache.kafka.clients.producer.ProducerConfig.*;
public class TransactionalApp {

    private static final String CONSUMER_GROUP_ID = "test";
    private static final String OUTPUT_TOPIC = "output";
    private static final String INPUT_TOPIC = "input";

    public static void main(String[] args) {

        KafkaConsumer<String, String> consumer = initConsumer();
        KafkaProducer<String, String> producer = initProducer();

        // Registers the transactional.id with the broker and fences off any
        // earlier producer instance that used the same id.
        producer.initTransactions();

        try {
            while (true) {
                ConsumerRecords<String, String> records = consumer.poll(ofSeconds(20));

                producer.beginTransaction();

                // Forward each input record's key and value to the output topic.
                for (ConsumerRecord<String, String> record : records)
                    producer.send(new ProducerRecord<>(OUTPUT_TOPIC, record.key(), record.value()));

                // The committed offset must be the position of the next record to
                // consume, hence last processed offset + 1.
                Map<TopicPartition, OffsetAndMetadata> offsetsToCommit = new HashMap<>();
                for (TopicPartition partition : records.partitions()) {
                    List<ConsumerRecord<String, String>> partitionedRecords = records.records(partition);
                    long offset = partitionedRecords.get(partitionedRecords.size() - 1).offset();
                    offsetsToCommit.put(partition, new OffsetAndMetadata(offset + 1));
                }

                // Commit the consumer offsets atomically with the records produced above.
                producer.sendOffsetsToTransaction(offsetsToCommit, CONSUMER_GROUP_ID);
                producer.commitTransaction();
            }
        } catch (KafkaException e) {
            // Roll back the open transaction; none of its records or offsets become
            // visible to read_committed consumers. Note that fatal errors such as
            // ProducerFencedException require closing the producer instead.
            producer.abortTransaction();
        }
    }

    private static KafkaConsumer<String, String> initConsumer() {
        Properties props = new Properties();
        props.put(BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        props.put(GROUP_ID_CONFIG, CONSUMER_GROUP_ID);
        props.put(ENABLE_AUTO_COMMIT_CONFIG, "false");
        // Read only records from committed transactions.
        props.put(ISOLATION_LEVEL_CONFIG, "read_committed");
        props.put(KEY_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer");
        props.put(VALUE_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer");

        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
        consumer.subscribe(singleton(INPUT_TOPIC));
        return consumer;
    }

    private static KafkaProducer<String, String> initProducer() {
        Properties props = new Properties();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        props.put(ACKS_CONFIG, "all");
        props.put(RETRIES_CONFIG, 3);
        props.put(BATCH_SIZE_CONFIG, 16384);
        props.put(LINGER_MS_CONFIG, 1);
        props.put(BUFFER_MEMORY_CONFIG, 33554432);
        // Idempotence and a transactional.id are both required for transactions.
        props.put(ENABLE_IDEMPOTENCE_CONFIG, "true");
        props.put(TRANSACTIONAL_ID_CONFIG, "prod-1");
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer");
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer");

        return new KafkaProducer<>(props);
    }
}
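
To exercise TransactionalApp locally, something must write to the input topic first. Below is a minimal sketch of such a driver; the class name InputSeeder and the test data are illustrative only, and it assumes a broker on localhost:9092 with the input and output topics already created:

package com.baeldung.kafka;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;

import java.util.Properties;

public class InputSeeder {

    public static void main(String[] args) {
        Properties props = new Properties();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer");
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer");

        // A plain, non-transactional producer is enough to seed test data;
        // transactions are only needed on the consume-transform-produce side.
        // try-with-resources closes the producer, flushing any buffered sends.
        try (KafkaProducer<String, String> producer = new KafkaProducer<>(props)) {
            for (int i = 0; i < 10; i++) {
                producer.send(new ProducerRecord<>("input", "key-" + i, "value-" + i));
            }
        }
    }
}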