format the code

Amit Kumar 2024-02-15 23:17:30 +05:30
parent 6cd68f9e48
commit b57553eeb0
5 changed files with 64 additions and 60 deletions

com/baeldung/kafka/commitoffset/AsyncCommit.java (after formatting)

package com.baeldung.kafka.commitoffset;

import com.baeldung.kafka.commitoffset.config.KafkaConfigProperties;

import java.time.Duration;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

public class AsyncCommit {

    public static void main(String[] args) {
        KafkaConsumer<Long, String> consumer = new KafkaConsumer<>(KafkaConfigProperties.getProperties());
        consumer.subscribe(KafkaConfigProperties.getTopic());
        ConsumerRecords<Long, String> messages = consumer.poll(Duration.ofSeconds(10));
        for (ConsumerRecord<Long, String> message : messages) {
            // processed message
            consumer.commitAsync();
        }
    }
}

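commitAsync() returns immediately and, unlike commitSync(), does not retry a failed commit, so the call usually gets an OffsetCommitCallback that at least logs failures. Below is a minimal sketch of that variant, assuming the same KafkaConfigProperties helper; the class name AsyncCommitWithCallback and the System.err logging are illustrative, not part of this commit:

package com.baeldung.kafka.commitoffset;

import com.baeldung.kafka.commitoffset.config.KafkaConfigProperties;

import java.time.Duration;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

public class AsyncCommitWithCallback {

    public static void main(String[] args) {
        KafkaConsumer<Long, String> consumer = new KafkaConsumer<>(KafkaConfigProperties.getProperties());
        consumer.subscribe(KafkaConfigProperties.getTopic());
        ConsumerRecords<Long, String> messages = consumer.poll(Duration.ofSeconds(10));
        for (ConsumerRecord<Long, String> message : messages) {
            // processed message
            // the callback runs on the consumer thread, during a later poll() or on close()
            consumer.commitAsync((offsets, exception) -> {
                if (exception != null) {
                    System.err.println("Commit failed for " + offsets + ": " + exception.getMessage());
                }
            });
        }
        // close the consumer cleanly
        consumer.close();
    }
}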
com/baeldung/kafka/commitoffset/AutomaticCommit.java (after formatting)

package com.baeldung.kafka.commitoffset;

import com.baeldung.kafka.commitoffset.config.KafkaConfigProperties;

import java.time.Duration;
import java.util.Properties;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

public class AutomaticCommit {

    public static void main(String[] args) {
        Properties properties = KafkaConfigProperties.getProperties();
        properties.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true");
        KafkaConsumer<Long, String> consumer = new KafkaConsumer<>(properties);
        consumer.subscribe(KafkaConfigProperties.getTopic());
        ConsumerRecords<Long, String> messages = consumer.poll(Duration.ofSeconds(10));
        for (ConsumerRecord<Long, String> message : messages) {
            // processed message
        }
    }
}

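With enable.auto.commit set to true, the consumer commits the offsets returned by the most recent poll() from inside later poll() calls (and on close()), no more often than auto.commit.interval.ms, which defaults to 5000 ms. A small variation on the first lines of main() above that sets the interval explicitly; the 1000 ms value is just an example:

// assumption: same KafkaConfigProperties helper as above
Properties properties = KafkaConfigProperties.getProperties();
properties.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true");
// commit the offsets of the last poll() roughly once per second instead of every 5 s
properties.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, "1000");
KafkaConsumer<Long, String> consumer = new KafkaConsumer<>(properties);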
com/baeldung/kafka/commitoffset/SpecificOffsetCommit.java (after formatting)

package com.baeldung.kafka.commitoffset;

import com.baeldung.kafka.commitoffset.config.KafkaConfigProperties;

import java.time.Duration;
import java.util.HashMap;
import java.util.Map;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.consumer.OffsetAndMetadata;
import org.apache.kafka.common.TopicPartition;

public class SpecificOffsetCommit {

    public static void main(String[] args) {
        KafkaConsumer<Long, String> consumer = new KafkaConsumer<>(KafkaConfigProperties.getProperties());
        consumer.subscribe(KafkaConfigProperties.getTopic());
        Map<TopicPartition, OffsetAndMetadata> currentOffsets = new HashMap<>();
        int messageProcessed = 0;
        while (true) {
            ConsumerRecords<Long, String> messages = consumer.poll(Duration.ofSeconds(10));
            for (ConsumerRecord<Long, String> message : messages) {
                // processed message
                messageProcessed++;
                currentOffsets.put(new TopicPartition(message.topic(), message.partition()), new OffsetAndMetadata(message.offset() + 1));
                if (messageProcessed % 50 == 0) {
                    consumer.commitSync(currentOffsets);
                }
            }
        }
    }
}

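The message.offset() + 1 is the key detail here: a committed offset is the position of the next record the group should read, so committing offset + 1 means the just-processed record is not redelivered after a restart, whereas committing message.offset() itself would replay it. When the periodic commit should not block the poll loop, the same map can be handed to the asynchronous overload instead; a sketch, with the error handling being illustrative:

// non-blocking replacement for the commitSync(currentOffsets) call above
if (messageProcessed % 50 == 0) {
    consumer.commitAsync(currentOffsets, (offsets, exception) -> {
        if (exception != null) {
            System.err.println("Batch commit failed: " + exception.getMessage());
        }
    });
}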
com/baeldung/kafka/commitoffset/SyncCommit.java (after formatting)

package com.baeldung.kafka.commitoffset;

import com.baeldung.kafka.commitoffset.config.KafkaConfigProperties;

import java.time.Duration;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

public class SyncCommit {

    public static void main(String[] args) {
        KafkaConsumer<Long, String> consumer = new KafkaConsumer<>(KafkaConfigProperties.getProperties());
        consumer.subscribe(KafkaConfigProperties.getTopic());
        ConsumerRecords<Long, String> messages = consumer.poll(Duration.ofSeconds(10));
        for (ConsumerRecord<Long, String> message : messages) {
            // processed message
            consumer.commitSync();
        }
    }
}

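Unlike commitAsync(), commitSync() blocks the polling thread and retries internally until the commit succeeds or fails with an unrecoverable error, such as a CommitFailedException after the group has rebalanced and the partitions were reassigned. A sketch of guarding the call in the loop above; merely logging the exception is a simplification, reasonable here because the reassigned records will be redelivered to their new owner:

// additional import: org.apache.kafka.clients.consumer.CommitFailedException
try {
    consumer.commitSync();
} catch (CommitFailedException e) {
    // the group rebalanced and this consumer no longer owns the partitions;
    // the uncommitted records will be reprocessed elsewhere
    System.err.println("Sync commit failed: " + e.getMessage());
}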
com/baeldung/kafka/commitoffset/config/KafkaConfigProperties.java (after formatting)

package com.baeldung.kafka.commitoffset.config;

import java.util.ArrayList;
import java.util.Properties;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;

/**
 * @author amitkumar
 */
public class KafkaConfigProperties {

    public static final String MY_TOPIC = "my-topic";

    public static Properties getProperties() {
        Properties props = new Properties();
        props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "MyFirstConsumer");
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        return props;
    }

    public static ArrayList<String> getTopic() {
        ArrayList<String> topics = new ArrayList<>();
        topics.add(MY_TOPIC);
        return topics;
    }
}
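One mismatch worth flagging: every consumer above is typed KafkaConsumer<Long, String>, yet this class registers StringDeserializer for keys as well as values. The examples never read message.key(), so they run as-is, but any code that assigns the key to a Long would fail at runtime. If Long keys are really intended, the key line would presumably become the following (a sketch, not part of this commit; LongDeserializer is the standard deserializer shipped with the Kafka clients):

// additional import: org.apache.kafka.common.serialization.LongDeserializer
props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, LongDeserializer.class.getName());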