From 307d0f5e409b29c6bced2310206ba093b34769e7 Mon Sep 17 00:00:00 2001 From: Amol Gote Date: Tue, 10 Oct 2023 22:39:14 -0400 Subject: [PATCH 01/63] Message Ordering - First Draft. --- apache-kafka-2/pom.xml | 5 ++ .../ordering/ConsumerConfigurations.java | 32 ++++++++++++ .../ExtSeqWithTimeWindowConsumer.java | 49 +++++++++++++++++++ .../ExtSeqWithTimeWindowProducer.java | 29 +++++++++++ .../ordering/MultiPartitionConsumer.java | 34 +++++++++++++ .../ordering/MultiPartitionProducer.java | 27 ++++++++++ .../ordering/ProducerConfigurations.java | 27 ++++++++++ .../ordering/SinglePartitionConsumer.java | 35 +++++++++++++ .../ordering/SinglePartitionProducer.java | 29 +++++++++++ .../message/ordering/payload/Message.java | 36 ++++++++++++++ .../serialization/JacksonDeserializer.java | 36 ++++++++++++++ .../serialization/JacksonSerializer.java | 20 ++++++++ 12 files changed, 359 insertions(+) create mode 100644 apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ConsumerConfigurations.java create mode 100644 apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowConsumer.java create mode 100644 apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowProducer.java create mode 100644 apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/MultiPartitionConsumer.java create mode 100644 apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/MultiPartitionProducer.java create mode 100644 apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ProducerConfigurations.java create mode 100644 apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/SinglePartitionConsumer.java create mode 100644 apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/SinglePartitionProducer.java create mode 100644 apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/payload/Message.java create mode 100644 apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/serialization/JacksonDeserializer.java create mode 100644 apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/serialization/JacksonSerializer.java diff --git a/apache-kafka-2/pom.xml b/apache-kafka-2/pom.xml index 067dedef8a..45b31004b7 100644 --- a/apache-kafka-2/pom.xml +++ b/apache-kafka-2/pom.xml @@ -57,6 +57,11 @@ ${lombok.version} provided + + com.fasterxml.jackson.core + jackson-databind + 2.15.2 + diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ConsumerConfigurations.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ConsumerConfigurations.java new file mode 100644 index 0000000000..b18db3ef24 --- /dev/null +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ConsumerConfigurations.java @@ -0,0 +1,32 @@ +package com.baeldung.kafka.message.ordering; + +import org.apache.kafka.clients.consumer.Consumer; +import org.apache.kafka.clients.consumer.ConsumerConfig; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; + +import java.time.Duration; +import java.util.Collections; +import java.util.Properties; + +public class ConsumerConfigurations { + public static void main(String[] args) { + Properties props = new Properties(); + props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); + props.put(ConsumerConfig.GROUP_ID_CONFIG, "test-group"); + props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer"); + 
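// The fetch-tuning properties below (max.poll.records, fetch.min.bytes, fetch.max.wait.ms)
// only shape how many records each poll returns and how long the broker may wait to fill a
// fetch; they have no effect on ordering, which Kafka guarantees only within a partition.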
props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer"); + props.put("max.poll.records", "500"); + props.put("fetch.min.bytes", "1"); + props.put("fetch.max.wait.ms", "500"); + Consumer consumer = new KafkaConsumer<>(props); + consumer.subscribe(Collections.singletonList("multi_partition_topic")); + + while (true) { + ConsumerRecords records = consumer.poll(Duration.ofMillis(100)); + records.forEach(record -> { + System.out.println("Partition: " + record.partition() + ", Message: " + record.value()); + }); + } + } +} diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowConsumer.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowConsumer.java new file mode 100644 index 0000000000..5b01a86e39 --- /dev/null +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowConsumer.java @@ -0,0 +1,49 @@ +package com.baeldung.kafka.message.ordering; + +import com.baeldung.kafka.message.ordering.payload.Message; +import org.apache.kafka.clients.consumer.Consumer; +import org.apache.kafka.clients.consumer.ConsumerConfig; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; + +import java.time.Duration; +import java.util.*; + +public class ExtSeqWithTimeWindowConsumer { + private static final long BUFFER_PERIOD_MS = 5000; + private static final Duration TIMEOUT_WAIT_FOR_MESSAGES = Duration.ofMillis(100); + + public static void main(String[] args) { + Properties props = new Properties(); + props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); + props.put(ConsumerConfig.GROUP_ID_CONFIG, "test-group"); + props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer"); + props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, "com.baeldung.kafka.message.ordering.serialization.JacksonDeserializer"); + props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); + props.put("value.deserializer.serializedClass", Message.class); + Consumer consumer = new KafkaConsumer<>(props); + consumer.subscribe(Collections.singletonList("multi_partition_topic")); + List buffer = new ArrayList<>(); + long lastProcessedTime = System.nanoTime(); + while (true) { + ConsumerRecords records = consumer.poll(TIMEOUT_WAIT_FOR_MESSAGES); + records.forEach(record -> { + if (record != null && record.value() != null) { + buffer.add(record.value()); + } + }); + if (System.nanoTime() - lastProcessedTime > BUFFER_PERIOD_MS) { + processBuffer(buffer); + lastProcessedTime = System.nanoTime(); + } + } + } + + private static void processBuffer(List buffer) { + Collections.sort(buffer); + buffer.forEach(message -> { + System.out.println("Processing message with Insert Position: " + message.getInsertPosition() + ", Message Id: " + message.getMessageId()); + }); + buffer.clear(); + } +} diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowProducer.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowProducer.java new file mode 100644 index 0000000000..91c5af716f --- /dev/null +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowProducer.java @@ -0,0 +1,29 @@ +package com.baeldung.kafka.message.ordering; + +import com.baeldung.kafka.message.ordering.payload.Message; +import 
org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.ProducerRecord; + +import java.util.Properties; +import java.util.Random; +import java.util.concurrent.atomic.AtomicLong; + +public class ExtSeqWithTimeWindowProducer { + public static void main(String[] args) { + Properties props = new Properties(); + props.put("bootstrap.servers", "localhost:9092"); + props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer"); + props.put("value.serializer", "com.baeldung.kafka.message.ordering.serialization.JacksonSerializer"); + + KafkaProducer producer = new KafkaProducer<>(props); + for (long insertPosition = 1; insertPosition <= 10 ; insertPosition++) { + long messageId = Message.getRandomMessageId(); + String key = "Key-" + insertPosition; + Message message = new Message(insertPosition, messageId); + producer.send(new ProducerRecord<>("multi_partition_topic", key, message)); + System.out.println("Insert Position: " + message.getInsertPosition() + ", Message Id: " + message.getMessageId()); + } + producer.close(); + System.out.println("ExternalSequencingProducer Completed."); + } +} diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/MultiPartitionConsumer.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/MultiPartitionConsumer.java new file mode 100644 index 0000000000..f9b0b3b040 --- /dev/null +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/MultiPartitionConsumer.java @@ -0,0 +1,34 @@ +package com.baeldung.kafka.message.ordering; + +import com.baeldung.kafka.message.ordering.payload.Message; +import org.apache.kafka.clients.consumer.Consumer; +import org.apache.kafka.clients.consumer.ConsumerConfig; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; + +import java.time.Duration; +import java.util.Collections; +import java.util.Properties; + +public class MultiPartitionConsumer { + private static final Duration TIMEOUT_WAIT_FOR_MESSAGES = Duration.ofMillis(100); + public static void main(String[] args) { + Properties props = new Properties(); + props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); + props.put(ConsumerConfig.GROUP_ID_CONFIG, "test-group"); + props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer"); + props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, "com.baeldung.kafka.message.ordering.serialization.JacksonDeserializer"); + props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); + props.put("value.deserializer.serializedClass", Message.class); + Consumer consumer = new KafkaConsumer<>(props); + consumer.subscribe(Collections.singletonList("multi_partition_topic")); + while (true) { + ConsumerRecords records = consumer.poll(TIMEOUT_WAIT_FOR_MESSAGES); + records.forEach(record -> { + Message message = record.value(); + System.out.println("Process message with Insert Position: " + message.getInsertPosition() + ", Message Id: " + message.getMessageId()); + }); + } + } +} + diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/MultiPartitionProducer.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/MultiPartitionProducer.java new file mode 100644 index 0000000000..8b2a49b2b5 --- /dev/null +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/MultiPartitionProducer.java @@ -0,0 +1,27 @@ +package com.baeldung.kafka.message.ordering; + 
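// Publishes ten keyed messages to multi_partition_topic; with more than one partition,
// Kafka preserves order only within each partition, not across the topic as a whole.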
+import com.baeldung.kafka.message.ordering.payload.Message; +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.ProducerRecord; + +import java.util.Properties; + +public class MultiPartitionProducer { + public static void main(String[] args) { + Properties props = new Properties(); + props.put("bootstrap.servers", "localhost:9092"); + props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer"); + props.put("value.serializer", "com.baeldung.kafka.message.ordering.serialization.JacksonSerializer"); + + KafkaProducer producer = new KafkaProducer<>(props); + for (long insertPosition = 1; insertPosition <= 10 ; insertPosition++) { + long messageId = Message.getRandomMessageId(); + String key = "Key-" + insertPosition; + Message message = new Message(insertPosition, messageId); + producer.send(new ProducerRecord<>("multi_partition_topic", key, message)); + System.out.println("Insert Position: " + message.getInsertPosition() + ", Message Id: " + message.getMessageId()); + } + producer.close(); + System.out.println("SinglePartitionProducer Completed."); + } +} diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ProducerConfigurations.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ProducerConfigurations.java new file mode 100644 index 0000000000..bcdf6ceb32 --- /dev/null +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ProducerConfigurations.java @@ -0,0 +1,27 @@ +package com.baeldung.kafka.message.ordering; + +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.ProducerRecord; + +import java.util.Properties; + +public class ProducerConfigurations { + public static void main(String[] args) { + Properties props = new Properties(); + props.put("bootstrap.servers", "localhost:9092"); + props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer"); + props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer"); + props.put("max.in.flight.requests.per.connection", "1"); + props.put("batch.size", "16384"); + props.put("linger.ms", "5"); + KafkaProducer producer = new KafkaProducer<>(props); + + for (int i = 0; i < 10; i++) { + String key = "Key-" + (i % 3); // Assuming 3 partitions + producer.send(new ProducerRecord<>("multi_partition_topic", key, "Message-" + i)); + } + + producer.close(); + System.out.println("MultiPartitionProducer Completed."); + } +} diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/SinglePartitionConsumer.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/SinglePartitionConsumer.java new file mode 100644 index 0000000000..932a29c394 --- /dev/null +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/SinglePartitionConsumer.java @@ -0,0 +1,35 @@ +package com.baeldung.kafka.message.ordering; + +import com.baeldung.kafka.message.ordering.payload.Message; +import org.apache.kafka.clients.consumer.Consumer; +import org.apache.kafka.clients.consumer.ConsumerConfig; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; + +import java.time.Duration; +import java.util.Collections; +import java.util.Properties; + +public class SinglePartitionConsumer { + private static final Duration TIMEOUT_WAIT_FOR_MESSAGES = Duration.ofMillis(100); + + public static void main(String[] args) { + Properties props = new Properties(); + 
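// Values are decoded by the custom JacksonDeserializer; the concrete payload type is passed
// through the "value.deserializer.serializedClass" property, which JacksonDeserializer.configure()
// reads back when the KafkaConsumer is created.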
props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); + props.put(ConsumerConfig.GROUP_ID_CONFIG, "test-group"); + props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer"); + props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, "com.baeldung.kafka.message.ordering.serialization.JacksonDeserializer"); + props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); + props.put("value.deserializer.serializedClass", Message.class); + Consumer consumer = new KafkaConsumer<>(props); + consumer.subscribe(Collections.singletonList("single_partition_topic")); + while (true) { + ConsumerRecords records = consumer.poll(TIMEOUT_WAIT_FOR_MESSAGES); + records.forEach(record -> { + Message message = record.value(); + System.out.println("Process message with Insert Position: " + message.getInsertPosition() + ", Message Id: " + message.getMessageId()); + }); + } + } +} + diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/SinglePartitionProducer.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/SinglePartitionProducer.java new file mode 100644 index 0000000000..b5366819c5 --- /dev/null +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/SinglePartitionProducer.java @@ -0,0 +1,29 @@ +package com.baeldung.kafka.message.ordering; + +import com.baeldung.kafka.message.ordering.payload.Message; +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.ProducerRecord; + +import java.util.Properties; +import java.util.Random; + +public class SinglePartitionProducer { + public static void main(String[] args) { + Properties props = new Properties(); + props.put("bootstrap.servers", "localhost:9092"); + props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer"); + props.put("value.serializer", "com.baeldung.kafka.message.ordering.serialization.JacksonSerializer"); + + KafkaProducer producer = new KafkaProducer<>(props); + for (long insertPosition = 1; insertPosition <= 10 ; insertPosition++) { + long messageId = Message.getRandomMessageId(); + String key = "Key-" + insertPosition; + Message message = new Message(insertPosition, messageId); + producer.send(new ProducerRecord<>("single_partition_topic", key, message)); + System.out.println("Insert Position: " + message.getInsertPosition() + ", Message Id: " + message.getMessageId()); + } + producer.close(); + System.out.println("SinglePartitionProducer Completed."); + } + +} diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/payload/Message.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/payload/Message.java new file mode 100644 index 0000000000..b185d663d4 --- /dev/null +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/payload/Message.java @@ -0,0 +1,36 @@ +package com.baeldung.kafka.message.ordering.payload; + +import java.util.Random; + +public class Message implements Comparable { + private long insertPosition; + private long messageId; + + public Message(){ + + } + + public Message(long insertPosition, long messageId) { + this.insertPosition = insertPosition; + this.messageId = messageId; + } + + public long getInsertPosition() { + return insertPosition; + } + + public long getMessageId() { + return messageId; + } + + @Override + public int compareTo(Message other) { + return Long.compare(this.messageId, other.messageId); + } + + public static long getRandomMessageId() { + Random 
rand = new Random(); + return rand.nextInt(1000); + } +} + diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/serialization/JacksonDeserializer.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/serialization/JacksonDeserializer.java new file mode 100644 index 0000000000..34aa181fcb --- /dev/null +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/serialization/JacksonDeserializer.java @@ -0,0 +1,36 @@ +package com.baeldung.kafka.message.ordering.serialization; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.kafka.common.serialization.Deserializer; + +import java.util.Map; + +public class JacksonDeserializer implements Deserializer { + private final ObjectMapper objectMapper = new ObjectMapper(); + private Class tClass; + + public JacksonDeserializer(Class tClass) { + this.tClass = tClass; + } + + public JacksonDeserializer() { + + } + + @Override + public void configure(Map configs, boolean isKey) { + this.tClass = (Class) configs.get("value.deserializer.serializedClass"); + } + + @Override + public T deserialize(String topic, byte[] bytes) { + if (bytes == null) { + return null; + } + try { + return objectMapper.readValue(bytes, tClass); + } catch (Exception e) { + throw new RuntimeException("Error deserializing value", e); + } + } +} + diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/serialization/JacksonSerializer.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/serialization/JacksonSerializer.java new file mode 100644 index 0000000000..fa9d25dd85 --- /dev/null +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/serialization/JacksonSerializer.java @@ -0,0 +1,20 @@ +package com.baeldung.kafka.message.ordering.serialization; + +import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.kafka.common.serialization.Serializer; + +public class JacksonSerializer implements Serializer { + private final ObjectMapper objectMapper = new ObjectMapper(); + + @Override + public byte[] serialize(String topic, T data) { + if (data == null) { + return null; + } + try { + return objectMapper.writeValueAsBytes(data); + } catch (Exception e) { + throw new RuntimeException("Error serializing value", e); + } + } +} From 8772eaa67351c29e44db9c391225bb9602af2469 Mon Sep 17 00:00:00 2001 From: Amol Gote Date: Sun, 15 Oct 2023 15:28:15 -0400 Subject: [PATCH 02/63] Single and Multiple Partition test. 
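These integration tests start a throwaway Kafka broker with Testcontainers and publish ten
keyed messages. SinglePartitionTest expects consumption order to match production order,
while MultiplePartitionTest expects global order to be lost, because the producer's keys
"Key-1".."Key-10" are hashed onto the topic's five partitions. The sketch below is
illustrative only and not part of this patch; it assumes the public Utils.murmur2/toPositive
helpers that the default partitioner applies to a non-null key:

    import java.nio.charset.StandardCharsets;
    import org.apache.kafka.common.utils.Utils;

    public class PartitionForKey {
        public static void main(String[] args) {
            int partitions = 5; // PARTITIONS used by MultiplePartitionTest
            for (int i = 1; i <= 10; i++) {
                byte[] keyBytes = ("Key-" + i).getBytes(StandardCharsets.UTF_8);
                // hash-and-modulo, as the default partitioner does for keyed records
                int partition = Utils.toPositive(Utils.murmur2(keyBytes)) % partitions;
                System.out.println("Key-" + i + " -> partition " + partition);
            }
        }
    }

Records that land on different partitions may be fetched in any interleaving, which is what
the assertFalse(isOrderMaintained) assertion in MultiplePartitionTest relies on.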
--- apache-kafka-2/pom.xml | 3 +- .../message/ordering/payload/Message.java | 14 +++ .../ordering/MultiplePartitionTest.java | 114 +++++++++++++++++ .../message/ordering/SinglePartitionTest.java | 115 ++++++++++++++++++ 4 files changed, 245 insertions(+), 1 deletion(-) create mode 100644 apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/MultiplePartitionTest.java create mode 100644 apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/SinglePartitionTest.java diff --git a/apache-kafka-2/pom.xml b/apache-kafka-2/pom.xml index 45b31004b7..d0838a386e 100644 --- a/apache-kafka-2/pom.xml +++ b/apache-kafka-2/pom.xml @@ -60,7 +60,7 @@ com.fasterxml.jackson.core jackson-databind - 2.15.2 + ${jackson.databind.version} @@ -69,6 +69,7 @@ 2.8.0 1.15.3 1.15.3 + 2.15.2 \ No newline at end of file diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/payload/Message.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/payload/Message.java index b185d663d4..317aec699e 100644 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/payload/Message.java +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/payload/Message.java @@ -1,5 +1,6 @@ package com.baeldung.kafka.message.ordering.payload; +import javax.swing.*; import java.util.Random; public class Message implements Comparable { @@ -10,6 +11,7 @@ public class Message implements Comparable { } + //Required for Kafka Serialization and Deserialization public Message(long insertPosition, long messageId) { this.insertPosition = insertPosition; this.messageId = messageId; @@ -28,6 +30,18 @@ public class Message implements Comparable { return Long.compare(this.messageId, other.messageId); } + @Override + public boolean equals(Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof Message)) { + return false; + } + Message message = (Message) obj; + return this.messageId == message.getMessageId() && this.insertPosition == message.getInsertPosition(); + } + public static long getRandomMessageId() { Random rand = new Random(); return rand.nextInt(1000); diff --git a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/MultiplePartitionTest.java b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/MultiplePartitionTest.java new file mode 100644 index 0000000000..586c328f79 --- /dev/null +++ b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/MultiplePartitionTest.java @@ -0,0 +1,114 @@ +package com.baeldung.kafka.message.ordering; + +import com.baeldung.kafka.message.ordering.payload.Message; +import lombok.var; +import org.apache.kafka.clients.admin.*; +import org.apache.kafka.clients.consumer.ConsumerConfig; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.ProducerConfig; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.apache.kafka.clients.producer.RecordMetadata; +import org.apache.kafka.common.KafkaFuture; +import org.apache.kafka.common.PartitionInfo; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; +import org.testcontainers.containers.KafkaContainer; +import org.testcontainers.junit.jupiter.Container; +import org.testcontainers.junit.jupiter.Testcontainers; +import org.testcontainers.utility.DockerImageName; +import java.time.Duration; +import 
java.util.*; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.Future; + +import static org.junit.jupiter.api.Assertions.*; + +@Testcontainers +public class MultiplePartitionTest { + private static String TOPIC = "multi_partition_topic"; + private static int PARTITIONS = 5; + private static short REPLICATION_FACTOR = 1; + private static Admin admin; + private static KafkaProducer producer; + private static KafkaConsumer consumer; + private static final Duration TIMEOUT_WAIT_FOR_MESSAGES = Duration.ofMillis(5000); + @Container + private static final KafkaContainer KAFKA_CONTAINER = new KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka:latest")); + + @BeforeAll + static void setup() throws ExecutionException, InterruptedException { + KAFKA_CONTAINER.addExposedPort(9092); + + Properties adminProperties = new Properties(); + adminProperties.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, KAFKA_CONTAINER.getBootstrapServers()); + + Properties producerProperties = new Properties(); + producerProperties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, KAFKA_CONTAINER.getBootstrapServers()); + producerProperties.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer"); + producerProperties.put("value.serializer", "com.baeldung.kafka.message.ordering.serialization.JacksonSerializer"); + + Properties consumerProperties = new Properties(); + consumerProperties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, KAFKA_CONTAINER.getBootstrapServers()); + consumerProperties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer"); + consumerProperties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, "com.baeldung.kafka.message.ordering.serialization.JacksonDeserializer"); + consumerProperties.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); + consumerProperties.put("value.deserializer.serializedClass", Message.class); + consumerProperties.put(ConsumerConfig.GROUP_ID_CONFIG, "test-group"); + admin = Admin.create(adminProperties); + producer = new KafkaProducer<>(producerProperties); + consumer = new KafkaConsumer<>(consumerProperties); + List topicList = new ArrayList<>(); + NewTopic newTopic = new NewTopic(TOPIC, PARTITIONS, REPLICATION_FACTOR); + topicList.add(newTopic); + CreateTopicsResult result = admin.createTopics(topicList); + KafkaFuture future = result.values().get(TOPIC); + future.whenComplete((voidResult, exception) -> { + if (exception != null) { + System.err.println("Error creating the topic: " + exception.getMessage()); + } else { + System.out.println("Topic created successfully!"); + } + }).get(); + } + + @AfterAll + static void destroy() { + KAFKA_CONTAINER.stop(); + } + + @Test + void givenMultiplePartitions_whenPublishedToKafkaAndConsumed_thenCheckForMessageOrder() throws ExecutionException, InterruptedException { + List sentMessageList = new ArrayList<>(); + List receivedMessageList = new ArrayList<>(); + for (long insertPosition = 1; insertPosition <= 10 ; insertPosition++) { + long messageId = Message.getRandomMessageId(); + String key = "Key-" + insertPosition; + Message message = new Message(insertPosition, messageId); + Future future = producer.send(new ProducerRecord<>(TOPIC, key, message)); + sentMessageList.add(message); + RecordMetadata metadata = future.get(); + System.out.println("Partition : " + metadata.partition()); + } + + boolean isOrderMaintained = true; + consumer.subscribe(Collections.singletonList(TOPIC)); + ConsumerRecords records = 
consumer.poll(TIMEOUT_WAIT_FOR_MESSAGES); + records.forEach(record -> { + Message message = record.value(); + receivedMessageList.add(message); + }); + for (int insertPosition = 0; insertPosition <= receivedMessageList.size() - 1; insertPosition++) { + if (isOrderMaintained){ + Message sentMessage = sentMessageList.get(insertPosition); + Message receivedMessage = receivedMessageList.get(insertPosition); + if (!sentMessage.equals(receivedMessage)) { + isOrderMaintained = false; + } + } + } + assertFalse(isOrderMaintained); + } +} diff --git a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/SinglePartitionTest.java b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/SinglePartitionTest.java new file mode 100644 index 0000000000..afffbcc28e --- /dev/null +++ b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/SinglePartitionTest.java @@ -0,0 +1,115 @@ +package com.baeldung.kafka.message.ordering; + +import com.baeldung.kafka.message.ordering.payload.Message; +import org.apache.kafka.clients.admin.Admin; +import org.apache.kafka.clients.admin.AdminClientConfig; +import org.apache.kafka.clients.admin.CreateTopicsResult; +import org.apache.kafka.clients.admin.NewTopic; +import org.apache.kafka.clients.consumer.ConsumerConfig; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.ProducerConfig; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.apache.kafka.clients.producer.RecordMetadata; +import org.apache.kafka.common.KafkaFuture; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; +import org.testcontainers.containers.KafkaContainer; +import org.testcontainers.junit.jupiter.Container; +import org.testcontainers.junit.jupiter.Testcontainers; +import org.testcontainers.utility.DockerImageName; +import java.time.Duration; +import java.util.*; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.Future; +import static org.junit.jupiter.api.Assertions.assertTrue; + +@Testcontainers +public class SinglePartitionTest { + private static String TOPIC = "single_partition_topic"; + private static int PARTITIONS = 1; + private static short REPLICATION_FACTOR = 1; + private static Admin admin; + private static KafkaProducer producer; + private static KafkaConsumer consumer; + + private static final Duration TIMEOUT_WAIT_FOR_MESSAGES = Duration.ofMillis(5000); + + @Container + private static final KafkaContainer KAFKA_CONTAINER = new KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka:latest")); + + @BeforeAll + static void setup() throws ExecutionException, InterruptedException { + KAFKA_CONTAINER.addExposedPort(9092); + + Properties adminProperties = new Properties(); + adminProperties.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, KAFKA_CONTAINER.getBootstrapServers()); + + Properties producerProperties = new Properties(); + producerProperties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, KAFKA_CONTAINER.getBootstrapServers()); + producerProperties.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer"); + producerProperties.put("value.serializer", "com.baeldung.kafka.message.ordering.serialization.JacksonSerializer"); + + Properties consumerProperties = new Properties(); + consumerProperties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, 
KAFKA_CONTAINER.getBootstrapServers()); + consumerProperties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer"); + consumerProperties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, "com.baeldung.kafka.message.ordering.serialization.JacksonDeserializer"); + consumerProperties.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); + consumerProperties.put("value.deserializer.serializedClass", Message.class); + consumerProperties.put(ConsumerConfig.GROUP_ID_CONFIG, "test-group"); + admin = Admin.create(adminProperties); + producer = new KafkaProducer<>(producerProperties); + consumer = new KafkaConsumer<>(consumerProperties); + List topicList = new ArrayList<>(); + NewTopic newTopic = new NewTopic(TOPIC, PARTITIONS, REPLICATION_FACTOR); + topicList.add(newTopic); + CreateTopicsResult result = admin.createTopics(topicList); + KafkaFuture future = result.values().get(TOPIC); + future.whenComplete((voidResult, exception) -> { + if (exception != null) { + System.err.println("Error creating the topic: " + exception.getMessage()); + } else { + System.out.println("Topic created successfully!"); + } + }).get(); + } + + @AfterAll + static void destroy() { + KAFKA_CONTAINER.stop(); + } + + @Test + void givenASinglePartition_whenPublishedToKafkaAndConsumed_thenCheckForMessageOrder() throws ExecutionException, InterruptedException { + List sentMessageList = new ArrayList<>(); + List receivedMessageList = new ArrayList<>(); + for (long insertPosition = 1; insertPosition <= 10 ; insertPosition++) { + long messageId = Message.getRandomMessageId(); + String key = "Key-" + insertPosition; + Message message = new Message(insertPosition, messageId); + ProducerRecord producerRecord = new ProducerRecord<>(TOPIC, key, message); + Future future = producer.send(producerRecord); + sentMessageList.add(message); + RecordMetadata metadata = future.get(); + System.out.println("Partition : " + metadata.partition()); + } + + consumer.subscribe(Collections.singletonList(TOPIC)); + ConsumerRecords records = consumer.poll(TIMEOUT_WAIT_FOR_MESSAGES); + records.forEach(record -> { + Message message = record.value(); + receivedMessageList.add(message); + }); + boolean result = true; + for (int count = 0; count <= 9 ; count++) { + Message sentMessage = sentMessageList.get(count); + Message receivedMessage = receivedMessageList.get(count); + if (!sentMessage.equals(receivedMessage) && result){ + result = false; + } + } + assertTrue(result); + } +} From c4aac70277435d53363e2e99f8a26de8d2729585 Mon Sep 17 00:00:00 2001 From: Amol Gote Date: Sun, 15 Oct 2023 15:46:35 -0400 Subject: [PATCH 03/63] User Constants instead od repeatative strings --- .../baeldung/kafka/message/ordering/Config.java | 5 +++++ .../message/ordering/MultiplePartitionTest.java | 14 +++++++++----- .../message/ordering/SinglePartitionTest.java | 14 +++++++++----- 3 files changed, 23 insertions(+), 10 deletions(-) create mode 100644 apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/Config.java diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/Config.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/Config.java new file mode 100644 index 0000000000..2635e72431 --- /dev/null +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/Config.java @@ -0,0 +1,5 @@ +package com.baeldung.kafka.message.ordering; + +public class Config { + public static final String CONSUMER_VALUE_DESERIALIZER_SERIALIZED_CLASS = 
"value.deserializer.serializedClass"; +} diff --git a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/MultiplePartitionTest.java b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/MultiplePartitionTest.java index 586c328f79..5b68544f95 100644 --- a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/MultiplePartitionTest.java +++ b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/MultiplePartitionTest.java @@ -1,6 +1,8 @@ package com.baeldung.kafka.message.ordering; import com.baeldung.kafka.message.ordering.payload.Message; +import com.baeldung.kafka.message.ordering.serialization.JacksonDeserializer; +import com.baeldung.kafka.message.ordering.serialization.JacksonSerializer; import lombok.var; import org.apache.kafka.clients.admin.*; import org.apache.kafka.clients.consumer.ConsumerConfig; @@ -12,6 +14,8 @@ import org.apache.kafka.clients.producer.ProducerRecord; import org.apache.kafka.clients.producer.RecordMetadata; import org.apache.kafka.common.KafkaFuture; import org.apache.kafka.common.PartitionInfo; +import org.apache.kafka.common.serialization.StringDeserializer; +import org.apache.kafka.common.serialization.StringSerializer; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; @@ -47,15 +51,15 @@ public class MultiplePartitionTest { Properties producerProperties = new Properties(); producerProperties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, KAFKA_CONTAINER.getBootstrapServers()); - producerProperties.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer"); - producerProperties.put("value.serializer", "com.baeldung.kafka.message.ordering.serialization.JacksonSerializer"); + producerProperties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()); + producerProperties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JacksonSerializer.class.getName()); Properties consumerProperties = new Properties(); consumerProperties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, KAFKA_CONTAINER.getBootstrapServers()); - consumerProperties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer"); - consumerProperties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, "com.baeldung.kafka.message.ordering.serialization.JacksonDeserializer"); + consumerProperties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName()); + consumerProperties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, JacksonDeserializer.class.getName()); consumerProperties.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); - consumerProperties.put("value.deserializer.serializedClass", Message.class); + consumerProperties.put(Config.CONSUMER_VALUE_DESERIALIZER_SERIALIZED_CLASS, Message.class); consumerProperties.put(ConsumerConfig.GROUP_ID_CONFIG, "test-group"); admin = Admin.create(adminProperties); producer = new KafkaProducer<>(producerProperties); diff --git a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/SinglePartitionTest.java b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/SinglePartitionTest.java index afffbcc28e..807e21bfa8 100644 --- a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/SinglePartitionTest.java +++ b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/SinglePartitionTest.java @@ -1,6 +1,8 @@ package com.baeldung.kafka.message.ordering; import 
com.baeldung.kafka.message.ordering.payload.Message; +import com.baeldung.kafka.message.ordering.serialization.JacksonDeserializer; +import com.baeldung.kafka.message.ordering.serialization.JacksonSerializer; import org.apache.kafka.clients.admin.Admin; import org.apache.kafka.clients.admin.AdminClientConfig; import org.apache.kafka.clients.admin.CreateTopicsResult; @@ -13,6 +15,8 @@ import org.apache.kafka.clients.producer.ProducerConfig; import org.apache.kafka.clients.producer.ProducerRecord; import org.apache.kafka.clients.producer.RecordMetadata; import org.apache.kafka.common.KafkaFuture; +import org.apache.kafka.common.serialization.StringDeserializer; +import org.apache.kafka.common.serialization.StringSerializer; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; @@ -49,15 +53,15 @@ public class SinglePartitionTest { Properties producerProperties = new Properties(); producerProperties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, KAFKA_CONTAINER.getBootstrapServers()); - producerProperties.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer"); - producerProperties.put("value.serializer", "com.baeldung.kafka.message.ordering.serialization.JacksonSerializer"); + producerProperties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()); + producerProperties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JacksonSerializer.class.getName()); Properties consumerProperties = new Properties(); consumerProperties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, KAFKA_CONTAINER.getBootstrapServers()); - consumerProperties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer"); - consumerProperties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, "com.baeldung.kafka.message.ordering.serialization.JacksonDeserializer"); + consumerProperties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName()); + consumerProperties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, JacksonDeserializer.class.getName()); consumerProperties.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); - consumerProperties.put("value.deserializer.serializedClass", Message.class); + consumerProperties.put(Config.CONSUMER_VALUE_DESERIALIZER_SERIALIZED_CLASS, Message.class); consumerProperties.put(ConsumerConfig.GROUP_ID_CONFIG, "test-group"); admin = Admin.create(adminProperties); producer = new KafkaProducer<>(producerProperties); From 1d1aaeedb6eea4201a8a92eade16f2ec62ef588c Mon Sep 17 00:00:00 2001 From: Amol Gote Date: Sun, 15 Oct 2023 15:57:42 -0400 Subject: [PATCH 04/63] Update apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ProducerConfigurations.java Co-authored-by: Liam Williams --- .../baeldung/kafka/message/ordering/ProducerConfigurations.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ProducerConfigurations.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ProducerConfigurations.java index bcdf6ceb32..60ae5fc2f8 100644 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ProducerConfigurations.java +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ProducerConfigurations.java @@ -13,7 +13,7 @@ public class ProducerConfigurations { props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer"); 
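// Capping in-flight requests at 1 means a retried batch cannot be overtaken by a later one,
// so send order is preserved within a partition even across retries; batch.size and linger.ms
// below only influence how records are grouped before they are sent.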
props.put("max.in.flight.requests.per.connection", "1"); props.put("batch.size", "16384"); - props.put("linger.ms", "5"); + props.put(ProducerConfig.LINGER_MS_CONFIG, "5"); KafkaProducer producer = new KafkaProducer<>(props); for (int i = 0; i < 10; i++) { From f8ea1db5ccd727c2e06912a04eac3b9de4088341 Mon Sep 17 00:00:00 2001 From: Amol Gote Date: Sun, 15 Oct 2023 15:58:27 -0400 Subject: [PATCH 05/63] Update apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ProducerConfigurations.java Co-authored-by: Liam Williams --- .../baeldung/kafka/message/ordering/ProducerConfigurations.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ProducerConfigurations.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ProducerConfigurations.java index 60ae5fc2f8..566605973c 100644 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ProducerConfigurations.java +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ProducerConfigurations.java @@ -12,7 +12,7 @@ public class ProducerConfigurations { props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer"); props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer"); props.put("max.in.flight.requests.per.connection", "1"); - props.put("batch.size", "16384"); + props.put(ProducerConfig.BATCH_SIZE_CONFIG, "16384"); props.put(ProducerConfig.LINGER_MS_CONFIG, "5"); KafkaProducer producer = new KafkaProducer<>(props); From 463e45d025682fbd4eecbb73165c460ef9f7ea03 Mon Sep 17 00:00:00 2001 From: Amol Gote Date: Sun, 15 Oct 2023 15:58:33 -0400 Subject: [PATCH 06/63] Update apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ProducerConfigurations.java Co-authored-by: Liam Williams --- .../baeldung/kafka/message/ordering/ProducerConfigurations.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ProducerConfigurations.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ProducerConfigurations.java index 566605973c..c9cef04bbc 100644 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ProducerConfigurations.java +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ProducerConfigurations.java @@ -11,7 +11,7 @@ public class ProducerConfigurations { props.put("bootstrap.servers", "localhost:9092"); props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer"); props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer"); - props.put("max.in.flight.requests.per.connection", "1"); + props.put(ProducerConfig.MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION, "1"); props.put(ProducerConfig.BATCH_SIZE_CONFIG, "16384"); props.put(ProducerConfig.LINGER_MS_CONFIG, "5"); KafkaProducer producer = new KafkaProducer<>(props); From 836c977fdea79eda47f2e5989709d73f40e5f593 Mon Sep 17 00:00:00 2001 From: Amol Gote Date: Sun, 15 Oct 2023 15:58:43 -0400 Subject: [PATCH 07/63] Update apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ProducerConfigurations.java Co-authored-by: Liam Williams --- .../baeldung/kafka/message/ordering/ProducerConfigurations.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ProducerConfigurations.java 
b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ProducerConfigurations.java index c9cef04bbc..81d91693fe 100644 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ProducerConfigurations.java +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ProducerConfigurations.java @@ -8,7 +8,7 @@ import java.util.Properties; public class ProducerConfigurations { public static void main(String[] args) { Properties props = new Properties(); - props.put("bootstrap.servers", "localhost:9092"); + props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer"); props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer"); props.put(ProducerConfig.MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION, "1"); From 540da8c7795ac450793347b927e6c6a800c06c25 Mon Sep 17 00:00:00 2001 From: Amol Gote Date: Sun, 15 Oct 2023 15:58:57 -0400 Subject: [PATCH 08/63] Update apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/serialization/JacksonSerializer.java Co-authored-by: Liam Williams --- .../message/ordering/serialization/JacksonSerializer.java | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/serialization/JacksonSerializer.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/serialization/JacksonSerializer.java index fa9d25dd85..2d7432cc7b 100644 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/serialization/JacksonSerializer.java +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/serialization/JacksonSerializer.java @@ -3,6 +3,10 @@ package com.baeldung.kafka.message.ordering.serialization; import com.fasterxml.jackson.databind.ObjectMapper; import org.apache.kafka.common.serialization.Serializer; +/** + * Configured via {@link org.apache.kafka.clients.producer.ProducerConfig#VALUE_SERIALIZER_CLASS_CONFIG} + */ +@SuppressWarnings("unused") public class JacksonSerializer implements Serializer { private final ObjectMapper objectMapper = new ObjectMapper(); From c3af2facae6cdc5b3a27607eec2d079f1a670d9f Mon Sep 17 00:00:00 2001 From: Amol Gote Date: Sun, 15 Oct 2023 15:59:07 -0400 Subject: [PATCH 09/63] Update apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/serialization/JacksonDeserializer.java Co-authored-by: Liam Williams --- .../message/ordering/serialization/JacksonDeserializer.java | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/serialization/JacksonDeserializer.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/serialization/JacksonDeserializer.java index 34aa181fcb..f3c8aaf60f 100644 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/serialization/JacksonDeserializer.java +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/serialization/JacksonDeserializer.java @@ -4,6 +4,10 @@ import org.apache.kafka.common.serialization.Deserializer; import java.util.Map; +/** + * Configured via {@link org.apache.kafka.clients.consumer.ConsumerConfig#VALUE_DESERIALIZER_CLASS_CONFIG} + */ +@SuppressWarnings("unused") public class JacksonDeserializer implements Deserializer { private final ObjectMapper objectMapper = new ObjectMapper(); private Class tClass; From d31cf69b4d5d2ca8d88cbebf6aee72b84dba96b3 Mon Sep 17 00:00:00 2001 From: Amol Gote Date: Sun, 15 Oct 2023 15:59:38 -0400 
Subject: [PATCH 10/63] Update apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/MultiPartitionProducer.java Co-authored-by: Liam Williams --- .../baeldung/kafka/message/ordering/MultiPartitionProducer.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/MultiPartitionProducer.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/MultiPartitionProducer.java index 8b2a49b2b5..0fd20c2ab1 100644 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/MultiPartitionProducer.java +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/MultiPartitionProducer.java @@ -11,7 +11,7 @@ public class MultiPartitionProducer { Properties props = new Properties(); props.put("bootstrap.servers", "localhost:9092"); props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer"); - props.put("value.serializer", "com.baeldung.kafka.message.ordering.serialization.JacksonSerializer"); + props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "com.baeldung.kafka.message.ordering.serialization.JacksonSerializer"); KafkaProducer producer = new KafkaProducer<>(props); for (long insertPosition = 1; insertPosition <= 10 ; insertPosition++) { From 857da48e742306babf0579436aa2baf0c3ea5a0c Mon Sep 17 00:00:00 2001 From: Amol Gote Date: Sun, 15 Oct 2023 16:00:04 -0400 Subject: [PATCH 11/63] Update apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ProducerConfigurations.java Co-authored-by: Liam Williams --- .../baeldung/kafka/message/ordering/ProducerConfigurations.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ProducerConfigurations.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ProducerConfigurations.java index 81d91693fe..ae3f2839c8 100644 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ProducerConfigurations.java +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ProducerConfigurations.java @@ -10,7 +10,7 @@ public class ProducerConfigurations { Properties props = new Properties(); props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer"); - props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer"); + props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer"); props.put(ProducerConfig.MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION, "1"); props.put(ProducerConfig.BATCH_SIZE_CONFIG, "16384"); props.put(ProducerConfig.LINGER_MS_CONFIG, "5"); From e35327cade00d1be3bedc513e416dc9de28f177a Mon Sep 17 00:00:00 2001 From: Amol Gote Date: Sun, 15 Oct 2023 16:00:49 -0400 Subject: [PATCH 12/63] Update apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/MultiPartitionProducer.java Co-authored-by: Liam Williams --- .../baeldung/kafka/message/ordering/MultiPartitionProducer.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/MultiPartitionProducer.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/MultiPartitionProducer.java index 0fd20c2ab1..04ffdd3336 100644 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/MultiPartitionProducer.java +++ 
b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/MultiPartitionProducer.java @@ -10,7 +10,7 @@ public class MultiPartitionProducer { public static void main(String[] args) { Properties props = new Properties(); props.put("bootstrap.servers", "localhost:9092"); - props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer"); + props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer"); props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "com.baeldung.kafka.message.ordering.serialization.JacksonSerializer"); KafkaProducer producer = new KafkaProducer<>(props); From 2dd76f7e78bad11206916195f15a81c0f79f9aad Mon Sep 17 00:00:00 2001 From: Amol Gote Date: Sun, 15 Oct 2023 16:01:50 -0400 Subject: [PATCH 13/63] Review comment incorporation --- .../kafka/message/ordering/ProducerConfigurations.java | 1 + .../kafka/message/ordering/payload/Message.java | 3 ++- .../ordering/serialization/JacksonDeserializer.java | 10 +++++----- 3 files changed, 8 insertions(+), 6 deletions(-) diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ProducerConfigurations.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ProducerConfigurations.java index 60ae5fc2f8..2c885b7caa 100644 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ProducerConfigurations.java +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ProducerConfigurations.java @@ -1,6 +1,7 @@ package com.baeldung.kafka.message.ordering; import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.ProducerConfig; import org.apache.kafka.clients.producer.ProducerRecord; import java.util.Properties; diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/payload/Message.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/payload/Message.java index 317aec699e..095aeef89a 100644 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/payload/Message.java +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/payload/Message.java @@ -2,6 +2,7 @@ package com.baeldung.kafka.message.ordering.payload; import javax.swing.*; import java.util.Random; +import java.util.concurrent.ThreadLocalRandom; public class Message implements Comparable { private long insertPosition; @@ -44,7 +45,7 @@ public class Message implements Comparable { public static long getRandomMessageId() { Random rand = new Random(); - return rand.nextInt(1000); + return ThreadLocalRandom.current().nextInt(1000); } } diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/serialization/JacksonDeserializer.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/serialization/JacksonDeserializer.java index 34aa181fcb..cb0b77e4c0 100644 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/serialization/JacksonDeserializer.java +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/serialization/JacksonDeserializer.java @@ -6,10 +6,10 @@ import java.util.Map; public class JacksonDeserializer implements Deserializer { private final ObjectMapper objectMapper = new ObjectMapper(); - private Class tClass; + private Class type; - public JacksonDeserializer(Class tClass) { - this.tClass = tClass; + public JacksonDeserializer(Class type) { + this.type = type; } public JacksonDeserializer() { @@ -18,7 +18,7 @@ public class JacksonDeserializer implements 
Deserializer { @Override public void configure(Map configs, boolean isKey) { - this.tClass = (Class) configs.get("value.deserializer.serializedClass"); + this.type = (Class) configs.get("value.deserializer.serializedClass"); } @Override @@ -27,7 +27,7 @@ public class JacksonDeserializer implements Deserializer { return null; } try { - return objectMapper.readValue(bytes, tClass); + return objectMapper.readValue(bytes, type); } catch (Exception e) { throw new RuntimeException("Error deserializing value", e); } From 51cf5cee5fe1ec871163291a4e97071c024dcb1d Mon Sep 17 00:00:00 2001 From: Amol Gote Date: Sun, 15 Oct 2023 16:02:15 -0400 Subject: [PATCH 14/63] Update apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/MultiPartitionProducer.java Co-authored-by: Liam Williams --- .../baeldung/kafka/message/ordering/MultiPartitionProducer.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/MultiPartitionProducer.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/MultiPartitionProducer.java index 04ffdd3336..b565941fa5 100644 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/MultiPartitionProducer.java +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/MultiPartitionProducer.java @@ -9,7 +9,7 @@ import java.util.Properties; public class MultiPartitionProducer { public static void main(String[] args) { Properties props = new Properties(); - props.put("bootstrap.servers", "localhost:9092"); + props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer"); props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "com.baeldung.kafka.message.ordering.serialization.JacksonSerializer"); From 9eed20e483f00dde53393c3d0ae541a107a7696c Mon Sep 17 00:00:00 2001 From: Amol Gote Date: Sun, 15 Oct 2023 16:03:11 -0400 Subject: [PATCH 15/63] Update apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowProducer.java Co-authored-by: Liam Williams --- .../kafka/message/ordering/ExtSeqWithTimeWindowProducer.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowProducer.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowProducer.java index 91c5af716f..59bfb397ec 100644 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowProducer.java +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowProducer.java @@ -13,7 +13,7 @@ public class ExtSeqWithTimeWindowProducer { Properties props = new Properties(); props.put("bootstrap.servers", "localhost:9092"); props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer"); - props.put("value.serializer", "com.baeldung.kafka.message.ordering.serialization.JacksonSerializer"); + props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "com.baeldung.kafka.message.ordering.serialization.JacksonSerializer"); KafkaProducer producer = new KafkaProducer<>(props); for (long insertPosition = 1; insertPosition <= 10 ; insertPosition++) { From 6da6a92a797d02ac0086c6591d949a58fc148f3b Mon Sep 17 00:00:00 2001 From: Amol Gote Date: Sun, 15 Oct 2023 16:03:28 -0400 Subject: [PATCH 16/63] Update 
apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowProducer.java Co-authored-by: Liam Williams --- .../kafka/message/ordering/ExtSeqWithTimeWindowProducer.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowProducer.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowProducer.java index 59bfb397ec..b4b890ffe7 100644 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowProducer.java +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowProducer.java @@ -12,7 +12,7 @@ public class ExtSeqWithTimeWindowProducer { public static void main(String[] args) { Properties props = new Properties(); props.put("bootstrap.servers", "localhost:9092"); - props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer"); + props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer"); props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "com.baeldung.kafka.message.ordering.serialization.JacksonSerializer"); KafkaProducer producer = new KafkaProducer<>(props); From 576b08076f59695d7286afcdf25145630de2ab1a Mon Sep 17 00:00:00 2001 From: Amol Gote Date: Sun, 15 Oct 2023 16:03:53 -0400 Subject: [PATCH 17/63] Update apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowProducer.java Co-authored-by: Liam Williams --- .../kafka/message/ordering/ExtSeqWithTimeWindowProducer.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowProducer.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowProducer.java index b4b890ffe7..69f50352ad 100644 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowProducer.java +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowProducer.java @@ -11,7 +11,7 @@ import java.util.concurrent.atomic.AtomicLong; public class ExtSeqWithTimeWindowProducer { public static void main(String[] args) { Properties props = new Properties(); - props.put("bootstrap.servers", "localhost:9092"); + props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092") props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer"); props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "com.baeldung.kafka.message.ordering.serialization.JacksonSerializer"); From 3c716605b90ddf32311a5459aacab1cf41a55566 Mon Sep 17 00:00:00 2001 From: Amol Gote Date: Sun, 15 Oct 2023 16:04:07 -0400 Subject: [PATCH 18/63] Update apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/SinglePartitionProducer.java Co-authored-by: Liam Williams --- .../kafka/message/ordering/SinglePartitionProducer.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/SinglePartitionProducer.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/SinglePartitionProducer.java index b5366819c5..1610df746c 100644 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/SinglePartitionProducer.java +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/SinglePartitionProducer.java @@ -10,7 +10,7 @@ import 
java.util.Random; public class SinglePartitionProducer { public static void main(String[] args) { Properties props = new Properties(); - props.put("bootstrap.servers", "localhost:9092"); + props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092") props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer"); props.put("value.serializer", "com.baeldung.kafka.message.ordering.serialization.JacksonSerializer"); From d408db30f209eda244bb505055b09935822e4ad9 Mon Sep 17 00:00:00 2001 From: Amol Gote Date: Sun, 15 Oct 2023 16:05:27 -0400 Subject: [PATCH 19/63] Update apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/SinglePartitionProducer.java Co-authored-by: Liam Williams --- .../kafka/message/ordering/SinglePartitionProducer.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/SinglePartitionProducer.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/SinglePartitionProducer.java index 1610df746c..03f89b9a55 100644 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/SinglePartitionProducer.java +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/SinglePartitionProducer.java @@ -12,7 +12,7 @@ public class SinglePartitionProducer { Properties props = new Properties(); props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092") props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer"); - props.put("value.serializer", "com.baeldung.kafka.message.ordering.serialization.JacksonSerializer"); + props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "com.baeldung.kafka.message.ordering.serialization.JacksonSerializer"); KafkaProducer producer = new KafkaProducer<>(props); for (long insertPosition = 1; insertPosition <= 10 ; insertPosition++) { From f2a69a8bb11a482237230c7261bcebdb1c87af97 Mon Sep 17 00:00:00 2001 From: Amol Gote Date: Sun, 15 Oct 2023 16:07:42 -0400 Subject: [PATCH 20/63] Update apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ConsumerConfigurations.java Co-authored-by: Liam Williams --- .../baeldung/kafka/message/ordering/ConsumerConfigurations.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ConsumerConfigurations.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ConsumerConfigurations.java index b18db3ef24..8af4e576ce 100644 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ConsumerConfigurations.java +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ConsumerConfigurations.java @@ -18,7 +18,7 @@ public class ConsumerConfigurations { props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer"); props.put("max.poll.records", "500"); props.put("fetch.min.bytes", "1"); - props.put("fetch.max.wait.ms", "500"); + props.put(ConsumerConfig.FETCH_MAX_WAIT_MS_CONFIG, "500"); Consumer consumer = new KafkaConsumer<>(props); consumer.subscribe(Collections.singletonList("multi_partition_topic")); From 89eed31dbd38d7bcc784dfbe99ea13bf1e136788 Mon Sep 17 00:00:00 2001 From: Amol Gote Date: Sun, 15 Oct 2023 16:08:05 -0400 Subject: [PATCH 21/63] Update apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ConsumerConfigurations.java Co-authored-by: Liam Williams --- .../baeldung/kafka/message/ordering/ConsumerConfigurations.java | 2 +- 1 file 
changed, 1 insertion(+), 1 deletion(-) diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ConsumerConfigurations.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ConsumerConfigurations.java index 8af4e576ce..9f3d8b9d5c 100644 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ConsumerConfigurations.java +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ConsumerConfigurations.java @@ -17,7 +17,7 @@ public class ConsumerConfigurations { props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer"); props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer"); props.put("max.poll.records", "500"); - props.put("fetch.min.bytes", "1"); + props.put(ConsumerConfig.FETCH_MIN_BYTES_CONFIG, "1"); props.put(ConsumerConfig.FETCH_MAX_WAIT_MS_CONFIG, "500"); Consumer consumer = new KafkaConsumer<>(props); consumer.subscribe(Collections.singletonList("multi_partition_topic")); From d850d6c30cbad40b5245b58f06ac0173ecfb3546 Mon Sep 17 00:00:00 2001 From: Amol Gote Date: Sun, 15 Oct 2023 16:08:27 -0400 Subject: [PATCH 22/63] Update apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ProducerConfigurations.java Co-authored-by: Liam Williams --- .../baeldung/kafka/message/ordering/ProducerConfigurations.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ProducerConfigurations.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ProducerConfigurations.java index 5dc917e308..af06fa04de 100644 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ProducerConfigurations.java +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ProducerConfigurations.java @@ -10,7 +10,7 @@ public class ProducerConfigurations { public static void main(String[] args) { Properties props = new Properties(); props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); - props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer"); + props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer"); props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer"); props.put(ProducerConfig.MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION, "1"); props.put(ProducerConfig.BATCH_SIZE_CONFIG, "16384"); From d85a01e524432d27f146d89609d6935d81aef091 Mon Sep 17 00:00:00 2001 From: Amol Gote Date: Sun, 15 Oct 2023 16:08:54 -0400 Subject: [PATCH 23/63] Update apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/SinglePartitionProducer.java Co-authored-by: Liam Williams --- .../kafka/message/ordering/SinglePartitionProducer.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/SinglePartitionProducer.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/SinglePartitionProducer.java index 03f89b9a55..869a260da6 100644 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/SinglePartitionProducer.java +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/SinglePartitionProducer.java @@ -11,7 +11,7 @@ public class SinglePartitionProducer { public static void main(String[] args) { Properties props = new Properties(); 
props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092") - props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer"); + props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer"); props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "com.baeldung.kafka.message.ordering.serialization.JacksonSerializer"); KafkaProducer producer = new KafkaProducer<>(props); From 29096686cc7de507162dc4c468a574ce5932a538 Mon Sep 17 00:00:00 2001 From: Amol Gote Date: Sun, 15 Oct 2023 16:09:36 -0400 Subject: [PATCH 24/63] Update apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ConsumerConfigurations.java Co-authored-by: Liam Williams --- .../baeldung/kafka/message/ordering/ConsumerConfigurations.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ConsumerConfigurations.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ConsumerConfigurations.java index 9f3d8b9d5c..b3faceaa44 100644 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ConsumerConfigurations.java +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ConsumerConfigurations.java @@ -16,7 +16,7 @@ public class ConsumerConfigurations { props.put(ConsumerConfig.GROUP_ID_CONFIG, "test-group"); props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer"); props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer"); - props.put("max.poll.records", "500"); + props.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, "500"); props.put(ConsumerConfig.FETCH_MIN_BYTES_CONFIG, "1"); props.put(ConsumerConfig.FETCH_MAX_WAIT_MS_CONFIG, "500"); Consumer consumer = new KafkaConsumer<>(props); From b94d5a1f8a8e2ce146b1d60d96f3ad3319c2976e Mon Sep 17 00:00:00 2001 From: Amol Gote Date: Sun, 15 Oct 2023 16:36:33 -0400 Subject: [PATCH 25/63] Incorporated Review Comments --- .../message/ordering/ConsumerConfigurations.java | 8 ++++++-- .../ordering/ExtSeqWithTimeWindowConsumer.java | 12 ++++++------ .../ordering/ExtSeqWithTimeWindowProducer.java | 9 ++++++--- .../message/ordering/MultiPartitionConsumer.java | 12 ++++++++---- .../message/ordering/MultiPartitionProducer.java | 7 +++++-- .../message/ordering/ProducerConfigurations.java | 5 +++-- .../message/ordering/SinglePartitionConsumer.java | 9 ++++++--- .../message/ordering/SinglePartitionProducer.java | 9 ++++++--- .../ordering/serialization/JacksonDeserializer.java | 6 ++++-- 9 files changed, 50 insertions(+), 27 deletions(-) diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ConsumerConfigurations.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ConsumerConfigurations.java index b3faceaa44..15562cfcef 100644 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ConsumerConfigurations.java +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ConsumerConfigurations.java @@ -1,9 +1,13 @@ package com.baeldung.kafka.message.ordering; +import com.baeldung.kafka.message.ordering.serialization.JacksonSerializer; import org.apache.kafka.clients.consumer.Consumer; import org.apache.kafka.clients.consumer.ConsumerConfig; import org.apache.kafka.clients.consumer.ConsumerRecords; import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.apache.kafka.clients.producer.ProducerConfig; 
+import org.apache.kafka.common.serialization.StringDeserializer; +import org.apache.kafka.common.serialization.StringSerializer; import java.time.Duration; import java.util.Collections; @@ -14,8 +18,8 @@ public class ConsumerConfigurations { Properties props = new Properties(); props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); props.put(ConsumerConfig.GROUP_ID_CONFIG, "test-group"); - props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer"); - props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer"); + props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName()); + props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName()); props.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, "500"); props.put(ConsumerConfig.FETCH_MIN_BYTES_CONFIG, "1"); props.put(ConsumerConfig.FETCH_MAX_WAIT_MS_CONFIG, "500"); diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowConsumer.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowConsumer.java index 5b01a86e39..f5a0dbd640 100644 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowConsumer.java +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowConsumer.java @@ -1,10 +1,12 @@ package com.baeldung.kafka.message.ordering; import com.baeldung.kafka.message.ordering.payload.Message; +import com.baeldung.kafka.message.ordering.serialization.JacksonDeserializer; import org.apache.kafka.clients.consumer.Consumer; import org.apache.kafka.clients.consumer.ConsumerConfig; import org.apache.kafka.clients.consumer.ConsumerRecords; import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.apache.kafka.common.serialization.StringDeserializer; import java.time.Duration; import java.util.*; @@ -17,10 +19,10 @@ public class ExtSeqWithTimeWindowConsumer { Properties props = new Properties(); props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); props.put(ConsumerConfig.GROUP_ID_CONFIG, "test-group"); - props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer"); - props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, "com.baeldung.kafka.message.ordering.serialization.JacksonDeserializer"); + props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName()); + props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, JacksonDeserializer.class.getName()); props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); - props.put("value.deserializer.serializedClass", Message.class); + props.put(Config.CONSUMER_VALUE_DESERIALIZER_SERIALIZED_CLASS, Message.class); Consumer consumer = new KafkaConsumer<>(props); consumer.subscribe(Collections.singletonList("multi_partition_topic")); List buffer = new ArrayList<>(); @@ -28,9 +30,7 @@ public class ExtSeqWithTimeWindowConsumer { while (true) { ConsumerRecords records = consumer.poll(TIMEOUT_WAIT_FOR_MESSAGES); records.forEach(record -> { - if (record != null && record.value() != null) { - buffer.add(record.value()); - } + buffer.add(record.value()); }); if (System.nanoTime() - lastProcessedTime > BUFFER_PERIOD_MS) { processBuffer(buffer); diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowProducer.java 
b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowProducer.java index 69f50352ad..110015de25 100644 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowProducer.java +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowProducer.java @@ -1,8 +1,11 @@ package com.baeldung.kafka.message.ordering; import com.baeldung.kafka.message.ordering.payload.Message; +import com.baeldung.kafka.message.ordering.serialization.JacksonSerializer; import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.ProducerConfig; import org.apache.kafka.clients.producer.ProducerRecord; +import org.apache.kafka.common.serialization.StringSerializer; import java.util.Properties; import java.util.Random; @@ -11,9 +14,9 @@ import java.util.concurrent.atomic.AtomicLong; public class ExtSeqWithTimeWindowProducer { public static void main(String[] args) { Properties props = new Properties(); - props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092") - props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer"); - props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "com.baeldung.kafka.message.ordering.serialization.JacksonSerializer"); + props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); + props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()); + props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JacksonSerializer.class.getName()); KafkaProducer producer = new KafkaProducer<>(props); for (long insertPosition = 1; insertPosition <= 10 ; insertPosition++) { diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/MultiPartitionConsumer.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/MultiPartitionConsumer.java index f9b0b3b040..542a664745 100644 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/MultiPartitionConsumer.java +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/MultiPartitionConsumer.java @@ -1,10 +1,12 @@ package com.baeldung.kafka.message.ordering; import com.baeldung.kafka.message.ordering.payload.Message; +import com.baeldung.kafka.message.ordering.serialization.JacksonDeserializer; import org.apache.kafka.clients.consumer.Consumer; import org.apache.kafka.clients.consumer.ConsumerConfig; import org.apache.kafka.clients.consumer.ConsumerRecords; import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.apache.kafka.common.serialization.StringDeserializer; import java.time.Duration; import java.util.Collections; @@ -16,17 +18,19 @@ public class MultiPartitionConsumer { Properties props = new Properties(); props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); props.put(ConsumerConfig.GROUP_ID_CONFIG, "test-group"); - props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer"); - props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, "com.baeldung.kafka.message.ordering.serialization.JacksonDeserializer"); + props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName()); + props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, JacksonDeserializer.class.getName()); props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); - props.put("value.deserializer.serializedClass", Message.class); + 
props.put(Config.CONSUMER_VALUE_DESERIALIZER_SERIALIZED_CLASS, Message.class); Consumer consumer = new KafkaConsumer<>(props); consumer.subscribe(Collections.singletonList("multi_partition_topic")); while (true) { ConsumerRecords records = consumer.poll(TIMEOUT_WAIT_FOR_MESSAGES); records.forEach(record -> { Message message = record.value(); - System.out.println("Process message with Insert Position: " + message.getInsertPosition() + ", Message Id: " + message.getMessageId()); + if (message != null) { + System.out.println("Process message with Insert Position: " + message.getInsertPosition() + ", Message Id: " + message.getMessageId()); + } }); } } diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/MultiPartitionProducer.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/MultiPartitionProducer.java index b565941fa5..bf9db58392 100644 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/MultiPartitionProducer.java +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/MultiPartitionProducer.java @@ -1,8 +1,11 @@ package com.baeldung.kafka.message.ordering; import com.baeldung.kafka.message.ordering.payload.Message; +import com.baeldung.kafka.message.ordering.serialization.JacksonSerializer; import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.ProducerConfig; import org.apache.kafka.clients.producer.ProducerRecord; +import org.apache.kafka.common.serialization.StringSerializer; import java.util.Properties; @@ -10,8 +13,8 @@ public class MultiPartitionProducer { public static void main(String[] args) { Properties props = new Properties(); props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); - props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer"); - props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "com.baeldung.kafka.message.ordering.serialization.JacksonSerializer"); + props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()); + props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JacksonSerializer.class.getName()); KafkaProducer producer = new KafkaProducer<>(props); for (long insertPosition = 1; insertPosition <= 10 ; insertPosition++) { diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ProducerConfigurations.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ProducerConfigurations.java index af06fa04de..0eb563910e 100644 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ProducerConfigurations.java +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ProducerConfigurations.java @@ -3,6 +3,7 @@ package com.baeldung.kafka.message.ordering; import org.apache.kafka.clients.producer.KafkaProducer; import org.apache.kafka.clients.producer.ProducerConfig; import org.apache.kafka.clients.producer.ProducerRecord; +import org.apache.kafka.common.serialization.StringSerializer; import java.util.Properties; @@ -10,8 +11,8 @@ public class ProducerConfigurations { public static void main(String[] args) { Properties props = new Properties(); props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); - props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer"); - props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer"); + 
props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()); + props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()); props.put(ProducerConfig.MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION, "1"); props.put(ProducerConfig.BATCH_SIZE_CONFIG, "16384"); props.put(ProducerConfig.LINGER_MS_CONFIG, "5"); diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/SinglePartitionConsumer.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/SinglePartitionConsumer.java index 932a29c394..e1a449055e 100644 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/SinglePartitionConsumer.java +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/SinglePartitionConsumer.java @@ -1,10 +1,13 @@ package com.baeldung.kafka.message.ordering; import com.baeldung.kafka.message.ordering.payload.Message; +import com.baeldung.kafka.message.ordering.serialization.JacksonDeserializer; import org.apache.kafka.clients.consumer.Consumer; import org.apache.kafka.clients.consumer.ConsumerConfig; import org.apache.kafka.clients.consumer.ConsumerRecords; import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.apache.kafka.common.serialization.StringDeserializer; +import org.apache.kafka.common.serialization.StringSerializer; import java.time.Duration; import java.util.Collections; @@ -17,10 +20,10 @@ public class SinglePartitionConsumer { Properties props = new Properties(); props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); props.put(ConsumerConfig.GROUP_ID_CONFIG, "test-group"); - props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer"); - props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, "com.baeldung.kafka.message.ordering.serialization.JacksonDeserializer"); + props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName()); + props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, JacksonDeserializer.class.getName()); props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); - props.put("value.deserializer.serializedClass", Message.class); + props.put(Config.CONSUMER_VALUE_DESERIALIZER_SERIALIZED_CLASS, Message.class); Consumer consumer = new KafkaConsumer<>(props); consumer.subscribe(Collections.singletonList("single_partition_topic")); while (true) { diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/SinglePartitionProducer.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/SinglePartitionProducer.java index 869a260da6..c986089841 100644 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/SinglePartitionProducer.java +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/SinglePartitionProducer.java @@ -1,8 +1,11 @@ package com.baeldung.kafka.message.ordering; import com.baeldung.kafka.message.ordering.payload.Message; +import com.baeldung.kafka.message.ordering.serialization.JacksonSerializer; import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.ProducerConfig; import org.apache.kafka.clients.producer.ProducerRecord; +import org.apache.kafka.common.serialization.StringSerializer; import java.util.Properties; import java.util.Random; @@ -10,9 +13,9 @@ import java.util.Random; public class SinglePartitionProducer { public static void main(String[] args) { Properties props = new Properties(); - 
props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092") - props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer"); - props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "com.baeldung.kafka.message.ordering.serialization.JacksonSerializer"); + props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); + props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()); + props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JacksonSerializer.class.getName()); KafkaProducer producer = new KafkaProducer<>(props); for (long insertPosition = 1; insertPosition <= 10 ; insertPosition++) { diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/serialization/JacksonDeserializer.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/serialization/JacksonDeserializer.java index 2def07f987..be2b104761 100644 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/serialization/JacksonDeserializer.java +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/serialization/JacksonDeserializer.java @@ -1,4 +1,5 @@ package com.baeldung.kafka.message.ordering.serialization; +import com.baeldung.kafka.message.ordering.Config; import com.fasterxml.jackson.databind.ObjectMapper; import org.apache.kafka.common.serialization.Deserializer; @@ -22,7 +23,7 @@ public class JacksonDeserializer implements Deserializer { @Override public void configure(Map configs, boolean isKey) { - this.type = (Class) configs.get("value.deserializer.serializedClass"); + this.type = (Class) configs.get(Config.CONSUMER_VALUE_DESERIALIZER_SERIALIZED_CLASS); } @Override @@ -33,8 +34,9 @@ public class JacksonDeserializer implements Deserializer { try { return objectMapper.readValue(bytes, type); } catch (Exception e) { - throw new RuntimeException("Error deserializing value", e); + //throw new RuntimeException("Error deserializing value", e); } + return null; } } From 6d4e6886b311ffe28794033bc555e80b274fe40b Mon Sep 17 00:00:00 2001 From: Amol Gote Date: Sun, 15 Oct 2023 18:42:20 -0400 Subject: [PATCH 26/63] Renamed Inser position to patition key --- .../ordering/ExtSeqWithTimeWindowConsumer.java | 2 +- .../ordering/ExtSeqWithTimeWindowProducer.java | 6 ++---- .../message/ordering/MultiPartitionConsumer.java | 2 +- .../message/ordering/MultiPartitionProducer.java | 4 ++-- .../message/ordering/SinglePartitionConsumer.java | 3 +-- .../message/ordering/SinglePartitionProducer.java | 5 ++--- .../kafka/message/ordering/payload/Message.java | 14 +++++++------- .../message/ordering/MultiplePartitionTest.java | 2 +- .../message/ordering/SinglePartitionTest.java | 2 +- 9 files changed, 18 insertions(+), 22 deletions(-) diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowConsumer.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowConsumer.java index f5a0dbd640..d342c1a950 100644 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowConsumer.java +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowConsumer.java @@ -42,7 +42,7 @@ public class ExtSeqWithTimeWindowConsumer { private static void processBuffer(List buffer) { Collections.sort(buffer); buffer.forEach(message -> { - System.out.println("Processing message with Insert Position: " + message.getInsertPosition() + ", Message Id: " + message.getMessageId()); + 
System.out.println("Processing message with Insert Position: " + message.getPartitionKey() + ", Message Id: " + message.getMessageId()); }); buffer.clear(); } diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowProducer.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowProducer.java index 110015de25..d1480522e5 100644 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowProducer.java +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowProducer.java @@ -8,8 +8,6 @@ import org.apache.kafka.clients.producer.ProducerRecord; import org.apache.kafka.common.serialization.StringSerializer; import java.util.Properties; -import java.util.Random; -import java.util.concurrent.atomic.AtomicLong; public class ExtSeqWithTimeWindowProducer { public static void main(String[] args) { @@ -22,9 +20,9 @@ public class ExtSeqWithTimeWindowProducer { for (long insertPosition = 1; insertPosition <= 10 ; insertPosition++) { long messageId = Message.getRandomMessageId(); String key = "Key-" + insertPosition; - Message message = new Message(insertPosition, messageId); + Message message = new Message(key, messageId); producer.send(new ProducerRecord<>("multi_partition_topic", key, message)); - System.out.println("Insert Position: " + message.getInsertPosition() + ", Message Id: " + message.getMessageId()); + System.out.println("Insert Position: " + message.getPartitionKey() + ", Message Id: " + message.getMessageId()); } producer.close(); System.out.println("ExternalSequencingProducer Completed."); diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/MultiPartitionConsumer.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/MultiPartitionConsumer.java index 542a664745..4471070f0f 100644 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/MultiPartitionConsumer.java +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/MultiPartitionConsumer.java @@ -29,7 +29,7 @@ public class MultiPartitionConsumer { records.forEach(record -> { Message message = record.value(); if (message != null) { - System.out.println("Process message with Insert Position: " + message.getInsertPosition() + ", Message Id: " + message.getMessageId()); + System.out.println("Process message with Insert Position: " + message.getPartitionKey() + ", Message Id: " + message.getMessageId()); } }); } diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/MultiPartitionProducer.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/MultiPartitionProducer.java index bf9db58392..04e3dcce0a 100644 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/MultiPartitionProducer.java +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/MultiPartitionProducer.java @@ -20,9 +20,9 @@ public class MultiPartitionProducer { for (long insertPosition = 1; insertPosition <= 10 ; insertPosition++) { long messageId = Message.getRandomMessageId(); String key = "Key-" + insertPosition; - Message message = new Message(insertPosition, messageId); + Message message = new Message(key, messageId); producer.send(new ProducerRecord<>("multi_partition_topic", key, message)); - System.out.println("Insert Position: " + message.getInsertPosition() + ", Message Id: " + message.getMessageId()); + System.out.println("Insert Position: " + message.getPartitionKey() + ", Message Id: 
" + message.getMessageId()); } producer.close(); System.out.println("SinglePartitionProducer Completed."); diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/SinglePartitionConsumer.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/SinglePartitionConsumer.java index e1a449055e..b47e4ca3b0 100644 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/SinglePartitionConsumer.java +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/SinglePartitionConsumer.java @@ -7,7 +7,6 @@ import org.apache.kafka.clients.consumer.ConsumerConfig; import org.apache.kafka.clients.consumer.ConsumerRecords; import org.apache.kafka.clients.consumer.KafkaConsumer; import org.apache.kafka.common.serialization.StringDeserializer; -import org.apache.kafka.common.serialization.StringSerializer; import java.time.Duration; import java.util.Collections; @@ -30,7 +29,7 @@ public class SinglePartitionConsumer { ConsumerRecords records = consumer.poll(TIMEOUT_WAIT_FOR_MESSAGES); records.forEach(record -> { Message message = record.value(); - System.out.println("Process message with Insert Position: " + message.getInsertPosition() + ", Message Id: " + message.getMessageId()); + System.out.println("Process message with Insert Position: " + message.getPartitionKey() + ", Message Id: " + message.getMessageId()); }); } } diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/SinglePartitionProducer.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/SinglePartitionProducer.java index c986089841..d669a0fd69 100644 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/SinglePartitionProducer.java +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/SinglePartitionProducer.java @@ -8,7 +8,6 @@ import org.apache.kafka.clients.producer.ProducerRecord; import org.apache.kafka.common.serialization.StringSerializer; import java.util.Properties; -import java.util.Random; public class SinglePartitionProducer { public static void main(String[] args) { @@ -21,9 +20,9 @@ public class SinglePartitionProducer { for (long insertPosition = 1; insertPosition <= 10 ; insertPosition++) { long messageId = Message.getRandomMessageId(); String key = "Key-" + insertPosition; - Message message = new Message(insertPosition, messageId); + Message message = new Message(key, messageId); producer.send(new ProducerRecord<>("single_partition_topic", key, message)); - System.out.println("Insert Position: " + message.getInsertPosition() + ", Message Id: " + message.getMessageId()); + System.out.println("Insert Position: " + message.getPartitionKey() + ", Message Id: " + message.getMessageId()); } producer.close(); System.out.println("SinglePartitionProducer Completed."); diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/payload/Message.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/payload/Message.java index 095aeef89a..de1e5135da 100644 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/payload/Message.java +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/payload/Message.java @@ -1,11 +1,11 @@ package com.baeldung.kafka.message.ordering.payload; -import javax.swing.*; +import java.util.Objects; import java.util.Random; import java.util.concurrent.ThreadLocalRandom; public class Message implements Comparable { - private long insertPosition; + private String partitionKey; private long messageId; public 
Message(){ @@ -13,13 +13,13 @@ public class Message implements Comparable { } //Required for Kafka Serialization and Deserialization - public Message(long insertPosition, long messageId) { - this.insertPosition = insertPosition; + public Message(String partitionKey, long messageId) { + this.partitionKey = partitionKey; this.messageId = messageId; } - public long getInsertPosition() { - return insertPosition; + public String getPartitionKey() { + return partitionKey; } public long getMessageId() { @@ -40,7 +40,7 @@ public class Message implements Comparable { return false; } Message message = (Message) obj; - return this.messageId == message.getMessageId() && this.insertPosition == message.getInsertPosition(); + return this.messageId == message.getMessageId() && Objects.equals(this.partitionKey, message.getPartitionKey()); } public static long getRandomMessageId() { diff --git a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/MultiplePartitionTest.java b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/MultiplePartitionTest.java index 5b68544f95..aed5f30e9d 100644 --- a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/MultiplePartitionTest.java +++ b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/MultiplePartitionTest.java @@ -90,7 +90,7 @@ public class MultiplePartitionTest { for (long insertPosition = 1; insertPosition <= 10 ; insertPosition++) { long messageId = Message.getRandomMessageId(); String key = "Key-" + insertPosition; - Message message = new Message(insertPosition, messageId); + Message message = new Message(key, messageId); Future future = producer.send(new ProducerRecord<>(TOPIC, key, message)); sentMessageList.add(message); RecordMetadata metadata = future.get(); diff --git a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/SinglePartitionTest.java b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/SinglePartitionTest.java index 807e21bfa8..5751c8d0e0 100644 --- a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/SinglePartitionTest.java +++ b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/SinglePartitionTest.java @@ -92,7 +92,7 @@ public class SinglePartitionTest { for (long insertPosition = 1; insertPosition <= 10 ; insertPosition++) { long messageId = Message.getRandomMessageId(); String key = "Key-" + insertPosition; - Message message = new Message(insertPosition, messageId); + Message message = new Message(key, messageId); ProducerRecord producerRecord = new ProducerRecord<>(TOPIC, key, message); Future future = producer.send(producerRecord); sentMessageList.add(message); From d1d456e59a6ba378c84021b26e09f05a134be131 Mon Sep 17 00:00:00 2001 From: Amol Gote Date: Sun, 15 Oct 2023 19:19:01 -0400 Subject: [PATCH 27/63] Removed insertPosition, renamed messageId to applicationIdentifier --- .../ExtSeqWithTimeWindowConsumer.java | 9 ++++--- .../ExtSeqWithTimeWindowProducer.java | 18 ++++++------- .../ordering/MultiPartitionConsumer.java | 5 ++-- .../ordering/MultiPartitionProducer.java | 18 ++++++------- .../ordering/SinglePartitionConsumer.java | 9 ++++--- .../ordering/SinglePartitionProducer.java | 18 ++++++------- .../message/ordering/payload/Message.java | 20 +++++++-------- .../ordering/MultiplePartitionTest.java | 25 ++++++++----------- .../message/ordering/SinglePartitionTest.java | 23 ++++++++--------- 9 files changed, 69 insertions(+), 76 deletions(-) diff --git 
a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowConsumer.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowConsumer.java index d342c1a950..19595d9e95 100644 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowConsumer.java +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowConsumer.java @@ -6,20 +6,21 @@ import org.apache.kafka.clients.consumer.Consumer; import org.apache.kafka.clients.consumer.ConsumerConfig; import org.apache.kafka.clients.consumer.ConsumerRecords; import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.apache.kafka.common.serialization.LongDeserializer; import org.apache.kafka.common.serialization.StringDeserializer; import java.time.Duration; import java.util.*; public class ExtSeqWithTimeWindowConsumer { - private static final long BUFFER_PERIOD_MS = 5000; + private static final long BUFFER_PERIOD_NS = 5000L * 1000000; // 5000 milliseconds converted to nanoseconds private static final Duration TIMEOUT_WAIT_FOR_MESSAGES = Duration.ofMillis(100); public static void main(String[] args) { Properties props = new Properties(); props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); props.put(ConsumerConfig.GROUP_ID_CONFIG, "test-group"); - props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName()); + props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, LongDeserializer.class.getName()); props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, JacksonDeserializer.class.getName()); props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); props.put(Config.CONSUMER_VALUE_DESERIALIZER_SERIALIZED_CLASS, Message.class); @@ -32,7 +33,7 @@ public class ExtSeqWithTimeWindowConsumer { records.forEach(record -> { buffer.add(record.value()); }); - if (System.nanoTime() - lastProcessedTime > BUFFER_PERIOD_MS) { + if (System.nanoTime() - lastProcessedTime > BUFFER_PERIOD_NS) { processBuffer(buffer); lastProcessedTime = System.nanoTime(); } @@ -42,7 +43,7 @@ public class ExtSeqWithTimeWindowConsumer { private static void processBuffer(List buffer) { Collections.sort(buffer); buffer.forEach(message -> { - System.out.println("Processing message with Insert Position: " + message.getPartitionKey() + ", Message Id: " + message.getMessageId()); + System.out.println("Processing message with Partition key: " + message.getPartitionKey() + ", Application Identifier: " + message.getApplicationIdentifier()); }); buffer.clear(); } diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowProducer.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowProducer.java index d1480522e5..a20c569159 100644 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowProducer.java +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowProducer.java @@ -5,7 +5,7 @@ import com.baeldung.kafka.message.ordering.serialization.JacksonSerializer; import org.apache.kafka.clients.producer.KafkaProducer; import org.apache.kafka.clients.producer.ProducerConfig; import org.apache.kafka.clients.producer.ProducerRecord; -import org.apache.kafka.common.serialization.StringSerializer; +import org.apache.kafka.common.serialization.LongSerializer; import java.util.Properties; @@ -13,16 +13,14 @@ public class ExtSeqWithTimeWindowProducer { public static void 
main(String[] args) { Properties props = new Properties(); props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); - props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()); + props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, LongSerializer.class.getName()); props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JacksonSerializer.class.getName()); - - KafkaProducer producer = new KafkaProducer<>(props); - for (long insertPosition = 1; insertPosition <= 10 ; insertPosition++) { - long messageId = Message.getRandomMessageId(); - String key = "Key-" + insertPosition; - Message message = new Message(key, messageId); - producer.send(new ProducerRecord<>("multi_partition_topic", key, message)); - System.out.println("Insert Position: " + message.getPartitionKey() + ", Message Id: " + message.getMessageId()); + KafkaProducer producer = new KafkaProducer<>(props); + for (long partitionKey = 1; partitionKey <= 10 ; partitionKey++) { + long applicationIdentifier = Message.getRandomApplicationIdentifier(); + Message message = new Message(partitionKey, applicationIdentifier); + producer.send(new ProducerRecord<>("multi_partition_topic", partitionKey, message)); + System.out.println("Partition key: " + message.getPartitionKey() + ", Application Identifier: " + message.getApplicationIdentifier()); } producer.close(); System.out.println("ExternalSequencingProducer Completed."); diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/MultiPartitionConsumer.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/MultiPartitionConsumer.java index 4471070f0f..c37c645d31 100644 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/MultiPartitionConsumer.java +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/MultiPartitionConsumer.java @@ -6,6 +6,7 @@ import org.apache.kafka.clients.consumer.Consumer; import org.apache.kafka.clients.consumer.ConsumerConfig; import org.apache.kafka.clients.consumer.ConsumerRecords; import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.apache.kafka.common.serialization.LongDeserializer; import org.apache.kafka.common.serialization.StringDeserializer; import java.time.Duration; @@ -18,7 +19,7 @@ public class MultiPartitionConsumer { Properties props = new Properties(); props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); props.put(ConsumerConfig.GROUP_ID_CONFIG, "test-group"); - props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName()); + props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, LongDeserializer.class.getName()); props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, JacksonDeserializer.class.getName()); props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); props.put(Config.CONSUMER_VALUE_DESERIALIZER_SERIALIZED_CLASS, Message.class); @@ -29,7 +30,7 @@ public class MultiPartitionConsumer { records.forEach(record -> { Message message = record.value(); if (message != null) { - System.out.println("Process message with Insert Position: " + message.getPartitionKey() + ", Message Id: " + message.getMessageId()); + System.out.println("Process message with Partition key: " + message.getPartitionKey() + ", Application Identifier: " + message.getApplicationIdentifier()); } }); } diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/MultiPartitionProducer.java 
b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/MultiPartitionProducer.java index 04e3dcce0a..81cc5c6af0 100644 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/MultiPartitionProducer.java +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/MultiPartitionProducer.java @@ -5,7 +5,7 @@ import com.baeldung.kafka.message.ordering.serialization.JacksonSerializer; import org.apache.kafka.clients.producer.KafkaProducer; import org.apache.kafka.clients.producer.ProducerConfig; import org.apache.kafka.clients.producer.ProducerRecord; -import org.apache.kafka.common.serialization.StringSerializer; +import org.apache.kafka.common.serialization.LongSerializer; import java.util.Properties; @@ -13,16 +13,14 @@ public class MultiPartitionProducer { public static void main(String[] args) { Properties props = new Properties(); props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); - props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()); + props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, LongSerializer.class.getName()); props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JacksonSerializer.class.getName()); - - KafkaProducer producer = new KafkaProducer<>(props); - for (long insertPosition = 1; insertPosition <= 10 ; insertPosition++) { - long messageId = Message.getRandomMessageId(); - String key = "Key-" + insertPosition; - Message message = new Message(key, messageId); - producer.send(new ProducerRecord<>("multi_partition_topic", key, message)); - System.out.println("Insert Position: " + message.getPartitionKey() + ", Message Id: " + message.getMessageId()); + KafkaProducer producer = new KafkaProducer<>(props); + for (long partitionKey = 1; partitionKey <= 10 ; partitionKey++) { + long applicationIdentifier = Message.getRandomApplicationIdentifier(); + Message message = new Message(partitionKey, applicationIdentifier); + producer.send(new ProducerRecord<>("multi_partition_topic", partitionKey, message)); + System.out.println("Partition Key: " + message.getPartitionKey() + ", Application Identifier: " + message.getApplicationIdentifier()); } producer.close(); System.out.println("SinglePartitionProducer Completed."); diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/SinglePartitionConsumer.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/SinglePartitionConsumer.java index b47e4ca3b0..9f44cd78c6 100644 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/SinglePartitionConsumer.java +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/SinglePartitionConsumer.java @@ -6,6 +6,7 @@ import org.apache.kafka.clients.consumer.Consumer; import org.apache.kafka.clients.consumer.ConsumerConfig; import org.apache.kafka.clients.consumer.ConsumerRecords; import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.apache.kafka.common.serialization.LongDeserializer; import org.apache.kafka.common.serialization.StringDeserializer; import java.time.Duration; @@ -19,17 +20,17 @@ public class SinglePartitionConsumer { Properties props = new Properties(); props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); props.put(ConsumerConfig.GROUP_ID_CONFIG, "test-group"); - props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName()); + props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, LongDeserializer.class.getName()); 
props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, JacksonDeserializer.class.getName()); props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); props.put(Config.CONSUMER_VALUE_DESERIALIZER_SERIALIZED_CLASS, Message.class); - Consumer consumer = new KafkaConsumer<>(props); + Consumer consumer = new KafkaConsumer<>(props); consumer.subscribe(Collections.singletonList("single_partition_topic")); while (true) { - ConsumerRecords records = consumer.poll(TIMEOUT_WAIT_FOR_MESSAGES); + ConsumerRecords records = consumer.poll(TIMEOUT_WAIT_FOR_MESSAGES); records.forEach(record -> { Message message = record.value(); - System.out.println("Process message with Insert Position: " + message.getPartitionKey() + ", Message Id: " + message.getMessageId()); + System.out.println("Process message with Partition Key: " + message.getPartitionKey() + ", Application Identifier: " + message.getApplicationIdentifier()); }); } } diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/SinglePartitionProducer.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/SinglePartitionProducer.java index d669a0fd69..efa6e5b93d 100644 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/SinglePartitionProducer.java +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/SinglePartitionProducer.java @@ -5,7 +5,7 @@ import com.baeldung.kafka.message.ordering.serialization.JacksonSerializer; import org.apache.kafka.clients.producer.KafkaProducer; import org.apache.kafka.clients.producer.ProducerConfig; import org.apache.kafka.clients.producer.ProducerRecord; -import org.apache.kafka.common.serialization.StringSerializer; +import org.apache.kafka.common.serialization.LongSerializer; import java.util.Properties; @@ -13,16 +13,14 @@ public class SinglePartitionProducer { public static void main(String[] args) { Properties props = new Properties(); props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); - props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()); + props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, LongSerializer.class.getName()); props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JacksonSerializer.class.getName()); - - KafkaProducer producer = new KafkaProducer<>(props); - for (long insertPosition = 1; insertPosition <= 10 ; insertPosition++) { - long messageId = Message.getRandomMessageId(); - String key = "Key-" + insertPosition; - Message message = new Message(key, messageId); - producer.send(new ProducerRecord<>("single_partition_topic", key, message)); - System.out.println("Insert Position: " + message.getPartitionKey() + ", Message Id: " + message.getMessageId()); + KafkaProducer producer = new KafkaProducer<>(props); + for (long partitionKey = 1; partitionKey <= 10 ; partitionKey++) { + long applicationIdentifier = Message.getRandomApplicationIdentifier(); + Message message = new Message(partitionKey, applicationIdentifier); + producer.send(new ProducerRecord<>("single_partition_topic", partitionKey, message)); + System.out.println("Partition key: " + message.getPartitionKey() + ", Application Identifier: " + message.getApplicationIdentifier()); } producer.close(); System.out.println("SinglePartitionProducer Completed."); diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/payload/Message.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/payload/Message.java index de1e5135da..734ecba53d 100644 --- 
a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/payload/Message.java +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/payload/Message.java @@ -5,30 +5,30 @@ import java.util.Random; import java.util.concurrent.ThreadLocalRandom; public class Message implements Comparable { - private String partitionKey; - private long messageId; + private long partitionKey; + private long applicationIdentifier; public Message(){ } //Required for Kafka Serialization and Deserialization - public Message(String partitionKey, long messageId) { + public Message(long partitionKey, long applicationIdentifier) { this.partitionKey = partitionKey; - this.messageId = messageId; + this.applicationIdentifier = applicationIdentifier; } - public String getPartitionKey() { + public long getPartitionKey() { return partitionKey; } - public long getMessageId() { - return messageId; + public long getApplicationIdentifier() { + return applicationIdentifier; } @Override public int compareTo(Message other) { - return Long.compare(this.messageId, other.messageId); + return Long.compare(this.partitionKey, other.partitionKey); } @Override @@ -40,10 +40,10 @@ public class Message implements Comparable { return false; } Message message = (Message) obj; - return this.messageId == message.getMessageId() && Objects.equals(this.partitionKey, message.getPartitionKey()); + return this.applicationIdentifier == message.getApplicationIdentifier() && Objects.equals(this.partitionKey, message.getPartitionKey()); } - public static long getRandomMessageId() { + public static long getRandomApplicationIdentifier() { Random rand = new Random(); return ThreadLocalRandom.current().nextInt(1000); } diff --git a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/MultiplePartitionTest.java b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/MultiplePartitionTest.java index aed5f30e9d..ef0a881999 100644 --- a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/MultiplePartitionTest.java +++ b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/MultiplePartitionTest.java @@ -3,7 +3,6 @@ package com.baeldung.kafka.message.ordering; import com.baeldung.kafka.message.ordering.payload.Message; import com.baeldung.kafka.message.ordering.serialization.JacksonDeserializer; import com.baeldung.kafka.message.ordering.serialization.JacksonSerializer; -import lombok.var; import org.apache.kafka.clients.admin.*; import org.apache.kafka.clients.consumer.ConsumerConfig; import org.apache.kafka.clients.consumer.ConsumerRecords; @@ -13,9 +12,8 @@ import org.apache.kafka.clients.producer.ProducerConfig; import org.apache.kafka.clients.producer.ProducerRecord; import org.apache.kafka.clients.producer.RecordMetadata; import org.apache.kafka.common.KafkaFuture; -import org.apache.kafka.common.PartitionInfo; -import org.apache.kafka.common.serialization.StringDeserializer; -import org.apache.kafka.common.serialization.StringSerializer; +import org.apache.kafka.common.serialization.LongDeserializer; +import org.apache.kafka.common.serialization.LongSerializer; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; @@ -36,8 +34,8 @@ public class MultiplePartitionTest { private static int PARTITIONS = 5; private static short REPLICATION_FACTOR = 1; private static Admin admin; - private static KafkaProducer producer; - private static KafkaConsumer consumer; + private static KafkaProducer producer; + private static KafkaConsumer 
consumer; private static final Duration TIMEOUT_WAIT_FOR_MESSAGES = Duration.ofMillis(5000); @Container private static final KafkaContainer KAFKA_CONTAINER = new KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka:latest")); @@ -51,12 +49,12 @@ public class MultiplePartitionTest { Properties producerProperties = new Properties(); producerProperties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, KAFKA_CONTAINER.getBootstrapServers()); - producerProperties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()); + producerProperties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, LongSerializer.class.getName()); producerProperties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JacksonSerializer.class.getName()); Properties consumerProperties = new Properties(); consumerProperties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, KAFKA_CONTAINER.getBootstrapServers()); - consumerProperties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName()); + consumerProperties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, LongDeserializer.class.getName()); consumerProperties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, JacksonDeserializer.class.getName()); consumerProperties.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); consumerProperties.put(Config.CONSUMER_VALUE_DESERIALIZER_SERIALIZED_CLASS, Message.class); @@ -87,11 +85,10 @@ public class MultiplePartitionTest { void givenMultiplePartitions_whenPublishedToKafkaAndConsumed_thenCheckForMessageOrder() throws ExecutionException, InterruptedException { List sentMessageList = new ArrayList<>(); List receivedMessageList = new ArrayList<>(); - for (long insertPosition = 1; insertPosition <= 10 ; insertPosition++) { - long messageId = Message.getRandomMessageId(); - String key = "Key-" + insertPosition; - Message message = new Message(key, messageId); - Future future = producer.send(new ProducerRecord<>(TOPIC, key, message)); + for (long partitionKey = 1; partitionKey <= 10 ; partitionKey++) { + long applicationIdentifier = Message.getRandomApplicationIdentifier(); + Message message = new Message(partitionKey, applicationIdentifier); + Future future = producer.send(new ProducerRecord<>(TOPIC, partitionKey, message)); sentMessageList.add(message); RecordMetadata metadata = future.get(); System.out.println("Partition : " + metadata.partition()); @@ -99,7 +96,7 @@ public class MultiplePartitionTest { boolean isOrderMaintained = true; consumer.subscribe(Collections.singletonList(TOPIC)); - ConsumerRecords records = consumer.poll(TIMEOUT_WAIT_FOR_MESSAGES); + ConsumerRecords records = consumer.poll(TIMEOUT_WAIT_FOR_MESSAGES); records.forEach(record -> { Message message = record.value(); receivedMessageList.add(message); diff --git a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/SinglePartitionTest.java b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/SinglePartitionTest.java index 5751c8d0e0..350a28e7c1 100644 --- a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/SinglePartitionTest.java +++ b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/SinglePartitionTest.java @@ -15,8 +15,8 @@ import org.apache.kafka.clients.producer.ProducerConfig; import org.apache.kafka.clients.producer.ProducerRecord; import org.apache.kafka.clients.producer.RecordMetadata; import org.apache.kafka.common.KafkaFuture; -import org.apache.kafka.common.serialization.StringDeserializer; -import 
org.apache.kafka.common.serialization.StringSerializer; +import org.apache.kafka.common.serialization.LongDeserializer; +import org.apache.kafka.common.serialization.LongSerializer; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; @@ -36,8 +36,8 @@ public class SinglePartitionTest { private static int PARTITIONS = 1; private static short REPLICATION_FACTOR = 1; private static Admin admin; - private static KafkaProducer producer; - private static KafkaConsumer consumer; + private static KafkaProducer producer; + private static KafkaConsumer consumer; private static final Duration TIMEOUT_WAIT_FOR_MESSAGES = Duration.ofMillis(5000); @@ -53,12 +53,12 @@ public class SinglePartitionTest { Properties producerProperties = new Properties(); producerProperties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, KAFKA_CONTAINER.getBootstrapServers()); - producerProperties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()); + producerProperties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, LongSerializer.class.getName()); producerProperties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JacksonSerializer.class.getName()); Properties consumerProperties = new Properties(); consumerProperties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, KAFKA_CONTAINER.getBootstrapServers()); - consumerProperties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName()); + consumerProperties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, LongDeserializer.class.getName()); consumerProperties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, JacksonDeserializer.class.getName()); consumerProperties.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); consumerProperties.put(Config.CONSUMER_VALUE_DESERIALIZER_SERIALIZED_CLASS, Message.class); @@ -89,11 +89,10 @@ public class SinglePartitionTest { void givenASinglePartition_whenPublishedToKafkaAndConsumed_thenCheckForMessageOrder() throws ExecutionException, InterruptedException { List sentMessageList = new ArrayList<>(); List receivedMessageList = new ArrayList<>(); - for (long insertPosition = 1; insertPosition <= 10 ; insertPosition++) { - long messageId = Message.getRandomMessageId(); - String key = "Key-" + insertPosition; - Message message = new Message(key, messageId); - ProducerRecord producerRecord = new ProducerRecord<>(TOPIC, key, message); + for (long partitionKey = 1; partitionKey <= 10 ; partitionKey++) { + long applicationIdentifier = Message.getRandomApplicationIdentifier(); + Message message = new Message(partitionKey, applicationIdentifier); + ProducerRecord producerRecord = new ProducerRecord<>(TOPIC, partitionKey, message); Future future = producer.send(producerRecord); sentMessageList.add(message); RecordMetadata metadata = future.get(); @@ -101,7 +100,7 @@ public class SinglePartitionTest { } consumer.subscribe(Collections.singletonList(TOPIC)); - ConsumerRecords records = consumer.poll(TIMEOUT_WAIT_FOR_MESSAGES); + ConsumerRecords records = consumer.poll(TIMEOUT_WAIT_FOR_MESSAGES); records.forEach(record -> { Message message = record.value(); receivedMessageList.add(message); From 7c40b82bf9fb27130ff0c8b97a0092616054fe7d Mon Sep 17 00:00:00 2001 From: Amol Gote Date: Sun, 15 Oct 2023 19:30:29 -0400 Subject: [PATCH 28/63] Build Failure - Priority:3 Unit test class names need to end in UnitTest, integration tests with IntegrationTest, etc. 
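
For context, a minimal sketch of a test class that satisfies this naming convention is shown here; the class name, test name, and placeholder assertion are illustrative only and are not part of this patch set (the repository's actual tests exercise Kafka through Testcontainers):

package com.baeldung.kafka.message.ordering;

import org.junit.jupiter.api.Test;

import static org.junit.jupiter.api.Assertions.assertTrue;

// Illustrative sketch: the "IntegrationTest" suffix is what the build convention keys on.
public class ExampleOrderingIntegrationTest {

    @Test
    void givenAClassNamedWithIntegrationTestSuffix_whenBuildRuns_thenItIsTreatedAsAnIntegrationTest() {
        // Placeholder assertion; real integration tests publish and consume records against a Kafka container.
        assertTrue(true);
    }
}

The renames below apply exactly this convention to the existing MultiplePartitionTest and SinglePartitionTest classes.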
--- ...PartitionTest.java => MultiplePartitionIntegrationTest.java} | 2 +- ...lePartitionTest.java => SinglePartitionIntegrationTest.java} | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) rename apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/{MultiplePartitionTest.java => MultiplePartitionIntegrationTest.java} (99%) rename apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/{SinglePartitionTest.java => SinglePartitionIntegrationTest.java} (99%) diff --git a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/MultiplePartitionTest.java b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/MultiplePartitionIntegrationTest.java similarity index 99% rename from apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/MultiplePartitionTest.java rename to apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/MultiplePartitionIntegrationTest.java index ef0a881999..c948effd70 100644 --- a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/MultiplePartitionTest.java +++ b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/MultiplePartitionIntegrationTest.java @@ -29,7 +29,7 @@ import java.util.concurrent.Future; import static org.junit.jupiter.api.Assertions.*; @Testcontainers -public class MultiplePartitionTest { +public class MultiplePartitionIntegrationTest { private static String TOPIC = "multi_partition_topic"; private static int PARTITIONS = 5; private static short REPLICATION_FACTOR = 1; diff --git a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/SinglePartitionTest.java b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/SinglePartitionIntegrationTest.java similarity index 99% rename from apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/SinglePartitionTest.java rename to apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/SinglePartitionIntegrationTest.java index 350a28e7c1..87b7d07431 100644 --- a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/SinglePartitionTest.java +++ b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/SinglePartitionIntegrationTest.java @@ -31,7 +31,7 @@ import java.util.concurrent.Future; import static org.junit.jupiter.api.Assertions.assertTrue; @Testcontainers -public class SinglePartitionTest { +public class SinglePartitionIntegrationTest { private static String TOPIC = "single_partition_topic"; private static int PARTITIONS = 1; private static short REPLICATION_FACTOR = 1; From baaef6bcf3599c79d90cf8e6fc9f96df59c47e4d Mon Sep 17 00:00:00 2001 From: Amol Gote Date: Mon, 16 Oct 2023 19:50:02 -0400 Subject: [PATCH 29/63] Added tests for External Sequence number with Time Window --- .../ExtSeqWithTimeWindowConsumer.java | 6 +- .../ExtSeqWithTimeWindowProducer.java | 1 + .../message/ordering/payload/Message.java | 12 +- .../ExtSeqWithTimeWindowIntegrationTest.java | 138 ++++++++++++++++++ 4 files changed, 153 insertions(+), 4 deletions(-) create mode 100644 apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowIntegrationTest.java diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowConsumer.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowConsumer.java index 19595d9e95..cd424178ae 100644 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowConsumer.java +++ 
b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowConsumer.java @@ -24,12 +24,12 @@ public class ExtSeqWithTimeWindowConsumer { props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, JacksonDeserializer.class.getName()); props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); props.put(Config.CONSUMER_VALUE_DESERIALIZER_SERIALIZED_CLASS, Message.class); - Consumer consumer = new KafkaConsumer<>(props); + Consumer consumer = new KafkaConsumer<>(props); consumer.subscribe(Collections.singletonList("multi_partition_topic")); List buffer = new ArrayList<>(); long lastProcessedTime = System.nanoTime(); while (true) { - ConsumerRecords records = consumer.poll(TIMEOUT_WAIT_FOR_MESSAGES); + ConsumerRecords records = consumer.poll(TIMEOUT_WAIT_FOR_MESSAGES); records.forEach(record -> { buffer.add(record.value()); }); @@ -43,7 +43,7 @@ public class ExtSeqWithTimeWindowConsumer { private static void processBuffer(List buffer) { Collections.sort(buffer); buffer.forEach(message -> { - System.out.println("Processing message with Partition key: " + message.getPartitionKey() + ", Application Identifier: " + message.getApplicationIdentifier()); + System.out.println("Processing message with Global Sequence number: " + message.getPartitionKey() + ", Application Identifier: " + message.getApplicationIdentifier()); }); buffer.clear(); } diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowProducer.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowProducer.java index a20c569159..99e05990cb 100644 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowProducer.java +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowProducer.java @@ -19,6 +19,7 @@ public class ExtSeqWithTimeWindowProducer { for (long partitionKey = 1; partitionKey <= 10 ; partitionKey++) { long applicationIdentifier = Message.getRandomApplicationIdentifier(); Message message = new Message(partitionKey, applicationIdentifier); + message.setGlobalSequenceNumber(partitionKey); producer.send(new ProducerRecord<>("multi_partition_topic", partitionKey, message)); System.out.println("Partition key: " + message.getPartitionKey() + ", Application Identifier: " + message.getApplicationIdentifier()); } diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/payload/Message.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/payload/Message.java index 734ecba53d..694d84f9d8 100644 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/payload/Message.java +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/payload/Message.java @@ -8,6 +8,8 @@ public class Message implements Comparable { private long partitionKey; private long applicationIdentifier; + private long globalSequenceNumber; + public Message(){ } @@ -26,9 +28,17 @@ public class Message implements Comparable { return applicationIdentifier; } + public long getGlobalSequenceNumber() { + return globalSequenceNumber; + } + + public void setGlobalSequenceNumber(long globalSequenceNumber) { + this.globalSequenceNumber = globalSequenceNumber; + } + @Override public int compareTo(Message other) { - return Long.compare(this.partitionKey, other.partitionKey); + return Long.compare(this.globalSequenceNumber, other.globalSequenceNumber); } @Override diff --git 
a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowIntegrationTest.java b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowIntegrationTest.java new file mode 100644 index 0000000000..a01c230026 --- /dev/null +++ b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowIntegrationTest.java @@ -0,0 +1,138 @@ +package com.baeldung.kafka.message.ordering; + +import com.baeldung.kafka.message.ordering.payload.Message; +import com.baeldung.kafka.message.ordering.serialization.JacksonDeserializer; +import com.baeldung.kafka.message.ordering.serialization.JacksonSerializer; +import org.apache.kafka.clients.admin.*; +import org.apache.kafka.clients.consumer.ConsumerConfig; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.ProducerConfig; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.apache.kafka.clients.producer.RecordMetadata; +import org.apache.kafka.common.KafkaFuture; +import org.apache.kafka.common.serialization.LongDeserializer; +import org.apache.kafka.common.serialization.LongSerializer; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; +import org.testcontainers.containers.KafkaContainer; +import org.testcontainers.junit.jupiter.Container; +import org.testcontainers.junit.jupiter.Testcontainers; +import org.testcontainers.utility.DockerImageName; +import java.time.Duration; +import java.util.*; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.Future; + +import static org.junit.jupiter.api.Assertions.*; + +@Testcontainers +public class ExtSeqWithTimeWindowIntegrationTest { + private static String TOPIC = "multi_partition_topic"; + private static int PARTITIONS = 5; + private static short REPLICATION_FACTOR = 1; + private static Admin admin; + private static KafkaProducer producer; + private static KafkaConsumer consumer; + private static final Duration TIMEOUT_WAIT_FOR_MESSAGES = Duration.ofMillis(5000); + + private static final long BUFFER_PERIOD_NS = 5000L * 1000000; // 5000 milliseconds converted to nanoseconds + @Container + private static final KafkaContainer KAFKA_CONTAINER = new KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka:latest")); + + @BeforeAll + static void setup() throws ExecutionException, InterruptedException { + KAFKA_CONTAINER.addExposedPort(9092); + + Properties adminProperties = new Properties(); + adminProperties.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, KAFKA_CONTAINER.getBootstrapServers()); + + Properties producerProperties = new Properties(); + producerProperties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, KAFKA_CONTAINER.getBootstrapServers()); + producerProperties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, LongSerializer.class.getName()); + producerProperties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JacksonSerializer.class.getName()); + + Properties consumerProperties = new Properties(); + consumerProperties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, KAFKA_CONTAINER.getBootstrapServers()); + consumerProperties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, LongDeserializer.class.getName()); + consumerProperties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, JacksonDeserializer.class.getName()); + 
consumerProperties.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); + consumerProperties.put(Config.CONSUMER_VALUE_DESERIALIZER_SERIALIZED_CLASS, Message.class); + consumerProperties.put(ConsumerConfig.GROUP_ID_CONFIG, "test-group"); + admin = Admin.create(adminProperties); + producer = new KafkaProducer<>(producerProperties); + consumer = new KafkaConsumer<>(consumerProperties); + List topicList = new ArrayList<>(); + NewTopic newTopic = new NewTopic(TOPIC, PARTITIONS, REPLICATION_FACTOR); + topicList.add(newTopic); + CreateTopicsResult result = admin.createTopics(topicList); + KafkaFuture future = result.values().get(TOPIC); + future.whenComplete((voidResult, exception) -> { + if (exception != null) { + System.err.println("Error creating the topic: " + exception.getMessage()); + } else { + System.out.println("Topic created successfully!"); + } + }).get(); + } + + @AfterAll + static void destroy() { + KAFKA_CONTAINER.stop(); + } + + @Test + void givenMultiplePartitions_whenPublishedToKafkaAndConsumedWithExtSeqNumberAndTimeWindow_thenCheckForMessageOrder() throws ExecutionException, InterruptedException { + List sentMessageList = new ArrayList<>(); + List receivedMessageList = new ArrayList<>(); + for (long partitionKey = 1; partitionKey <= 10 ; partitionKey++) { + long applicationIdentifier = Message.getRandomApplicationIdentifier(); + Message message = new Message(partitionKey, applicationIdentifier); + message.setGlobalSequenceNumber(partitionKey); + Future future = producer.send(new ProducerRecord<>(TOPIC, partitionKey, message)); + sentMessageList.add(message); + RecordMetadata metadata = future.get(); + System.out.println("Partition : " + metadata.partition()); + } + + boolean isOrderMaintained = true; + consumer.subscribe(Collections.singletonList(TOPIC)); + List buffer = new ArrayList<>(); + long lastProcessedTime = System.nanoTime(); + ConsumerRecords records = consumer.poll(TIMEOUT_WAIT_FOR_MESSAGES); + records.forEach(record -> { + buffer.add(record.value()); + }); + while (buffer.size() > 0) { + if (System.nanoTime() - lastProcessedTime > BUFFER_PERIOD_NS) { + processBuffer(buffer, receivedMessageList); + lastProcessedTime = System.nanoTime(); + } + records = consumer.poll(TIMEOUT_WAIT_FOR_MESSAGES); + records.forEach(record -> { + buffer.add(record.value()); + }); + } + for (int insertPosition = 0; insertPosition <= receivedMessageList.size() - 1; insertPosition++) { + if (isOrderMaintained){ + Message sentMessage = sentMessageList.get(insertPosition); + Message receivedMessage = receivedMessageList.get(insertPosition); + if (!sentMessage.equals(receivedMessage)) { + isOrderMaintained = false; + } + } + } + assertTrue(isOrderMaintained); + } + + private static void processBuffer(List buffer, List receivedMessageList) { + Collections.sort(buffer); + buffer.forEach(message -> { + receivedMessageList.add(message); + System.out.println("Processing message with Global Sequence number: " + message.getGlobalSequenceNumber() + ", Application Identifier: " + message.getApplicationIdentifier()); + }); + buffer.clear(); + } +} From 5b936c47a0c9a4cbf1ad4f8b695d47d7b76ca5be Mon Sep 17 00:00:00 2001 From: Amol Gote Date: Tue, 17 Oct 2023 19:39:27 -0400 Subject: [PATCH 30/63] Message to User Event Rename --- .../ExtSeqWithTimeWindowConsumer.java | 17 +++--- .../ExtSeqWithTimeWindowProducer.java | 17 +++--- .../ordering/MultiPartitionConsumer.java | 15 +++-- .../ordering/MultiPartitionProducer.java | 15 ++--- .../ordering/SinglePartitionConsumer.java | 13 ++-- 
.../ordering/SinglePartitionProducer.java | 16 ++--- .../message/ordering/payload/Message.java | 61 ------------------- .../message/ordering/payload/UserEvent.java | 58 ++++++++++++++++++ .../ExtSeqWithTimeWindowIntegrationTest.java | 46 +++++++------- .../MultiplePartitionIntegrationTest.java | 36 +++++------ .../SinglePartitionIntegrationTest.java | 34 +++++------ 11 files changed, 163 insertions(+), 165 deletions(-) delete mode 100644 apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/payload/Message.java create mode 100644 apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/payload/UserEvent.java diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowConsumer.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowConsumer.java index cd424178ae..639a980462 100644 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowConsumer.java +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowConsumer.java @@ -1,13 +1,12 @@ package com.baeldung.kafka.message.ordering; -import com.baeldung.kafka.message.ordering.payload.Message; +import com.baeldung.kafka.message.ordering.payload.UserEvent; import com.baeldung.kafka.message.ordering.serialization.JacksonDeserializer; import org.apache.kafka.clients.consumer.Consumer; import org.apache.kafka.clients.consumer.ConsumerConfig; import org.apache.kafka.clients.consumer.ConsumerRecords; import org.apache.kafka.clients.consumer.KafkaConsumer; import org.apache.kafka.common.serialization.LongDeserializer; -import org.apache.kafka.common.serialization.StringDeserializer; import java.time.Duration; import java.util.*; @@ -23,13 +22,13 @@ public class ExtSeqWithTimeWindowConsumer { props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, LongDeserializer.class.getName()); props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, JacksonDeserializer.class.getName()); props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); - props.put(Config.CONSUMER_VALUE_DESERIALIZER_SERIALIZED_CLASS, Message.class); - Consumer consumer = new KafkaConsumer<>(props); + props.put(Config.CONSUMER_VALUE_DESERIALIZER_SERIALIZED_CLASS, UserEvent.class); + Consumer consumer = new KafkaConsumer<>(props); consumer.subscribe(Collections.singletonList("multi_partition_topic")); - List buffer = new ArrayList<>(); + List buffer = new ArrayList<>(); long lastProcessedTime = System.nanoTime(); while (true) { - ConsumerRecords records = consumer.poll(TIMEOUT_WAIT_FOR_MESSAGES); + ConsumerRecords records = consumer.poll(TIMEOUT_WAIT_FOR_MESSAGES); records.forEach(record -> { buffer.add(record.value()); }); @@ -40,10 +39,10 @@ public class ExtSeqWithTimeWindowConsumer { } } - private static void processBuffer(List buffer) { + private static void processBuffer(List buffer) { Collections.sort(buffer); - buffer.forEach(message -> { - System.out.println("Processing message with Global Sequence number: " + message.getPartitionKey() + ", Application Identifier: " + message.getApplicationIdentifier()); + buffer.forEach(userEvent -> { + System.out.println("Processing message with Global Sequence number: " + userEvent.getGlobalSequenceNumber() + ", event nano time : " + userEvent.getEventNanoTime()); }); buffer.clear(); } diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowProducer.java 
b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowProducer.java index 99e05990cb..c18e35b351 100644 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowProducer.java +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowProducer.java @@ -1,6 +1,6 @@ package com.baeldung.kafka.message.ordering; -import com.baeldung.kafka.message.ordering.payload.Message; +import com.baeldung.kafka.message.ordering.payload.UserEvent; import com.baeldung.kafka.message.ordering.serialization.JacksonSerializer; import org.apache.kafka.clients.producer.KafkaProducer; import org.apache.kafka.clients.producer.ProducerConfig; @@ -8,6 +8,7 @@ import org.apache.kafka.clients.producer.ProducerRecord; import org.apache.kafka.common.serialization.LongSerializer; import java.util.Properties; +import java.util.UUID; public class ExtSeqWithTimeWindowProducer { public static void main(String[] args) { @@ -15,13 +16,13 @@ public class ExtSeqWithTimeWindowProducer { props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, LongSerializer.class.getName()); props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JacksonSerializer.class.getName()); - KafkaProducer producer = new KafkaProducer<>(props); - for (long partitionKey = 1; partitionKey <= 10 ; partitionKey++) { - long applicationIdentifier = Message.getRandomApplicationIdentifier(); - Message message = new Message(partitionKey, applicationIdentifier); - message.setGlobalSequenceNumber(partitionKey); - producer.send(new ProducerRecord<>("multi_partition_topic", partitionKey, message)); - System.out.println("Partition key: " + message.getPartitionKey() + ", Application Identifier: " + message.getApplicationIdentifier()); + KafkaProducer producer = new KafkaProducer<>(props); + for (long sequenceNumber = 1; sequenceNumber <= 10 ; sequenceNumber++) { + UserEvent userEvent = new UserEvent(UUID.randomUUID().toString()); + userEvent.setEventNanoTime(System.nanoTime()); + userEvent.setGlobalSequenceNumber(sequenceNumber); + producer.send(new ProducerRecord<>("multi_partition_topic", sequenceNumber, userEvent)); + System.out.println("User Event Nano time : " + userEvent.getEventNanoTime() + ", User Event Id: " + userEvent.getUserEventId()); } producer.close(); System.out.println("ExternalSequencingProducer Completed."); diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/MultiPartitionConsumer.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/MultiPartitionConsumer.java index c37c645d31..e738832425 100644 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/MultiPartitionConsumer.java +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/MultiPartitionConsumer.java @@ -1,13 +1,12 @@ package com.baeldung.kafka.message.ordering; -import com.baeldung.kafka.message.ordering.payload.Message; +import com.baeldung.kafka.message.ordering.payload.UserEvent; import com.baeldung.kafka.message.ordering.serialization.JacksonDeserializer; import org.apache.kafka.clients.consumer.Consumer; import org.apache.kafka.clients.consumer.ConsumerConfig; import org.apache.kafka.clients.consumer.ConsumerRecords; import org.apache.kafka.clients.consumer.KafkaConsumer; import org.apache.kafka.common.serialization.LongDeserializer; -import org.apache.kafka.common.serialization.StringDeserializer; import java.time.Duration; import 
java.util.Collections; @@ -22,15 +21,15 @@ public class MultiPartitionConsumer { props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, LongDeserializer.class.getName()); props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, JacksonDeserializer.class.getName()); props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); - props.put(Config.CONSUMER_VALUE_DESERIALIZER_SERIALIZED_CLASS, Message.class); - Consumer consumer = new KafkaConsumer<>(props); + props.put(Config.CONSUMER_VALUE_DESERIALIZER_SERIALIZED_CLASS, UserEvent.class); + Consumer consumer = new KafkaConsumer<>(props); consumer.subscribe(Collections.singletonList("multi_partition_topic")); while (true) { - ConsumerRecords records = consumer.poll(TIMEOUT_WAIT_FOR_MESSAGES); + ConsumerRecords records = consumer.poll(TIMEOUT_WAIT_FOR_MESSAGES); records.forEach(record -> { - Message message = record.value(); - if (message != null) { - System.out.println("Process message with Partition key: " + message.getPartitionKey() + ", Application Identifier: " + message.getApplicationIdentifier()); + UserEvent userEvent = record.value(); + if (userEvent != null) { + System.out.println("Process message with event nano time : " + userEvent.getEventNanoTime() + ", Event ID: " + userEvent.getUserEventId()); } }); } diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/MultiPartitionProducer.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/MultiPartitionProducer.java index 81cc5c6af0..db02c87bbe 100644 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/MultiPartitionProducer.java +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/MultiPartitionProducer.java @@ -1,6 +1,6 @@ package com.baeldung.kafka.message.ordering; -import com.baeldung.kafka.message.ordering.payload.Message; +import com.baeldung.kafka.message.ordering.payload.UserEvent; import com.baeldung.kafka.message.ordering.serialization.JacksonSerializer; import org.apache.kafka.clients.producer.KafkaProducer; import org.apache.kafka.clients.producer.ProducerConfig; @@ -8,6 +8,7 @@ import org.apache.kafka.clients.producer.ProducerRecord; import org.apache.kafka.common.serialization.LongSerializer; import java.util.Properties; +import java.util.UUID; public class MultiPartitionProducer { public static void main(String[] args) { @@ -15,12 +16,12 @@ public class MultiPartitionProducer { props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, LongSerializer.class.getName()); props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JacksonSerializer.class.getName()); - KafkaProducer producer = new KafkaProducer<>(props); - for (long partitionKey = 1; partitionKey <= 10 ; partitionKey++) { - long applicationIdentifier = Message.getRandomApplicationIdentifier(); - Message message = new Message(partitionKey, applicationIdentifier); - producer.send(new ProducerRecord<>("multi_partition_topic", partitionKey, message)); - System.out.println("Partition Key: " + message.getPartitionKey() + ", Application Identifier: " + message.getApplicationIdentifier()); + KafkaProducer producer = new KafkaProducer<>(props); + for (long count = 1; count <= 10 ; count++) { + UserEvent userEvent = new UserEvent(UUID.randomUUID().toString()); + userEvent.setEventNanoTime(System.nanoTime()); + producer.send(new ProducerRecord<>("multi_partition_topic", count, userEvent)); + System.out.println("Process message with Event ID: " + userEvent.getUserEventId()); } 
producer.close(); System.out.println("SinglePartitionProducer Completed."); diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/SinglePartitionConsumer.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/SinglePartitionConsumer.java index 9f44cd78c6..5f5ce86924 100644 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/SinglePartitionConsumer.java +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/SinglePartitionConsumer.java @@ -1,13 +1,12 @@ package com.baeldung.kafka.message.ordering; -import com.baeldung.kafka.message.ordering.payload.Message; +import com.baeldung.kafka.message.ordering.payload.UserEvent; import com.baeldung.kafka.message.ordering.serialization.JacksonDeserializer; import org.apache.kafka.clients.consumer.Consumer; import org.apache.kafka.clients.consumer.ConsumerConfig; import org.apache.kafka.clients.consumer.ConsumerRecords; import org.apache.kafka.clients.consumer.KafkaConsumer; import org.apache.kafka.common.serialization.LongDeserializer; -import org.apache.kafka.common.serialization.StringDeserializer; import java.time.Duration; import java.util.Collections; @@ -23,14 +22,14 @@ public class SinglePartitionConsumer { props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, LongDeserializer.class.getName()); props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, JacksonDeserializer.class.getName()); props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); - props.put(Config.CONSUMER_VALUE_DESERIALIZER_SERIALIZED_CLASS, Message.class); - Consumer consumer = new KafkaConsumer<>(props); + props.put(Config.CONSUMER_VALUE_DESERIALIZER_SERIALIZED_CLASS, UserEvent.class); + Consumer consumer = new KafkaConsumer<>(props); consumer.subscribe(Collections.singletonList("single_partition_topic")); while (true) { - ConsumerRecords records = consumer.poll(TIMEOUT_WAIT_FOR_MESSAGES); + ConsumerRecords records = consumer.poll(TIMEOUT_WAIT_FOR_MESSAGES); records.forEach(record -> { - Message message = record.value(); - System.out.println("Process message with Partition Key: " + message.getPartitionKey() + ", Application Identifier: " + message.getApplicationIdentifier()); + UserEvent userEvent = record.value(); + System.out.println("Process message with event nano time : " + userEvent.getEventNanoTime() + ", Event ID: " + userEvent.getUserEventId()); }); } } diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/SinglePartitionProducer.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/SinglePartitionProducer.java index efa6e5b93d..2a7719e34f 100644 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/SinglePartitionProducer.java +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/SinglePartitionProducer.java @@ -1,13 +1,15 @@ package com.baeldung.kafka.message.ordering; -import com.baeldung.kafka.message.ordering.payload.Message; +import com.baeldung.kafka.message.ordering.payload.UserEvent; import com.baeldung.kafka.message.ordering.serialization.JacksonSerializer; import org.apache.kafka.clients.producer.KafkaProducer; import org.apache.kafka.clients.producer.ProducerConfig; import org.apache.kafka.clients.producer.ProducerRecord; import org.apache.kafka.common.serialization.LongSerializer; +import java.time.Instant; import java.util.Properties; +import java.util.UUID; public class SinglePartitionProducer { public static void main(String[] args) { @@ -15,12 +17,12 @@ public class SinglePartitionProducer { 
props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, LongSerializer.class.getName()); props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JacksonSerializer.class.getName()); - KafkaProducer producer = new KafkaProducer<>(props); - for (long partitionKey = 1; partitionKey <= 10 ; partitionKey++) { - long applicationIdentifier = Message.getRandomApplicationIdentifier(); - Message message = new Message(partitionKey, applicationIdentifier); - producer.send(new ProducerRecord<>("single_partition_topic", partitionKey, message)); - System.out.println("Partition key: " + message.getPartitionKey() + ", Application Identifier: " + message.getApplicationIdentifier()); + KafkaProducer producer = new KafkaProducer<>(props); + for (long count = 1; count <= 10 ; count++) { + UserEvent userEvent = new UserEvent(UUID.randomUUID().toString()); + userEvent.setEventNanoTime(System.nanoTime()); + producer.send(new ProducerRecord<>("single_partition_topic", count, userEvent)); + System.out.println("Process message with Event ID: " + userEvent.getUserEventId()); } producer.close(); System.out.println("SinglePartitionProducer Completed."); diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/payload/Message.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/payload/Message.java deleted file mode 100644 index 694d84f9d8..0000000000 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/payload/Message.java +++ /dev/null @@ -1,61 +0,0 @@ -package com.baeldung.kafka.message.ordering.payload; - -import java.util.Objects; -import java.util.Random; -import java.util.concurrent.ThreadLocalRandom; - -public class Message implements Comparable { - private long partitionKey; - private long applicationIdentifier; - - private long globalSequenceNumber; - - public Message(){ - - } - - //Required for Kafka Serialization and Deserialization - public Message(long partitionKey, long applicationIdentifier) { - this.partitionKey = partitionKey; - this.applicationIdentifier = applicationIdentifier; - } - - public long getPartitionKey() { - return partitionKey; - } - - public long getApplicationIdentifier() { - return applicationIdentifier; - } - - public long getGlobalSequenceNumber() { - return globalSequenceNumber; - } - - public void setGlobalSequenceNumber(long globalSequenceNumber) { - this.globalSequenceNumber = globalSequenceNumber; - } - - @Override - public int compareTo(Message other) { - return Long.compare(this.globalSequenceNumber, other.globalSequenceNumber); - } - - @Override - public boolean equals(Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof Message)) { - return false; - } - Message message = (Message) obj; - return this.applicationIdentifier == message.getApplicationIdentifier() && Objects.equals(this.partitionKey, message.getPartitionKey()); - } - - public static long getRandomApplicationIdentifier() { - Random rand = new Random(); - return ThreadLocalRandom.current().nextInt(1000); - } -} - diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/payload/UserEvent.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/payload/UserEvent.java new file mode 100644 index 0000000000..0c4018e624 --- /dev/null +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/payload/UserEvent.java @@ -0,0 +1,58 @@ +package com.baeldung.kafka.message.ordering.payload; + +import java.util.Objects; +public class 
UserEvent implements Comparable { + private String userEventId; + + private long eventNanoTime; + + private long globalSequenceNumber; + + public UserEvent(){ + + } + + //Required for Kafka Serialization and Deserialization + public UserEvent(String userEventId) { + this.userEventId = userEventId; + } + + public String getUserEventId() { + return userEventId; + } + + public long getEventNanoTime() { + return eventNanoTime; + } + + public void setEventNanoTime(long eventNanoTime) { + this.eventNanoTime = eventNanoTime; + } + + public long getGlobalSequenceNumber() { + return globalSequenceNumber; + } + + public void setGlobalSequenceNumber(long globalSequenceNumber) { + this.globalSequenceNumber = globalSequenceNumber; + } + + @Override + public int compareTo(UserEvent other) { + return Long.compare(this.globalSequenceNumber, other.globalSequenceNumber); + } + + @Override + public boolean equals(Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof UserEvent)) { + return false; + } + UserEvent userEvent = (UserEvent) obj; + return Objects.equals(this.userEventId, userEvent.getUserEventId()) + && userEvent.getEventNanoTime() == this.eventNanoTime; + } +} + diff --git a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowIntegrationTest.java b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowIntegrationTest.java index a01c230026..5f540d5b50 100644 --- a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowIntegrationTest.java +++ b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowIntegrationTest.java @@ -1,6 +1,6 @@ package com.baeldung.kafka.message.ordering; -import com.baeldung.kafka.message.ordering.payload.Message; +import com.baeldung.kafka.message.ordering.payload.UserEvent; import com.baeldung.kafka.message.ordering.serialization.JacksonDeserializer; import com.baeldung.kafka.message.ordering.serialization.JacksonSerializer; import org.apache.kafka.clients.admin.*; @@ -34,8 +34,8 @@ public class ExtSeqWithTimeWindowIntegrationTest { private static int PARTITIONS = 5; private static short REPLICATION_FACTOR = 1; private static Admin admin; - private static KafkaProducer producer; - private static KafkaConsumer consumer; + private static KafkaProducer producer; + private static KafkaConsumer consumer; private static final Duration TIMEOUT_WAIT_FOR_MESSAGES = Duration.ofMillis(5000); private static final long BUFFER_PERIOD_NS = 5000L * 1000000; // 5000 milliseconds converted to nanoseconds @@ -59,7 +59,7 @@ public class ExtSeqWithTimeWindowIntegrationTest { consumerProperties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, LongDeserializer.class.getName()); consumerProperties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, JacksonDeserializer.class.getName()); consumerProperties.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); - consumerProperties.put(Config.CONSUMER_VALUE_DESERIALIZER_SERIALIZED_CLASS, Message.class); + consumerProperties.put(Config.CONSUMER_VALUE_DESERIALIZER_SERIALIZED_CLASS, UserEvent.class); consumerProperties.put(ConsumerConfig.GROUP_ID_CONFIG, "test-group"); admin = Admin.create(adminProperties); producer = new KafkaProducer<>(producerProperties); @@ -85,29 +85,29 @@ public class ExtSeqWithTimeWindowIntegrationTest { @Test void givenMultiplePartitions_whenPublishedToKafkaAndConsumedWithExtSeqNumberAndTimeWindow_thenCheckForMessageOrder() throws ExecutionException, InterruptedException { - 
List sentMessageList = new ArrayList<>(); - List receivedMessageList = new ArrayList<>(); - for (long partitionKey = 1; partitionKey <= 10 ; partitionKey++) { - long applicationIdentifier = Message.getRandomApplicationIdentifier(); - Message message = new Message(partitionKey, applicationIdentifier); - message.setGlobalSequenceNumber(partitionKey); - Future future = producer.send(new ProducerRecord<>(TOPIC, partitionKey, message)); - sentMessageList.add(message); + List sentUserEventList = new ArrayList<>(); + List receivedUserEventList = new ArrayList<>(); + for (long sequenceNumber = 1; sequenceNumber <= 10 ; sequenceNumber++) { + UserEvent userEvent = new UserEvent(UUID.randomUUID().toString()); + userEvent.setEventNanoTime(System.nanoTime()); + userEvent.setGlobalSequenceNumber(sequenceNumber); + Future future = producer.send(new ProducerRecord<>(TOPIC, sequenceNumber, userEvent)); + sentUserEventList.add(userEvent); RecordMetadata metadata = future.get(); System.out.println("Partition : " + metadata.partition()); } boolean isOrderMaintained = true; consumer.subscribe(Collections.singletonList(TOPIC)); - List buffer = new ArrayList<>(); + List buffer = new ArrayList<>(); long lastProcessedTime = System.nanoTime(); - ConsumerRecords records = consumer.poll(TIMEOUT_WAIT_FOR_MESSAGES); + ConsumerRecords records = consumer.poll(TIMEOUT_WAIT_FOR_MESSAGES); records.forEach(record -> { buffer.add(record.value()); }); while (buffer.size() > 0) { if (System.nanoTime() - lastProcessedTime > BUFFER_PERIOD_NS) { - processBuffer(buffer, receivedMessageList); + processBuffer(buffer, receivedUserEventList); lastProcessedTime = System.nanoTime(); } records = consumer.poll(TIMEOUT_WAIT_FOR_MESSAGES); @@ -115,11 +115,11 @@ public class ExtSeqWithTimeWindowIntegrationTest { buffer.add(record.value()); }); } - for (int insertPosition = 0; insertPosition <= receivedMessageList.size() - 1; insertPosition++) { + for (int insertPosition = 0; insertPosition <= receivedUserEventList.size() - 1; insertPosition++) { if (isOrderMaintained){ - Message sentMessage = sentMessageList.get(insertPosition); - Message receivedMessage = receivedMessageList.get(insertPosition); - if (!sentMessage.equals(receivedMessage)) { + UserEvent sentUserEvent = sentUserEventList.get(insertPosition); + UserEvent receivedUserEvent = receivedUserEventList.get(insertPosition); + if (!sentUserEvent.equals(receivedUserEvent)) { isOrderMaintained = false; } } @@ -127,11 +127,11 @@ public class ExtSeqWithTimeWindowIntegrationTest { assertTrue(isOrderMaintained); } - private static void processBuffer(List buffer, List receivedMessageList) { + private static void processBuffer(List buffer, List receivedUserEventList) { Collections.sort(buffer); - buffer.forEach(message -> { - receivedMessageList.add(message); - System.out.println("Processing message with Global Sequence number: " + message.getGlobalSequenceNumber() + ", Application Identifier: " + message.getApplicationIdentifier()); + buffer.forEach(userEvent -> { + receivedUserEventList.add(userEvent); + System.out.println("Process message with Event ID: " + userEvent.getUserEventId()); }); buffer.clear(); } diff --git a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/MultiplePartitionIntegrationTest.java b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/MultiplePartitionIntegrationTest.java index c948effd70..d4b88ad06f 100644 --- a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/MultiplePartitionIntegrationTest.java +++ 
b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/MultiplePartitionIntegrationTest.java @@ -1,6 +1,6 @@ package com.baeldung.kafka.message.ordering; -import com.baeldung.kafka.message.ordering.payload.Message; +import com.baeldung.kafka.message.ordering.payload.UserEvent; import com.baeldung.kafka.message.ordering.serialization.JacksonDeserializer; import com.baeldung.kafka.message.ordering.serialization.JacksonSerializer; import org.apache.kafka.clients.admin.*; @@ -34,8 +34,8 @@ public class MultiplePartitionIntegrationTest { private static int PARTITIONS = 5; private static short REPLICATION_FACTOR = 1; private static Admin admin; - private static KafkaProducer producer; - private static KafkaConsumer consumer; + private static KafkaProducer producer; + private static KafkaConsumer consumer; private static final Duration TIMEOUT_WAIT_FOR_MESSAGES = Duration.ofMillis(5000); @Container private static final KafkaContainer KAFKA_CONTAINER = new KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka:latest")); @@ -57,7 +57,7 @@ public class MultiplePartitionIntegrationTest { consumerProperties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, LongDeserializer.class.getName()); consumerProperties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, JacksonDeserializer.class.getName()); consumerProperties.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); - consumerProperties.put(Config.CONSUMER_VALUE_DESERIALIZER_SERIALIZED_CLASS, Message.class); + consumerProperties.put(Config.CONSUMER_VALUE_DESERIALIZER_SERIALIZED_CLASS, UserEvent.class); consumerProperties.put(ConsumerConfig.GROUP_ID_CONFIG, "test-group"); admin = Admin.create(adminProperties); producer = new KafkaProducer<>(producerProperties); @@ -83,29 +83,29 @@ public class MultiplePartitionIntegrationTest { @Test void givenMultiplePartitions_whenPublishedToKafkaAndConsumed_thenCheckForMessageOrder() throws ExecutionException, InterruptedException { - List sentMessageList = new ArrayList<>(); - List receivedMessageList = new ArrayList<>(); - for (long partitionKey = 1; partitionKey <= 10 ; partitionKey++) { - long applicationIdentifier = Message.getRandomApplicationIdentifier(); - Message message = new Message(partitionKey, applicationIdentifier); - Future future = producer.send(new ProducerRecord<>(TOPIC, partitionKey, message)); - sentMessageList.add(message); + List sentUserEventList = new ArrayList<>(); + List receivedUserEventList = new ArrayList<>(); + for (long count = 1; count <= 10 ; count++) { + UserEvent userEvent = new UserEvent(UUID.randomUUID().toString()); + userEvent.setEventNanoTime(System.nanoTime()); + Future future = producer.send(new ProducerRecord<>(TOPIC, count, userEvent)); + sentUserEventList.add(userEvent); RecordMetadata metadata = future.get(); System.out.println("Partition : " + metadata.partition()); } boolean isOrderMaintained = true; consumer.subscribe(Collections.singletonList(TOPIC)); - ConsumerRecords records = consumer.poll(TIMEOUT_WAIT_FOR_MESSAGES); + ConsumerRecords records = consumer.poll(TIMEOUT_WAIT_FOR_MESSAGES); records.forEach(record -> { - Message message = record.value(); - receivedMessageList.add(message); + UserEvent userEvent = record.value(); + receivedUserEventList.add(userEvent); }); - for (int insertPosition = 0; insertPosition <= receivedMessageList.size() - 1; insertPosition++) { + for (int insertPosition = 0; insertPosition <= receivedUserEventList.size() - 1; insertPosition++) { if (isOrderMaintained){ - Message sentMessage = 
sentMessageList.get(insertPosition); - Message receivedMessage = receivedMessageList.get(insertPosition); - if (!sentMessage.equals(receivedMessage)) { + UserEvent sentUserEvent = sentUserEventList.get(insertPosition); + UserEvent receivedUserEvent = receivedUserEventList.get(insertPosition); + if (!sentUserEvent.equals(receivedUserEvent)) { isOrderMaintained = false; } } diff --git a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/SinglePartitionIntegrationTest.java b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/SinglePartitionIntegrationTest.java index 87b7d07431..b41dc67686 100644 --- a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/SinglePartitionIntegrationTest.java +++ b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/SinglePartitionIntegrationTest.java @@ -1,6 +1,6 @@ package com.baeldung.kafka.message.ordering; -import com.baeldung.kafka.message.ordering.payload.Message; +import com.baeldung.kafka.message.ordering.payload.UserEvent; import com.baeldung.kafka.message.ordering.serialization.JacksonDeserializer; import com.baeldung.kafka.message.ordering.serialization.JacksonSerializer; import org.apache.kafka.clients.admin.Admin; @@ -36,8 +36,8 @@ public class SinglePartitionIntegrationTest { private static int PARTITIONS = 1; private static short REPLICATION_FACTOR = 1; private static Admin admin; - private static KafkaProducer producer; - private static KafkaConsumer consumer; + private static KafkaProducer producer; + private static KafkaConsumer consumer; private static final Duration TIMEOUT_WAIT_FOR_MESSAGES = Duration.ofMillis(5000); @@ -61,7 +61,7 @@ public class SinglePartitionIntegrationTest { consumerProperties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, LongDeserializer.class.getName()); consumerProperties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, JacksonDeserializer.class.getName()); consumerProperties.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); - consumerProperties.put(Config.CONSUMER_VALUE_DESERIALIZER_SERIALIZED_CLASS, Message.class); + consumerProperties.put(Config.CONSUMER_VALUE_DESERIALIZER_SERIALIZED_CLASS, UserEvent.class); consumerProperties.put(ConsumerConfig.GROUP_ID_CONFIG, "test-group"); admin = Admin.create(adminProperties); producer = new KafkaProducer<>(producerProperties); @@ -87,29 +87,29 @@ public class SinglePartitionIntegrationTest { @Test void givenASinglePartition_whenPublishedToKafkaAndConsumed_thenCheckForMessageOrder() throws ExecutionException, InterruptedException { - List sentMessageList = new ArrayList<>(); - List receivedMessageList = new ArrayList<>(); - for (long partitionKey = 1; partitionKey <= 10 ; partitionKey++) { - long applicationIdentifier = Message.getRandomApplicationIdentifier(); - Message message = new Message(partitionKey, applicationIdentifier); - ProducerRecord producerRecord = new ProducerRecord<>(TOPIC, partitionKey, message); + List sentUserEventList = new ArrayList<>(); + List receivedUserEventList = new ArrayList<>(); + for (long count = 1; count <= 10 ; count++) { + UserEvent userEvent = new UserEvent(UUID.randomUUID().toString()); + userEvent.setEventNanoTime(System.nanoTime()); + ProducerRecord producerRecord = new ProducerRecord<>(TOPIC, userEvent); Future future = producer.send(producerRecord); - sentMessageList.add(message); + sentUserEventList.add(userEvent); RecordMetadata metadata = future.get(); System.out.println("Partition : " + metadata.partition()); } consumer.subscribe(Collections.singletonList(TOPIC)); 
- ConsumerRecords records = consumer.poll(TIMEOUT_WAIT_FOR_MESSAGES); + ConsumerRecords records = consumer.poll(TIMEOUT_WAIT_FOR_MESSAGES); records.forEach(record -> { - Message message = record.value(); - receivedMessageList.add(message); + UserEvent userEvent = record.value(); + receivedUserEventList.add(userEvent); }); boolean result = true; for (int count = 0; count <= 9 ; count++) { - Message sentMessage = sentMessageList.get(count); - Message receivedMessage = receivedMessageList.get(count); - if (!sentMessage.equals(receivedMessage) && result){ + UserEvent sentUserEvent = sentUserEventList.get(count); + UserEvent receivedUserEvent = receivedUserEventList.get(count); + if (!sentUserEvent.equals(receivedUserEvent) && result){ result = false; } } From feca50daca29d74b161e7ccb6fff6bde710f01e6 Mon Sep 17 00:00:00 2001 From: Amol Gote Date: Tue, 31 Oct 2023 20:17:39 -0400 Subject: [PATCH 31/63] Incorporated review comments --- .../baeldung/kafka/message/ordering/Config.java | 3 +++ .../ordering/ExtSeqWithTimeWindowConsumer.java | 6 +++--- .../ordering/ExtSeqWithTimeWindowProducer.java | 12 ++++++++---- .../message/ordering/MultiPartitionConsumer.java | 9 +++++---- .../message/ordering/MultiPartitionProducer.java | 14 +++++++++----- .../message/ordering/ProducerConfigurations.java | 2 +- .../message/ordering/SinglePartitionConsumer.java | 6 +++--- .../message/ordering/SinglePartitionProducer.java | 14 +++++++++----- 8 files changed, 41 insertions(+), 25 deletions(-) diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/Config.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/Config.java index 2635e72431..12acfecf51 100644 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/Config.java +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/Config.java @@ -2,4 +2,7 @@ package com.baeldung.kafka.message.ordering; public class Config { public static final String CONSUMER_VALUE_DESERIALIZER_SERIALIZED_CLASS = "value.deserializer.serializedClass"; + public static final String KAFKA_LOCAL = "localhost:9092"; + public static final String MULTI_PARTITION_TOPIC = "multi_partition_topic"; + public static final String SINGLE_PARTITION_TOPIC = "single_partition_topic"; } diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowConsumer.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowConsumer.java index 639a980462..06cb7104b7 100644 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowConsumer.java +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowConsumer.java @@ -17,14 +17,14 @@ public class ExtSeqWithTimeWindowConsumer { public static void main(String[] args) { Properties props = new Properties(); - props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); + props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, Config.KAFKA_LOCAL); props.put(ConsumerConfig.GROUP_ID_CONFIG, "test-group"); props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, LongDeserializer.class.getName()); props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, JacksonDeserializer.class.getName()); props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); props.put(Config.CONSUMER_VALUE_DESERIALIZER_SERIALIZED_CLASS, UserEvent.class); Consumer consumer = new KafkaConsumer<>(props); - consumer.subscribe(Collections.singletonList("multi_partition_topic")); + 
consumer.subscribe(Collections.singletonList(Config.MULTI_PARTITION_TOPIC)); List buffer = new ArrayList<>(); long lastProcessedTime = System.nanoTime(); while (true) { @@ -42,7 +42,7 @@ public class ExtSeqWithTimeWindowConsumer { private static void processBuffer(List buffer) { Collections.sort(buffer); buffer.forEach(userEvent -> { - System.out.println("Processing message with Global Sequence number: " + userEvent.getGlobalSequenceNumber() + ", event nano time : " + userEvent.getEventNanoTime()); + System.out.println("Processing message with Global Sequence number: " + userEvent.getGlobalSequenceNumber() + ", User Event Id: " + userEvent.getUserEventId()); }); buffer.clear(); } diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowProducer.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowProducer.java index c18e35b351..73a62c0bf2 100644 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowProducer.java +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowProducer.java @@ -5,15 +5,18 @@ import com.baeldung.kafka.message.ordering.serialization.JacksonSerializer; import org.apache.kafka.clients.producer.KafkaProducer; import org.apache.kafka.clients.producer.ProducerConfig; import org.apache.kafka.clients.producer.ProducerRecord; +import org.apache.kafka.clients.producer.RecordMetadata; import org.apache.kafka.common.serialization.LongSerializer; import java.util.Properties; import java.util.UUID; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.Future; public class ExtSeqWithTimeWindowProducer { - public static void main(String[] args) { + public static void main(String[] args) throws ExecutionException, InterruptedException { Properties props = new Properties(); - props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); + props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, Config.KAFKA_LOCAL); props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, LongSerializer.class.getName()); props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JacksonSerializer.class.getName()); KafkaProducer producer = new KafkaProducer<>(props); @@ -21,8 +24,9 @@ public class ExtSeqWithTimeWindowProducer { UserEvent userEvent = new UserEvent(UUID.randomUUID().toString()); userEvent.setEventNanoTime(System.nanoTime()); userEvent.setGlobalSequenceNumber(sequenceNumber); - producer.send(new ProducerRecord<>("multi_partition_topic", sequenceNumber, userEvent)); - System.out.println("User Event Nano time : " + userEvent.getEventNanoTime() + ", User Event Id: " + userEvent.getUserEventId()); + Future future = producer.send(new ProducerRecord<>(Config.MULTI_PARTITION_TOPIC, sequenceNumber, userEvent)); + RecordMetadata metadata = future.get(); + System.out.println("User Event ID: " + userEvent.getUserEventId() + ", Partition : " + metadata.partition()); } producer.close(); System.out.println("ExternalSequencingProducer Completed."); diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/MultiPartitionConsumer.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/MultiPartitionConsumer.java index e738832425..82f05cc80e 100644 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/MultiPartitionConsumer.java +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/MultiPartitionConsumer.java @@ -14,22 +14,23 @@ import java.util.Properties; public class 
MultiPartitionConsumer { private static final Duration TIMEOUT_WAIT_FOR_MESSAGES = Duration.ofMillis(100); + public static void main(String[] args) { Properties props = new Properties(); - props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); - props.put(ConsumerConfig.GROUP_ID_CONFIG, "test-group"); + props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, Config.KAFKA_LOCAL); props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, LongDeserializer.class.getName()); props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, JacksonDeserializer.class.getName()); props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); props.put(Config.CONSUMER_VALUE_DESERIALIZER_SERIALIZED_CLASS, UserEvent.class); + props.put(ConsumerConfig.GROUP_ID_CONFIG, "test-group"); Consumer<Long, UserEvent> consumer = new KafkaConsumer<>(props); - consumer.subscribe(Collections.singletonList("multi_partition_topic")); + consumer.subscribe(Collections.singletonList(Config.MULTI_PARTITION_TOPIC)); while (true) { ConsumerRecords<Long, UserEvent> records = consumer.poll(TIMEOUT_WAIT_FOR_MESSAGES); records.forEach(record -> { UserEvent userEvent = record.value(); if (userEvent != null) { - System.out.println("Process message with event nano time : " + userEvent.getEventNanoTime() + ", Event ID: " + userEvent.getUserEventId()); + System.out.println("User Event ID: " + userEvent.getUserEventId()); } }); } diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/MultiPartitionProducer.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/MultiPartitionProducer.java index db02c87bbe..52da49ab80 100644 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/MultiPartitionProducer.java +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/MultiPartitionProducer.java @@ -5,25 +5,29 @@ import com.baeldung.kafka.message.ordering.serialization.JacksonSerializer; import org.apache.kafka.clients.producer.KafkaProducer; import org.apache.kafka.clients.producer.ProducerConfig; import org.apache.kafka.clients.producer.ProducerRecord; +import org.apache.kafka.clients.producer.RecordMetadata; import org.apache.kafka.common.serialization.LongSerializer; import java.util.Properties; import java.util.UUID; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.Future; public class MultiPartitionProducer { - public static void main(String[] args) { + public static void main(String[] args) throws ExecutionException, InterruptedException { Properties props = new Properties(); - props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); + props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, Config.KAFKA_LOCAL); props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, LongSerializer.class.getName()); props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JacksonSerializer.class.getName()); KafkaProducer<Long, UserEvent> producer = new KafkaProducer<>(props); for (long count = 1; count <= 10 ; count++) { UserEvent userEvent = new UserEvent(UUID.randomUUID().toString()); userEvent.setEventNanoTime(System.nanoTime()); - producer.send(new ProducerRecord<>("multi_partition_topic", count, userEvent)); - System.out.println("Process message with Event ID: " + userEvent.getUserEventId()); + Future<RecordMetadata> future = producer.send(new ProducerRecord<>(Config.MULTI_PARTITION_TOPIC, count, userEvent)); + RecordMetadata metadata = future.get(); + System.out.println("User Event ID: " + userEvent.getUserEventId() + ", Partition : " + metadata.partition()); } producer.close(); - 
System.out.println("SinglePartitionProducer Completed."); + System.out.println("MultiPartitionProducer Completed."); } } diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ProducerConfigurations.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ProducerConfigurations.java index 0eb563910e..61c9cb48aa 100644 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ProducerConfigurations.java +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ProducerConfigurations.java @@ -10,7 +10,7 @@ import java.util.Properties; public class ProducerConfigurations { public static void main(String[] args) { Properties props = new Properties(); - props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); + props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, Config.KAFKA_LOCAL); props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()); props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()); props.put(ProducerConfig.MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION, "1"); diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/SinglePartitionConsumer.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/SinglePartitionConsumer.java index 5f5ce86924..1c50f3cf7a 100644 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/SinglePartitionConsumer.java +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/SinglePartitionConsumer.java @@ -17,19 +17,19 @@ public class SinglePartitionConsumer { public static void main(String[] args) { Properties props = new Properties(); - props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); + props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, Config.KAFKA_LOCAL); props.put(ConsumerConfig.GROUP_ID_CONFIG, "test-group"); props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, LongDeserializer.class.getName()); props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, JacksonDeserializer.class.getName()); props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); props.put(Config.CONSUMER_VALUE_DESERIALIZER_SERIALIZED_CLASS, UserEvent.class); Consumer consumer = new KafkaConsumer<>(props); - consumer.subscribe(Collections.singletonList("single_partition_topic")); + consumer.subscribe(Collections.singletonList(Config.SINGLE_PARTITION_TOPIC)); while (true) { ConsumerRecords records = consumer.poll(TIMEOUT_WAIT_FOR_MESSAGES); records.forEach(record -> { UserEvent userEvent = record.value(); - System.out.println("Process message with event nano time : " + userEvent.getEventNanoTime() + ", Event ID: " + userEvent.getUserEventId()); + System.out.println("User Event ID: " + userEvent.getUserEventId()); }); } } diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/SinglePartitionProducer.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/SinglePartitionProducer.java index 2a7719e34f..9306abaebf 100644 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/SinglePartitionProducer.java +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/SinglePartitionProducer.java @@ -5,24 +5,28 @@ import com.baeldung.kafka.message.ordering.serialization.JacksonSerializer; import org.apache.kafka.clients.producer.KafkaProducer; import org.apache.kafka.clients.producer.ProducerConfig; import org.apache.kafka.clients.producer.ProducerRecord; +import 
org.apache.kafka.clients.producer.RecordMetadata; import org.apache.kafka.common.serialization.LongSerializer; import java.time.Instant; import java.util.Properties; import java.util.UUID; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.Future; public class SinglePartitionProducer { - public static void main(String[] args) { + public static void main(String[] args) throws ExecutionException, InterruptedException { Properties props = new Properties(); - props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); + props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, Config.KAFKA_LOCAL); props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, LongSerializer.class.getName()); props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JacksonSerializer.class.getName()); KafkaProducer<Long, UserEvent> producer = new KafkaProducer<>(props); - for (long count = 1; count <= 10 ; count++) { + for (long count = 1; count <= 10; count++) { UserEvent userEvent = new UserEvent(UUID.randomUUID().toString()); userEvent.setEventNanoTime(System.nanoTime()); - producer.send(new ProducerRecord<>("single_partition_topic", count, userEvent)); - System.out.println("Process message with Event ID: " + userEvent.getUserEventId()); + Future<RecordMetadata> future = producer.send(new ProducerRecord<>(Config.SINGLE_PARTITION_TOPIC, count, userEvent)); + RecordMetadata metadata = future.get(); + System.out.println("User Event ID: " + userEvent.getUserEventId() + ", Partition : " + metadata.partition()); } producer.close(); System.out.println("SinglePartitionProducer Completed."); From 88f85963bb1614a10025cadb123ab5b26bc2b489 Mon Sep 17 00:00:00 2001 From: Amol Gote Date: Tue, 31 Oct 2023 22:04:32 -0400 Subject: [PATCH 32/63] Config changes --- .../kafka/message/ordering/ConsumerConfigurations.java | 4 ++-- .../kafka/message/ordering/ProducerConfigurations.java | 7 ++++--- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ConsumerConfigurations.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ConsumerConfigurations.java index 15562cfcef..5b5a1f8e0b 100644 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ConsumerConfigurations.java +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ConsumerConfigurations.java @@ -16,7 +16,7 @@ public class ConsumerConfigurations { public static void main(String[] args) { Properties props = new Properties(); - props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); + props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, Config.KAFKA_LOCAL); props.put(ConsumerConfig.GROUP_ID_CONFIG, "test-group"); props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName()); props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName()); @@ -24,7 +24,7 @@ public class ConsumerConfigurations { props.put(ConsumerConfig.FETCH_MIN_BYTES_CONFIG, "1"); props.put(ConsumerConfig.FETCH_MAX_WAIT_MS_CONFIG, "500"); Consumer<String, String> consumer = new KafkaConsumer<>(props); - consumer.subscribe(Collections.singletonList("multi_partition_topic")); + consumer.subscribe(Collections.singletonList(Config.MULTI_PARTITION_TOPIC)); while (true) { ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(100)); diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ProducerConfigurations.java 
b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ProducerConfigurations.java index 61c9cb48aa..79fc42be3d 100644 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ProducerConfigurations.java +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ProducerConfigurations.java @@ -16,14 +16,15 @@ public class ProducerConfigurations { props.put(ProducerConfig.MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION, "1"); props.put(ProducerConfig.BATCH_SIZE_CONFIG, "16384"); props.put(ProducerConfig.LINGER_MS_CONFIG, "5"); + props.put(ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG, "true"); KafkaProducer producer = new KafkaProducer<>(props); for (int i = 0; i < 10; i++) { - String key = "Key-" + (i % 3); // Assuming 3 partitions - producer.send(new ProducerRecord<>("multi_partition_topic", key, "Message-" + i)); + String key = "Key-" + (i % 5); // Assuming 5 partitions + producer.send(new ProducerRecord<>(Config.MULTI_PARTITION_TOPIC, key, "Message-" + i)); } producer.close(); - System.out.println("MultiPartitionProducer Completed."); + System.out.println("Producer Configurations Completed."); } } From b7d743c62973913c244eed64befca97b45c6c8ee Mon Sep 17 00:00:00 2001 From: Amol Gote Date: Thu, 2 Nov 2023 19:40:36 -0400 Subject: [PATCH 33/63] Incorporated Review comments --- .../kafka/message/ordering/Config.java | 5 +- .../ordering/ConsumerConfigurations.java | 36 -------------- .../ExtSeqWithTimeWindowConsumer.java | 49 ------------------- .../ExtSeqWithTimeWindowProducer.java | 34 ------------- .../ordering/MultiPartitionConsumer.java | 39 --------------- .../ordering/MultiPartitionProducer.java | 33 ------------- .../ordering/ProducerConfigurations.java | 30 ------------ .../ordering/SinglePartitionConsumer.java | 37 -------------- .../ordering/SinglePartitionProducer.java | 35 ------------- .../ExtSeqWithTimeWindowIntegrationTest.java | 16 +++--- .../MultiplePartitionIntegrationTest.java | 15 +++--- .../SinglePartitionIntegrationTest.java | 15 +++--- 12 files changed, 25 insertions(+), 319 deletions(-) delete mode 100644 apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ConsumerConfigurations.java delete mode 100644 apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowConsumer.java delete mode 100644 apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowProducer.java delete mode 100644 apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/MultiPartitionConsumer.java delete mode 100644 apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/MultiPartitionProducer.java delete mode 100644 apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ProducerConfigurations.java delete mode 100644 apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/SinglePartitionConsumer.java delete mode 100644 apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/SinglePartitionProducer.java diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/Config.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/Config.java index 12acfecf51..9cc6314309 100644 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/Config.java +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/Config.java @@ -2,7 +2,10 @@ package com.baeldung.kafka.message.ordering; public class Config { public static final String CONSUMER_VALUE_DESERIALIZER_SERIALIZED_CLASS = "value.deserializer.serializedClass"; - public 
static final String KAFKA_LOCAL = "localhost:9092"; public static final String MULTI_PARTITION_TOPIC = "multi_partition_topic"; public static final String SINGLE_PARTITION_TOPIC = "single_partition_topic"; + + public static final int MULTIPLE_PARTITIONS = 5; + public static final int SINGLE_PARTITION = 1; + public static short REPLICATION_FACTOR = 1; } diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ConsumerConfigurations.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ConsumerConfigurations.java deleted file mode 100644 index 5b5a1f8e0b..0000000000 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ConsumerConfigurations.java +++ /dev/null @@ -1,36 +0,0 @@ -package com.baeldung.kafka.message.ordering; - -import com.baeldung.kafka.message.ordering.serialization.JacksonSerializer; -import org.apache.kafka.clients.consumer.Consumer; -import org.apache.kafka.clients.consumer.ConsumerConfig; -import org.apache.kafka.clients.consumer.ConsumerRecords; -import org.apache.kafka.clients.consumer.KafkaConsumer; -import org.apache.kafka.clients.producer.ProducerConfig; -import org.apache.kafka.common.serialization.StringDeserializer; -import org.apache.kafka.common.serialization.StringSerializer; - -import java.time.Duration; -import java.util.Collections; -import java.util.Properties; - -public class ConsumerConfigurations { - public static void main(String[] args) { - Properties props = new Properties(); - props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, Config.KAFKA_LOCAL); - props.put(ConsumerConfig.GROUP_ID_CONFIG, "test-group"); - props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName()); - props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName()); - props.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, "500"); - props.put(ConsumerConfig.FETCH_MIN_BYTES_CONFIG, "1"); - props.put(ConsumerConfig.FETCH_MAX_WAIT_MS_CONFIG, "500"); - Consumer consumer = new KafkaConsumer<>(props); - consumer.subscribe(Collections.singletonList(Config.MULTI_PARTITION_TOPIC)); - - while (true) { - ConsumerRecords records = consumer.poll(Duration.ofMillis(100)); - records.forEach(record -> { - System.out.println("Partition: " + record.partition() + ", Message: " + record.value()); - }); - } - } -} diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowConsumer.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowConsumer.java deleted file mode 100644 index 06cb7104b7..0000000000 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowConsumer.java +++ /dev/null @@ -1,49 +0,0 @@ -package com.baeldung.kafka.message.ordering; - -import com.baeldung.kafka.message.ordering.payload.UserEvent; -import com.baeldung.kafka.message.ordering.serialization.JacksonDeserializer; -import org.apache.kafka.clients.consumer.Consumer; -import org.apache.kafka.clients.consumer.ConsumerConfig; -import org.apache.kafka.clients.consumer.ConsumerRecords; -import org.apache.kafka.clients.consumer.KafkaConsumer; -import org.apache.kafka.common.serialization.LongDeserializer; - -import java.time.Duration; -import java.util.*; - -public class ExtSeqWithTimeWindowConsumer { - private static final long BUFFER_PERIOD_NS = 5000L * 1000000; // 5000 milliseconds converted to nanoseconds - private static final Duration TIMEOUT_WAIT_FOR_MESSAGES = Duration.ofMillis(100); - - public static void 
main(String[] args) { - Properties props = new Properties(); - props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, Config.KAFKA_LOCAL); - props.put(ConsumerConfig.GROUP_ID_CONFIG, "test-group"); - props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, LongDeserializer.class.getName()); - props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, JacksonDeserializer.class.getName()); - props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); - props.put(Config.CONSUMER_VALUE_DESERIALIZER_SERIALIZED_CLASS, UserEvent.class); - Consumer consumer = new KafkaConsumer<>(props); - consumer.subscribe(Collections.singletonList(Config.MULTI_PARTITION_TOPIC)); - List buffer = new ArrayList<>(); - long lastProcessedTime = System.nanoTime(); - while (true) { - ConsumerRecords records = consumer.poll(TIMEOUT_WAIT_FOR_MESSAGES); - records.forEach(record -> { - buffer.add(record.value()); - }); - if (System.nanoTime() - lastProcessedTime > BUFFER_PERIOD_NS) { - processBuffer(buffer); - lastProcessedTime = System.nanoTime(); - } - } - } - - private static void processBuffer(List buffer) { - Collections.sort(buffer); - buffer.forEach(userEvent -> { - System.out.println("Processing message with Global Sequence number: " + userEvent.getGlobalSequenceNumber() + ", User Event Id: " + userEvent.getUserEventId()); - }); - buffer.clear(); - } -} diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowProducer.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowProducer.java deleted file mode 100644 index 73a62c0bf2..0000000000 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowProducer.java +++ /dev/null @@ -1,34 +0,0 @@ -package com.baeldung.kafka.message.ordering; - -import com.baeldung.kafka.message.ordering.payload.UserEvent; -import com.baeldung.kafka.message.ordering.serialization.JacksonSerializer; -import org.apache.kafka.clients.producer.KafkaProducer; -import org.apache.kafka.clients.producer.ProducerConfig; -import org.apache.kafka.clients.producer.ProducerRecord; -import org.apache.kafka.clients.producer.RecordMetadata; -import org.apache.kafka.common.serialization.LongSerializer; - -import java.util.Properties; -import java.util.UUID; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.Future; - -public class ExtSeqWithTimeWindowProducer { - public static void main(String[] args) throws ExecutionException, InterruptedException { - Properties props = new Properties(); - props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, Config.KAFKA_LOCAL); - props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, LongSerializer.class.getName()); - props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JacksonSerializer.class.getName()); - KafkaProducer producer = new KafkaProducer<>(props); - for (long sequenceNumber = 1; sequenceNumber <= 10 ; sequenceNumber++) { - UserEvent userEvent = new UserEvent(UUID.randomUUID().toString()); - userEvent.setEventNanoTime(System.nanoTime()); - userEvent.setGlobalSequenceNumber(sequenceNumber); - Future future = producer.send(new ProducerRecord<>(Config.MULTI_PARTITION_TOPIC, sequenceNumber, userEvent)); - RecordMetadata metadata = future.get(); - System.out.println("User Event ID: " + userEvent.getUserEventId() + ", Partition : " + metadata.partition()); - } - producer.close(); - System.out.println("ExternalSequencingProducer Completed."); - } -} diff --git 
a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/MultiPartitionConsumer.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/MultiPartitionConsumer.java deleted file mode 100644 index 82f05cc80e..0000000000 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/MultiPartitionConsumer.java +++ /dev/null @@ -1,39 +0,0 @@ -package com.baeldung.kafka.message.ordering; - -import com.baeldung.kafka.message.ordering.payload.UserEvent; -import com.baeldung.kafka.message.ordering.serialization.JacksonDeserializer; -import org.apache.kafka.clients.consumer.Consumer; -import org.apache.kafka.clients.consumer.ConsumerConfig; -import org.apache.kafka.clients.consumer.ConsumerRecords; -import org.apache.kafka.clients.consumer.KafkaConsumer; -import org.apache.kafka.common.serialization.LongDeserializer; - -import java.time.Duration; -import java.util.Collections; -import java.util.Properties; - -public class MultiPartitionConsumer { - private static final Duration TIMEOUT_WAIT_FOR_MESSAGES = Duration.ofMillis(100); - - public static void main(String[] args) { - Properties props = new Properties(); - props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, Config.KAFKA_LOCAL); - props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, LongDeserializer.class.getName()); - props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, JacksonDeserializer.class.getName()); - props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); - props.put(Config.CONSUMER_VALUE_DESERIALIZER_SERIALIZED_CLASS, UserEvent.class); - props.put(ConsumerConfig.GROUP_ID_CONFIG, "test-group"); - Consumer consumer = new KafkaConsumer<>(props); - consumer.subscribe(Collections.singletonList(Config.MULTI_PARTITION_TOPIC)); - while (true) { - ConsumerRecords records = consumer.poll(TIMEOUT_WAIT_FOR_MESSAGES); - records.forEach(record -> { - UserEvent userEvent = record.value(); - if (userEvent != null) { - System.out.println("User Event ID: " + userEvent.getUserEventId()); - } - }); - } - } -} - diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/MultiPartitionProducer.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/MultiPartitionProducer.java deleted file mode 100644 index 52da49ab80..0000000000 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/MultiPartitionProducer.java +++ /dev/null @@ -1,33 +0,0 @@ -package com.baeldung.kafka.message.ordering; - -import com.baeldung.kafka.message.ordering.payload.UserEvent; -import com.baeldung.kafka.message.ordering.serialization.JacksonSerializer; -import org.apache.kafka.clients.producer.KafkaProducer; -import org.apache.kafka.clients.producer.ProducerConfig; -import org.apache.kafka.clients.producer.ProducerRecord; -import org.apache.kafka.clients.producer.RecordMetadata; -import org.apache.kafka.common.serialization.LongSerializer; - -import java.util.Properties; -import java.util.UUID; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.Future; - -public class MultiPartitionProducer { - public static void main(String[] args) throws ExecutionException, InterruptedException { - Properties props = new Properties(); - props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, Config.KAFKA_LOCAL); - props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, LongSerializer.class.getName()); - props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JacksonSerializer.class.getName()); - KafkaProducer producer = new KafkaProducer<>(props); - for (long count = 1; count 
<= 10 ; count++) { - UserEvent userEvent = new UserEvent(UUID.randomUUID().toString()); - userEvent.setEventNanoTime(System.nanoTime()); - Future future = producer.send(new ProducerRecord<>(Config.MULTI_PARTITION_TOPIC, count, userEvent)); - RecordMetadata metadata = future.get(); - System.out.println("User Event ID: " + userEvent.getUserEventId() + ", Partition : " + metadata.partition()); - } - producer.close(); - System.out.println("MultiPartitionProducer Completed."); - } -} diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ProducerConfigurations.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ProducerConfigurations.java deleted file mode 100644 index 79fc42be3d..0000000000 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/ProducerConfigurations.java +++ /dev/null @@ -1,30 +0,0 @@ -package com.baeldung.kafka.message.ordering; - -import org.apache.kafka.clients.producer.KafkaProducer; -import org.apache.kafka.clients.producer.ProducerConfig; -import org.apache.kafka.clients.producer.ProducerRecord; -import org.apache.kafka.common.serialization.StringSerializer; - -import java.util.Properties; - -public class ProducerConfigurations { - public static void main(String[] args) { - Properties props = new Properties(); - props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, Config.KAFKA_LOCAL); - props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()); - props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()); - props.put(ProducerConfig.MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION, "1"); - props.put(ProducerConfig.BATCH_SIZE_CONFIG, "16384"); - props.put(ProducerConfig.LINGER_MS_CONFIG, "5"); - props.put(ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG, "true"); - KafkaProducer producer = new KafkaProducer<>(props); - - for (int i = 0; i < 10; i++) { - String key = "Key-" + (i % 5); // Assuming 5 partitions - producer.send(new ProducerRecord<>(Config.MULTI_PARTITION_TOPIC, key, "Message-" + i)); - } - - producer.close(); - System.out.println("Producer Configurations Completed."); - } -} diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/SinglePartitionConsumer.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/SinglePartitionConsumer.java deleted file mode 100644 index 1c50f3cf7a..0000000000 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/SinglePartitionConsumer.java +++ /dev/null @@ -1,37 +0,0 @@ -package com.baeldung.kafka.message.ordering; - -import com.baeldung.kafka.message.ordering.payload.UserEvent; -import com.baeldung.kafka.message.ordering.serialization.JacksonDeserializer; -import org.apache.kafka.clients.consumer.Consumer; -import org.apache.kafka.clients.consumer.ConsumerConfig; -import org.apache.kafka.clients.consumer.ConsumerRecords; -import org.apache.kafka.clients.consumer.KafkaConsumer; -import org.apache.kafka.common.serialization.LongDeserializer; - -import java.time.Duration; -import java.util.Collections; -import java.util.Properties; - -public class SinglePartitionConsumer { - private static final Duration TIMEOUT_WAIT_FOR_MESSAGES = Duration.ofMillis(100); - - public static void main(String[] args) { - Properties props = new Properties(); - props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, Config.KAFKA_LOCAL); - props.put(ConsumerConfig.GROUP_ID_CONFIG, "test-group"); - props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, LongDeserializer.class.getName()); - 
props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, JacksonDeserializer.class.getName()); - props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); - props.put(Config.CONSUMER_VALUE_DESERIALIZER_SERIALIZED_CLASS, UserEvent.class); - Consumer consumer = new KafkaConsumer<>(props); - consumer.subscribe(Collections.singletonList(Config.SINGLE_PARTITION_TOPIC)); - while (true) { - ConsumerRecords records = consumer.poll(TIMEOUT_WAIT_FOR_MESSAGES); - records.forEach(record -> { - UserEvent userEvent = record.value(); - System.out.println("User Event ID: " + userEvent.getUserEventId()); - }); - } - } -} - diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/SinglePartitionProducer.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/SinglePartitionProducer.java deleted file mode 100644 index 9306abaebf..0000000000 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/SinglePartitionProducer.java +++ /dev/null @@ -1,35 +0,0 @@ -package com.baeldung.kafka.message.ordering; - -import com.baeldung.kafka.message.ordering.payload.UserEvent; -import com.baeldung.kafka.message.ordering.serialization.JacksonSerializer; -import org.apache.kafka.clients.producer.KafkaProducer; -import org.apache.kafka.clients.producer.ProducerConfig; -import org.apache.kafka.clients.producer.ProducerRecord; -import org.apache.kafka.clients.producer.RecordMetadata; -import org.apache.kafka.common.serialization.LongSerializer; - -import java.time.Instant; -import java.util.Properties; -import java.util.UUID; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.Future; - -public class SinglePartitionProducer { - public static void main(String[] args) throws ExecutionException, InterruptedException { - Properties props = new Properties(); - props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, Config.KAFKA_LOCAL); - props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, LongSerializer.class.getName()); - props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JacksonSerializer.class.getName()); - KafkaProducer producer = new KafkaProducer<>(props); - for (long count = 1; count <= 10; count++) { - UserEvent userEvent = new UserEvent(UUID.randomUUID().toString()); - userEvent.setEventNanoTime(System.nanoTime()); - Future future = producer.send(new ProducerRecord<>(Config.SINGLE_PARTITION_TOPIC, count, userEvent)); - RecordMetadata metadata = future.get(); - System.out.println("User Event ID: " + userEvent.getUserEventId() + ", Partition : " + metadata.partition()); - } - producer.close(); - System.out.println("SinglePartitionProducer Completed."); - } - -} diff --git a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowIntegrationTest.java b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowIntegrationTest.java index 5f540d5b50..0535e3c4e6 100644 --- a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowIntegrationTest.java +++ b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowIntegrationTest.java @@ -30,9 +30,7 @@ import static org.junit.jupiter.api.Assertions.*; @Testcontainers public class ExtSeqWithTimeWindowIntegrationTest { - private static String TOPIC = "multi_partition_topic"; - private static int PARTITIONS = 5; - private static short REPLICATION_FACTOR = 1; + private static Admin admin; private static KafkaProducer producer; private static KafkaConsumer consumer; @@ -65,10 +63,10 @@ 
public class ExtSeqWithTimeWindowIntegrationTest { producer = new KafkaProducer<>(producerProperties); consumer = new KafkaConsumer<>(consumerProperties); List topicList = new ArrayList<>(); - NewTopic newTopic = new NewTopic(TOPIC, PARTITIONS, REPLICATION_FACTOR); + NewTopic newTopic = new NewTopic(Config.MULTI_PARTITION_TOPIC, Config.MULTIPLE_PARTITIONS, Config.REPLICATION_FACTOR); topicList.add(newTopic); CreateTopicsResult result = admin.createTopics(topicList); - KafkaFuture future = result.values().get(TOPIC); + KafkaFuture future = result.values().get(Config.MULTI_PARTITION_TOPIC); future.whenComplete((voidResult, exception) -> { if (exception != null) { System.err.println("Error creating the topic: " + exception.getMessage()); @@ -91,14 +89,14 @@ public class ExtSeqWithTimeWindowIntegrationTest { UserEvent userEvent = new UserEvent(UUID.randomUUID().toString()); userEvent.setEventNanoTime(System.nanoTime()); userEvent.setGlobalSequenceNumber(sequenceNumber); - Future future = producer.send(new ProducerRecord<>(TOPIC, sequenceNumber, userEvent)); + Future future = producer.send(new ProducerRecord<>(Config.MULTI_PARTITION_TOPIC, sequenceNumber, userEvent)); sentUserEventList.add(userEvent); RecordMetadata metadata = future.get(); - System.out.println("Partition : " + metadata.partition()); + System.out.println("User Event ID: " + userEvent.getUserEventId() + ", Partition : " + metadata.partition()); } boolean isOrderMaintained = true; - consumer.subscribe(Collections.singletonList(TOPIC)); + consumer.subscribe(Collections.singletonList(Config.MULTI_PARTITION_TOPIC)); List buffer = new ArrayList<>(); long lastProcessedTime = System.nanoTime(); ConsumerRecords records = consumer.poll(TIMEOUT_WAIT_FOR_MESSAGES); @@ -131,7 +129,7 @@ public class ExtSeqWithTimeWindowIntegrationTest { Collections.sort(buffer); buffer.forEach(userEvent -> { receivedUserEventList.add(userEvent); - System.out.println("Process message with Event ID: " + userEvent.getUserEventId()); + System.out.println("Processing message with Global Sequence number: " + userEvent.getGlobalSequenceNumber() + ", User Event Id: " + userEvent.getUserEventId()); }); buffer.clear(); } diff --git a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/MultiplePartitionIntegrationTest.java b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/MultiplePartitionIntegrationTest.java index d4b88ad06f..bbe9fcf7f8 100644 --- a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/MultiplePartitionIntegrationTest.java +++ b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/MultiplePartitionIntegrationTest.java @@ -30,9 +30,7 @@ import static org.junit.jupiter.api.Assertions.*; @Testcontainers public class MultiplePartitionIntegrationTest { - private static String TOPIC = "multi_partition_topic"; - private static int PARTITIONS = 5; - private static short REPLICATION_FACTOR = 1; + private static Admin admin; private static KafkaProducer producer; private static KafkaConsumer consumer; @@ -63,10 +61,10 @@ public class MultiplePartitionIntegrationTest { producer = new KafkaProducer<>(producerProperties); consumer = new KafkaConsumer<>(consumerProperties); List topicList = new ArrayList<>(); - NewTopic newTopic = new NewTopic(TOPIC, PARTITIONS, REPLICATION_FACTOR); + NewTopic newTopic = new NewTopic(Config.MULTI_PARTITION_TOPIC, Config.MULTIPLE_PARTITIONS, Config.REPLICATION_FACTOR); topicList.add(newTopic); CreateTopicsResult result = admin.createTopics(topicList); - KafkaFuture future = 
result.values().get(TOPIC); + KafkaFuture future = result.values().get(Config.MULTI_PARTITION_TOPIC); future.whenComplete((voidResult, exception) -> { if (exception != null) { System.err.println("Error creating the topic: " + exception.getMessage()); @@ -88,18 +86,19 @@ public class MultiplePartitionIntegrationTest { for (long count = 1; count <= 10 ; count++) { UserEvent userEvent = new UserEvent(UUID.randomUUID().toString()); userEvent.setEventNanoTime(System.nanoTime()); - Future future = producer.send(new ProducerRecord<>(TOPIC, count, userEvent)); + Future future = producer.send(new ProducerRecord<>(Config.MULTI_PARTITION_TOPIC, count, userEvent)); sentUserEventList.add(userEvent); RecordMetadata metadata = future.get(); - System.out.println("Partition : " + metadata.partition()); + System.out.println("User Event ID: " + userEvent.getUserEventId() + ", Partition : " + metadata.partition()); } boolean isOrderMaintained = true; - consumer.subscribe(Collections.singletonList(TOPIC)); + consumer.subscribe(Collections.singletonList(Config.MULTI_PARTITION_TOPIC)); ConsumerRecords records = consumer.poll(TIMEOUT_WAIT_FOR_MESSAGES); records.forEach(record -> { UserEvent userEvent = record.value(); receivedUserEventList.add(userEvent); + System.out.println("User Event ID: " + userEvent.getUserEventId()); }); for (int insertPosition = 0; insertPosition <= receivedUserEventList.size() - 1; insertPosition++) { if (isOrderMaintained){ diff --git a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/SinglePartitionIntegrationTest.java b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/SinglePartitionIntegrationTest.java index b41dc67686..6894b0fe80 100644 --- a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/SinglePartitionIntegrationTest.java +++ b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/SinglePartitionIntegrationTest.java @@ -32,9 +32,7 @@ import static org.junit.jupiter.api.Assertions.assertTrue; @Testcontainers public class SinglePartitionIntegrationTest { - private static String TOPIC = "single_partition_topic"; - private static int PARTITIONS = 1; - private static short REPLICATION_FACTOR = 1; + private static Admin admin; private static KafkaProducer producer; private static KafkaConsumer consumer; @@ -67,10 +65,10 @@ public class SinglePartitionIntegrationTest { producer = new KafkaProducer<>(producerProperties); consumer = new KafkaConsumer<>(consumerProperties); List topicList = new ArrayList<>(); - NewTopic newTopic = new NewTopic(TOPIC, PARTITIONS, REPLICATION_FACTOR); + NewTopic newTopic = new NewTopic(Config.SINGLE_PARTITION_TOPIC, Config.SINGLE_PARTITION, Config.REPLICATION_FACTOR); topicList.add(newTopic); CreateTopicsResult result = admin.createTopics(topicList); - KafkaFuture future = result.values().get(TOPIC); + KafkaFuture future = result.values().get(Config.SINGLE_PARTITION_TOPIC); future.whenComplete((voidResult, exception) -> { if (exception != null) { System.err.println("Error creating the topic: " + exception.getMessage()); @@ -92,18 +90,19 @@ public class SinglePartitionIntegrationTest { for (long count = 1; count <= 10 ; count++) { UserEvent userEvent = new UserEvent(UUID.randomUUID().toString()); userEvent.setEventNanoTime(System.nanoTime()); - ProducerRecord producerRecord = new ProducerRecord<>(TOPIC, userEvent); + ProducerRecord producerRecord = new ProducerRecord<>(Config.SINGLE_PARTITION_TOPIC, userEvent); Future future = producer.send(producerRecord); sentUserEventList.add(userEvent); 
RecordMetadata metadata = future.get(); - System.out.println("Partition : " + metadata.partition()); + System.out.println("User Event ID: " + userEvent.getUserEventId() + ", Partition : " + metadata.partition()); } - consumer.subscribe(Collections.singletonList(TOPIC)); + consumer.subscribe(Collections.singletonList(Config.SINGLE_PARTITION_TOPIC)); ConsumerRecords records = consumer.poll(TIMEOUT_WAIT_FOR_MESSAGES); records.forEach(record -> { UserEvent userEvent = record.value(); receivedUserEventList.add(userEvent); + System.out.println("User Event ID: " + userEvent.getUserEventId()); }); boolean result = true; for (int count = 0; count <= 9 ; count++) { From 667620821b143d5c750bb4869fceb939dbc60c47 Mon Sep 17 00:00:00 2001 From: Amol Gote Date: Thu, 2 Nov 2023 19:41:15 -0400 Subject: [PATCH 34/63] Update apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowIntegrationTest.java Co-authored-by: Liam Williams --- .../message/ordering/ExtSeqWithTimeWindowIntegrationTest.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowIntegrationTest.java b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowIntegrationTest.java index 0535e3c4e6..ab3e37916a 100644 --- a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowIntegrationTest.java +++ b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowIntegrationTest.java @@ -38,7 +38,7 @@ public class ExtSeqWithTimeWindowIntegrationTest { private static final long BUFFER_PERIOD_NS = 5000L * 1000000; // 5000 milliseconds converted to nanoseconds @Container - private static final KafkaContainer KAFKA_CONTAINER = new KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka:latest")); + private static final long BUFFER_PERIOD_NS = Duration.ofSeconds(5).toNanos(); @BeforeAll static void setup() throws ExecutionException, InterruptedException { From d0edfbc423a834b8ce6a6674cf54713a44ea4513 Mon Sep 17 00:00:00 2001 From: Amol Gote Date: Thu, 2 Nov 2023 19:41:27 -0400 Subject: [PATCH 35/63] Update apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowIntegrationTest.java Co-authored-by: Liam Williams --- .../message/ordering/ExtSeqWithTimeWindowIntegrationTest.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowIntegrationTest.java b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowIntegrationTest.java index ab3e37916a..af4aa53cc0 100644 --- a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowIntegrationTest.java +++ b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowIntegrationTest.java @@ -34,7 +34,7 @@ public class ExtSeqWithTimeWindowIntegrationTest { private static Admin admin; private static KafkaProducer producer; private static KafkaConsumer consumer; - private static final Duration TIMEOUT_WAIT_FOR_MESSAGES = Duration.ofMillis(5000); + private static final Duration TIMEOUT_WAIT_FOR_MESSAGES = Duration.ofSeconds(5); private static final long BUFFER_PERIOD_NS = 5000L * 1000000; // 5000 milliseconds converted to nanoseconds @Container From 83d6d89db5287e00c9e563737e24b4d979a2d2d9 Mon Sep 17 00:00:00 2001 From: Amol Gote Date: Thu, 2 Nov 2023 19:41:34 -0400 Subject: [PATCH 36/63] Update 
apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/SinglePartitionIntegrationTest.java Co-authored-by: Liam Williams --- .../kafka/message/ordering/SinglePartitionIntegrationTest.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/SinglePartitionIntegrationTest.java b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/SinglePartitionIntegrationTest.java index 6894b0fe80..35b6602510 100644 --- a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/SinglePartitionIntegrationTest.java +++ b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/SinglePartitionIntegrationTest.java @@ -37,7 +37,7 @@ public class SinglePartitionIntegrationTest { private static KafkaProducer producer; private static KafkaConsumer consumer; - private static final Duration TIMEOUT_WAIT_FOR_MESSAGES = Duration.ofMillis(5000); + private static final Duration TIMEOUT_WAIT_FOR_MESSAGES = Duration.ofSeconds(5); @Container private static final KafkaContainer KAFKA_CONTAINER = new KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka:latest")); From bfa7c53b5463ad47c32dc595c234b2d6e5837da2 Mon Sep 17 00:00:00 2001 From: Amol Gote Date: Thu, 2 Nov 2023 19:41:54 -0400 Subject: [PATCH 37/63] Update apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/MultiplePartitionIntegrationTest.java Co-authored-by: Liam Williams --- .../message/ordering/MultiplePartitionIntegrationTest.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/MultiplePartitionIntegrationTest.java b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/MultiplePartitionIntegrationTest.java index bbe9fcf7f8..64a063c6c9 100644 --- a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/MultiplePartitionIntegrationTest.java +++ b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/MultiplePartitionIntegrationTest.java @@ -34,7 +34,7 @@ public class MultiplePartitionIntegrationTest { private static Admin admin; private static KafkaProducer producer; private static KafkaConsumer consumer; - private static final Duration TIMEOUT_WAIT_FOR_MESSAGES = Duration.ofMillis(5000); + private static final Duration TIMEOUT_WAIT_FOR_MESSAGES = Duration.ofSeconds(5); @Container private static final KafkaContainer KAFKA_CONTAINER = new KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka:latest")); From 1ab4017ad65984900919fa396e52a51637555a3b Mon Sep 17 00:00:00 2001 From: Amol Gote Date: Thu, 2 Nov 2023 19:47:05 -0400 Subject: [PATCH 38/63] Issue fixes --- .../ordering/ExtSeqWithTimeWindowIntegrationTest.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowIntegrationTest.java b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowIntegrationTest.java index af4aa53cc0..a18efe9961 100644 --- a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowIntegrationTest.java +++ b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowIntegrationTest.java @@ -35,11 +35,11 @@ public class ExtSeqWithTimeWindowIntegrationTest { private static KafkaProducer producer; private static KafkaConsumer consumer; private static final Duration TIMEOUT_WAIT_FOR_MESSAGES = Duration.ofSeconds(5); - - private static final long 
BUFFER_PERIOD_NS = 5000L * 1000000; // 5000 milliseconds converted to nanoseconds - @Container private static final long BUFFER_PERIOD_NS = Duration.ofSeconds(5).toNanos(); + @Container + private static final KafkaContainer KAFKA_CONTAINER = new KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka:latest")); + @BeforeAll static void setup() throws ExecutionException, InterruptedException { KAFKA_CONTAINER.addExposedPort(9092); From 45bc3d94d407819a1d4c4e139c9b79b38e797e54 Mon Sep 17 00:00:00 2001 From: Amol Gote Date: Thu, 2 Nov 2023 20:18:19 -0400 Subject: [PATCH 39/63] Code Formatting --- .../ordering/SinglePartitionIntegrationTest.java | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/SinglePartitionIntegrationTest.java b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/SinglePartitionIntegrationTest.java index 35b6602510..0c65618014 100644 --- a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/SinglePartitionIntegrationTest.java +++ b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/SinglePartitionIntegrationTest.java @@ -24,10 +24,12 @@ import org.testcontainers.containers.KafkaContainer; import org.testcontainers.junit.jupiter.Container; import org.testcontainers.junit.jupiter.Testcontainers; import org.testcontainers.utility.DockerImageName; + import java.time.Duration; import java.util.*; import java.util.concurrent.ExecutionException; import java.util.concurrent.Future; + import static org.junit.jupiter.api.Assertions.assertTrue; @Testcontainers @@ -53,6 +55,7 @@ public class SinglePartitionIntegrationTest { producerProperties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, KAFKA_CONTAINER.getBootstrapServers()); producerProperties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, LongSerializer.class.getName()); producerProperties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JacksonSerializer.class.getName()); + producer = new KafkaProducer<>(producerProperties); Properties consumerProperties = new Properties(); consumerProperties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, KAFKA_CONTAINER.getBootstrapServers()); @@ -61,9 +64,10 @@ public class SinglePartitionIntegrationTest { consumerProperties.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); consumerProperties.put(Config.CONSUMER_VALUE_DESERIALIZER_SERIALIZED_CLASS, UserEvent.class); consumerProperties.put(ConsumerConfig.GROUP_ID_CONFIG, "test-group"); - admin = Admin.create(adminProperties); - producer = new KafkaProducer<>(producerProperties); consumer = new KafkaConsumer<>(consumerProperties); + admin = Admin.create(adminProperties); + + List topicList = new ArrayList<>(); NewTopic newTopic = new NewTopic(Config.SINGLE_PARTITION_TOPIC, Config.SINGLE_PARTITION, Config.REPLICATION_FACTOR); topicList.add(newTopic); @@ -87,7 +91,7 @@ public class SinglePartitionIntegrationTest { void givenASinglePartition_whenPublishedToKafkaAndConsumed_thenCheckForMessageOrder() throws ExecutionException, InterruptedException { List sentUserEventList = new ArrayList<>(); List receivedUserEventList = new ArrayList<>(); - for (long count = 1; count <= 10 ; count++) { + for (long count = 1; count <= 10; count++) { UserEvent userEvent = new UserEvent(UUID.randomUUID().toString()); userEvent.setEventNanoTime(System.nanoTime()); ProducerRecord producerRecord = new ProducerRecord<>(Config.SINGLE_PARTITION_TOPIC, userEvent); @@ -105,10 +109,10 @@ public class SinglePartitionIntegrationTest { 
System.out.println("User Event ID: " + userEvent.getUserEventId()); }); boolean result = true; - for (int count = 0; count <= 9 ; count++) { + for (int count = 0; count <= 9; count++) { UserEvent sentUserEvent = sentUserEventList.get(count); UserEvent receivedUserEvent = receivedUserEventList.get(count); - if (!sentUserEvent.equals(receivedUserEvent) && result){ + if (!sentUserEvent.equals(receivedUserEvent) && result) { result = false; } } From 1394665355ba988ac79b65b98158fa3cd1c0eb97 Mon Sep 17 00:00:00 2001 From: Amol Gote Date: Sat, 4 Nov 2023 16:46:09 -0400 Subject: [PATCH 40/63] Update apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/SinglePartitionIntegrationTest.java Co-authored-by: Liam Williams --- .../ordering/SinglePartitionIntegrationTest.java | 13 +------------ 1 file changed, 1 insertion(+), 12 deletions(-) diff --git a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/SinglePartitionIntegrationTest.java b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/SinglePartitionIntegrationTest.java index 0c65618014..068d536b1a 100644 --- a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/SinglePartitionIntegrationTest.java +++ b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/SinglePartitionIntegrationTest.java @@ -68,18 +68,7 @@ public class SinglePartitionIntegrationTest { admin = Admin.create(adminProperties); - List topicList = new ArrayList<>(); - NewTopic newTopic = new NewTopic(Config.SINGLE_PARTITION_TOPIC, Config.SINGLE_PARTITION, Config.REPLICATION_FACTOR); - topicList.add(newTopic); - CreateTopicsResult result = admin.createTopics(topicList); - KafkaFuture future = result.values().get(Config.SINGLE_PARTITION_TOPIC); - future.whenComplete((voidResult, exception) -> { - if (exception != null) { - System.err.println("Error creating the topic: " + exception.getMessage()); - } else { - System.out.println("Topic created successfully!"); - } - }).get(); + admin.createTopics(ImmutableList.of(new NewTopic(Config.SINGLE_PARTITION_TOPIC, Config.SINGLE_PARTITION, Config.REPLICATION_FACTOR))).all().get(); } @AfterAll From c2453a503f9e7168469f5fa32084c632b1380913 Mon Sep 17 00:00:00 2001 From: Amol Gote Date: Sat, 4 Nov 2023 16:46:17 -0400 Subject: [PATCH 41/63] Update apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowIntegrationTest.java Co-authored-by: Liam Williams --- .../ExtSeqWithTimeWindowIntegrationTest.java | 13 +------------ 1 file changed, 1 insertion(+), 12 deletions(-) diff --git a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowIntegrationTest.java b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowIntegrationTest.java index a18efe9961..f723d290c2 100644 --- a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowIntegrationTest.java +++ b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowIntegrationTest.java @@ -62,18 +62,7 @@ public class ExtSeqWithTimeWindowIntegrationTest { admin = Admin.create(adminProperties); producer = new KafkaProducer<>(producerProperties); consumer = new KafkaConsumer<>(consumerProperties); - List topicList = new ArrayList<>(); - NewTopic newTopic = new NewTopic(Config.MULTI_PARTITION_TOPIC, Config.MULTIPLE_PARTITIONS, Config.REPLICATION_FACTOR); - topicList.add(newTopic); - CreateTopicsResult result = admin.createTopics(topicList); - KafkaFuture future = 
result.values().get(Config.MULTI_PARTITION_TOPIC); - future.whenComplete((voidResult, exception) -> { - if (exception != null) { - System.err.println("Error creating the topic: " + exception.getMessage()); - } else { - System.out.println("Topic created successfully!"); - } - }).get(); + admin.createTopics(ImmutableList.of(new NewTopic(Config.MULTI_PARTITION_TOPIC, Config.MULTIPLE_PARTITIONS, Config.REPLICATION_FACTOR))).all().get(); } @AfterAll From e5da66643251d2f66eecd0ca3d607e10fa143179 Mon Sep 17 00:00:00 2001 From: Amol Gote Date: Sat, 4 Nov 2023 16:49:26 -0400 Subject: [PATCH 42/63] Update apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/MultiplePartitionIntegrationTest.java Co-authored-by: Liam Williams --- .../ordering/MultiplePartitionIntegrationTest.java | 13 +------------ 1 file changed, 1 insertion(+), 12 deletions(-) diff --git a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/MultiplePartitionIntegrationTest.java b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/MultiplePartitionIntegrationTest.java index 64a063c6c9..88281014b6 100644 --- a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/MultiplePartitionIntegrationTest.java +++ b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/MultiplePartitionIntegrationTest.java @@ -60,18 +60,7 @@ public class MultiplePartitionIntegrationTest { admin = Admin.create(adminProperties); producer = new KafkaProducer<>(producerProperties); consumer = new KafkaConsumer<>(consumerProperties); - List topicList = new ArrayList<>(); - NewTopic newTopic = new NewTopic(Config.MULTI_PARTITION_TOPIC, Config.MULTIPLE_PARTITIONS, Config.REPLICATION_FACTOR); - topicList.add(newTopic); - CreateTopicsResult result = admin.createTopics(topicList); - KafkaFuture future = result.values().get(Config.MULTI_PARTITION_TOPIC); - future.whenComplete((voidResult, exception) -> { - if (exception != null) { - System.err.println("Error creating the topic: " + exception.getMessage()); - } else { - System.out.println("Topic created successfully!"); - } - }).get(); + admin.createTopics(ImmutableList.of(new NewTopic(Config.MULTI_PARTITION_TOPIC, Config.MULTIPLE_PARTITIONS, Config.REPLICATION_FACTOR))).all().get(); } @AfterAll From 41ab2ed2213b8a20fbeb8f3ee52357757b552e31 Mon Sep 17 00:00:00 2001 From: Amol Gote Date: Sat, 4 Nov 2023 16:55:03 -0400 Subject: [PATCH 43/63] Update apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/payload/UserEvent.java Co-authored-by: Liam Williams --- .../com/baeldung/kafka/message/ordering/payload/UserEvent.java | 2 -- 1 file changed, 2 deletions(-) diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/payload/UserEvent.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/payload/UserEvent.java index 0c4018e624..040c7d6995 100644 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/payload/UserEvent.java +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/payload/UserEvent.java @@ -3,9 +3,7 @@ package com.baeldung.kafka.message.ordering.payload; import java.util.Objects; public class UserEvent implements Comparable { private String userEventId; - private long eventNanoTime; - private long globalSequenceNumber; public UserEvent(){ From 0399cf0e5f0d788367d5209def3ad41be8ae92d3 Mon Sep 17 00:00:00 2001 From: Amol Gote Date: Sat, 4 Nov 2023 16:55:32 -0400 Subject: [PATCH 44/63] Update apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/payload/UserEvent.java 
Co-authored-by: Liam Williams --- .../baeldung/kafka/message/ordering/payload/UserEvent.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/payload/UserEvent.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/payload/UserEvent.java index 040c7d6995..67e6b70c08 100644 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/payload/UserEvent.java +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/payload/UserEvent.java @@ -6,11 +6,11 @@ public class UserEvent implements Comparable { private long eventNanoTime; private long globalSequenceNumber; + @SuppressWarnings("unused") public UserEvent(){ - + // Required for Jackson Serialization and Deserialization } - //Required for Kafka Serialization and Deserialization public UserEvent(String userEventId) { this.userEventId = userEventId; } From d4842ac511422a11dd746d07d337fe9dda74e76f Mon Sep 17 00:00:00 2001 From: Amol Gote Date: Sat, 4 Nov 2023 16:56:07 -0400 Subject: [PATCH 45/63] Update apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/serialization/JacksonDeserializer.java Co-authored-by: Liam Williams --- .../message/ordering/serialization/JacksonDeserializer.java | 1 - 1 file changed, 1 deletion(-) diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/serialization/JacksonDeserializer.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/serialization/JacksonDeserializer.java index be2b104761..2cc0bde853 100644 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/serialization/JacksonDeserializer.java +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/serialization/JacksonDeserializer.java @@ -8,7 +8,6 @@ import java.util.Map; /** * Configured via {@link org.apache.kafka.clients.consumer.ConsumerConfig#VALUE_DESERIALIZER_CLASS_CONFIG} */ -@SuppressWarnings("unused") public class JacksonDeserializer implements Deserializer { private final ObjectMapper objectMapper = new ObjectMapper(); private Class type; From aeb5f55e0e27a67aaa18fd756fdf21664e56f6d4 Mon Sep 17 00:00:00 2001 From: Amol Gote Date: Sat, 4 Nov 2023 16:56:47 -0400 Subject: [PATCH 46/63] Update apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/serialization/JacksonDeserializer.java Co-authored-by: Liam Williams --- .../message/ordering/serialization/JacksonDeserializer.java | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/serialization/JacksonDeserializer.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/serialization/JacksonDeserializer.java index 2cc0bde853..300a43ca7c 100644 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/serialization/JacksonDeserializer.java +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/serialization/JacksonDeserializer.java @@ -33,9 +33,8 @@ public class JacksonDeserializer implements Deserializer { try { return objectMapper.readValue(bytes, type); } catch (Exception e) { - //throw new RuntimeException("Error deserializing value", e); + throw new RuntimeException("Error deserializing value", e); } - return null; } } From 9a76f1a6375eceba23f8ce24748db06e9c1519f8 Mon Sep 17 00:00:00 2001 From: Amol Gote Date: Sat, 4 Nov 2023 16:57:02 -0400 Subject: [PATCH 47/63] Update 
apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowIntegrationTest.java Co-authored-by: Liam Williams --- .../message/ordering/ExtSeqWithTimeWindowIntegrationTest.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowIntegrationTest.java b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowIntegrationTest.java index f723d290c2..e9a77bd2d7 100644 --- a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowIntegrationTest.java +++ b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowIntegrationTest.java @@ -29,7 +29,7 @@ import java.util.concurrent.Future; import static org.junit.jupiter.api.Assertions.*; @Testcontainers -public class ExtSeqWithTimeWindowIntegrationTest { +public class ExternalSequenceWithTimeWindowIntegrationTest { private static Admin admin; private static KafkaProducer producer; From 33b18c0ca49d4814eb032df2122dc302e2bb5ebf Mon Sep 17 00:00:00 2001 From: Amol Gote Date: Sat, 4 Nov 2023 16:57:19 -0400 Subject: [PATCH 48/63] Update apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowIntegrationTest.java Co-authored-by: Liam Williams --- .../message/ordering/ExtSeqWithTimeWindowIntegrationTest.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowIntegrationTest.java b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowIntegrationTest.java index e9a77bd2d7..f47a01486c 100644 --- a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowIntegrationTest.java +++ b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowIntegrationTest.java @@ -92,7 +92,7 @@ public class ExternalSequenceWithTimeWindowIntegrationTest { records.forEach(record -> { buffer.add(record.value()); }); - while (buffer.size() > 0) { + while (!buffer.isEmpty()) { if (System.nanoTime() - lastProcessedTime > BUFFER_PERIOD_NS) { processBuffer(buffer, receivedUserEventList); lastProcessedTime = System.nanoTime(); From 3e2975196d8842459acfaa23d4b22b86e5d08f1b Mon Sep 17 00:00:00 2001 From: Amol Gote Date: Sat, 4 Nov 2023 16:57:32 -0400 Subject: [PATCH 49/63] Update apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowIntegrationTest.java Co-authored-by: Liam Williams --- .../ExtSeqWithTimeWindowIntegrationTest.java | 13 +++---------- 1 file changed, 3 insertions(+), 10 deletions(-) diff --git a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowIntegrationTest.java b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowIntegrationTest.java index f47a01486c..76e4a47d17 100644 --- a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowIntegrationTest.java +++ b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowIntegrationTest.java @@ -102,16 +102,9 @@ public class ExternalSequenceWithTimeWindowIntegrationTest { buffer.add(record.value()); }); } - for (int insertPosition = 0; insertPosition <= receivedUserEventList.size() - 1; insertPosition++) { - if (isOrderMaintained){ - UserEvent sentUserEvent = sentUserEventList.get(insertPosition); - UserEvent receivedUserEvent = 
receivedUserEventList.get(insertPosition); - if (!sentUserEvent.equals(receivedUserEvent)) { - isOrderMaintained = false; - } - } - } - assertTrue(isOrderMaintained); + assertThat(receivedUserEventList) + .isEqualTo(sentUserEventList) + .containsExactlyElementsOf(sentUserEventList); } private static void processBuffer(List buffer, List receivedUserEventList) { From 9c463c752b17ca3843490cef337a44f1a96f07f3 Mon Sep 17 00:00:00 2001 From: Amol Gote Date: Sat, 4 Nov 2023 16:57:47 -0400 Subject: [PATCH 50/63] Update apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/MultiplePartitionIntegrationTest.java Co-authored-by: Liam Williams --- .../ordering/MultiplePartitionIntegrationTest.java | 13 +++---------- 1 file changed, 3 insertions(+), 10 deletions(-) diff --git a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/MultiplePartitionIntegrationTest.java b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/MultiplePartitionIntegrationTest.java index 88281014b6..752514c09a 100644 --- a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/MultiplePartitionIntegrationTest.java +++ b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/MultiplePartitionIntegrationTest.java @@ -89,15 +89,8 @@ public class MultiplePartitionIntegrationTest { receivedUserEventList.add(userEvent); System.out.println("User Event ID: " + userEvent.getUserEventId()); }); - for (int insertPosition = 0; insertPosition <= receivedUserEventList.size() - 1; insertPosition++) { - if (isOrderMaintained){ - UserEvent sentUserEvent = sentUserEventList.get(insertPosition); - UserEvent receivedUserEvent = receivedUserEventList.get(insertPosition); - if (!sentUserEvent.equals(receivedUserEvent)) { - isOrderMaintained = false; - } - } - } - assertFalse(isOrderMaintained); + assertThat(receivedUserEventList) + .isNotEqualTo(sentUserEventList) + .containsExactlyInAnyOrderElementsOf(sentUserEventList); } } From ddc5e0a5efbfb15a8a19c6bf0b91a2986883efbe Mon Sep 17 00:00:00 2001 From: Amol Gote Date: Sat, 4 Nov 2023 16:58:01 -0400 Subject: [PATCH 51/63] Update apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/SinglePartitionIntegrationTest.java Co-authored-by: Liam Williams --- .../ordering/SinglePartitionIntegrationTest.java | 12 +++--------- 1 file changed, 3 insertions(+), 9 deletions(-) diff --git a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/SinglePartitionIntegrationTest.java b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/SinglePartitionIntegrationTest.java index 068d536b1a..a767133627 100644 --- a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/SinglePartitionIntegrationTest.java +++ b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/SinglePartitionIntegrationTest.java @@ -97,14 +97,8 @@ public class SinglePartitionIntegrationTest { receivedUserEventList.add(userEvent); System.out.println("User Event ID: " + userEvent.getUserEventId()); }); - boolean result = true; - for (int count = 0; count <= 9; count++) { - UserEvent sentUserEvent = sentUserEventList.get(count); - UserEvent receivedUserEvent = receivedUserEventList.get(count); - if (!sentUserEvent.equals(receivedUserEvent) && result) { - result = false; - } - } - assertTrue(result); + assertThat(receivedUserEventList) + .isEqualTo(sentUserEventList) + .containsExactlyElementsOf(sentUserEventList); } } From cb274a7ed4a86fbdb1fa129d1a68c31d316a8011 Mon Sep 17 00:00:00 2001 From: Amol Gote Date: Sat, 4 Nov 2023 16:58:28 -0400 
Subject: [PATCH 52/63] Update apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/serialization/JacksonSerializer.java Co-authored-by: Liam Williams --- .../kafka/message/ordering/serialization/JacksonSerializer.java | 1 - 1 file changed, 1 deletion(-) diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/serialization/JacksonSerializer.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/serialization/JacksonSerializer.java index 2d7432cc7b..4c081de3cc 100644 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/serialization/JacksonSerializer.java +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/serialization/JacksonSerializer.java @@ -6,7 +6,6 @@ import org.apache.kafka.common.serialization.Serializer; /** * Configured via {@link org.apache.kafka.clients.producer.ProducerConfig#VALUE_SERIALIZER_CLASS_CONFIG} */ -@SuppressWarnings("unused") public class JacksonSerializer implements Serializer { private final ObjectMapper objectMapper = new ObjectMapper(); From b24851bfcee4223b94d51e5b3d2d079824f8cb09 Mon Sep 17 00:00:00 2001 From: Amol Gote Date: Sat, 4 Nov 2023 16:59:10 -0400 Subject: [PATCH 53/63] Update apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/serialization/JacksonDeserializer.java Co-authored-by: Liam Williams --- .../ordering/serialization/JacksonDeserializer.java | 7 ------- 1 file changed, 7 deletions(-) diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/serialization/JacksonDeserializer.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/serialization/JacksonDeserializer.java index 300a43ca7c..4868ecaf2e 100644 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/serialization/JacksonDeserializer.java +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/serialization/JacksonDeserializer.java @@ -12,13 +12,6 @@ public class JacksonDeserializer implements Deserializer { private final ObjectMapper objectMapper = new ObjectMapper(); private Class type; - public JacksonDeserializer(Class type) { - this.type = type; - } - - public JacksonDeserializer() { - - } @Override public void configure(Map configs, boolean isKey) { From 2cecae1dfb6af13c311e48d89dfc38d1f70ba181 Mon Sep 17 00:00:00 2001 From: Amol Gote Date: Sat, 4 Nov 2023 17:18:20 -0400 Subject: [PATCH 54/63] Incorporate Review comments --- .../com/baeldung/kafka/message/ordering/Config.java | 2 +- ...ternalSequenceWithTimeWindowIntegrationTest.java} | 12 +++++------- .../ordering/MultiplePartitionIntegrationTest.java | 7 +++---- .../ordering/SinglePartitionIntegrationTest.java | 6 ++---- 4 files changed, 11 insertions(+), 16 deletions(-) rename apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/{ExtSeqWithTimeWindowIntegrationTest.java => ExternalSequenceWithTimeWindowIntegrationTest.java} (95%) diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/Config.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/Config.java index 9cc6314309..7fae8403b5 100644 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/Config.java +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/Config.java @@ -7,5 +7,5 @@ public class Config { public static final int MULTIPLE_PARTITIONS = 5; public static final int SINGLE_PARTITION = 1; - public static short REPLICATION_FACTOR = 1; + public static final short REPLICATION_FACTOR = 1; } diff --git 
a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowIntegrationTest.java b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/ExternalSequenceWithTimeWindowIntegrationTest.java similarity index 95% rename from apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowIntegrationTest.java rename to apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/ExternalSequenceWithTimeWindowIntegrationTest.java index 76e4a47d17..0c64f663f3 100644 --- a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/ExtSeqWithTimeWindowIntegrationTest.java +++ b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/ExternalSequenceWithTimeWindowIntegrationTest.java @@ -11,7 +11,6 @@ import org.apache.kafka.clients.producer.KafkaProducer; import org.apache.kafka.clients.producer.ProducerConfig; import org.apache.kafka.clients.producer.ProducerRecord; import org.apache.kafka.clients.producer.RecordMetadata; -import org.apache.kafka.common.KafkaFuture; import org.apache.kafka.common.serialization.LongDeserializer; import org.apache.kafka.common.serialization.LongSerializer; import org.junit.jupiter.api.AfterAll; @@ -25,8 +24,8 @@ import java.time.Duration; import java.util.*; import java.util.concurrent.ExecutionException; import java.util.concurrent.Future; - -import static org.junit.jupiter.api.Assertions.*; +import com.google.common.collect.ImmutableList; +import static org.assertj.core.api.AssertionsForInterfaceTypes.assertThat; @Testcontainers public class ExternalSequenceWithTimeWindowIntegrationTest { @@ -84,7 +83,6 @@ public class ExternalSequenceWithTimeWindowIntegrationTest { System.out.println("User Event ID: " + userEvent.getUserEventId() + ", Partition : " + metadata.partition()); } - boolean isOrderMaintained = true; consumer.subscribe(Collections.singletonList(Config.MULTI_PARTITION_TOPIC)); List buffer = new ArrayList<>(); long lastProcessedTime = System.nanoTime(); @@ -102,9 +100,9 @@ public class ExternalSequenceWithTimeWindowIntegrationTest { buffer.add(record.value()); }); } - assertThat(receivedUserEventList) - .isEqualTo(sentUserEventList) - .containsExactlyElementsOf(sentUserEventList); + assertThat(receivedUserEventList) + .isEqualTo(sentUserEventList) + .containsExactlyElementsOf(sentUserEventList); } private static void processBuffer(List buffer, List receivedUserEventList) { diff --git a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/MultiplePartitionIntegrationTest.java b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/MultiplePartitionIntegrationTest.java index 752514c09a..2fde24114c 100644 --- a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/MultiplePartitionIntegrationTest.java +++ b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/MultiplePartitionIntegrationTest.java @@ -11,7 +11,6 @@ import org.apache.kafka.clients.producer.KafkaProducer; import org.apache.kafka.clients.producer.ProducerConfig; import org.apache.kafka.clients.producer.ProducerRecord; import org.apache.kafka.clients.producer.RecordMetadata; -import org.apache.kafka.common.KafkaFuture; import org.apache.kafka.common.serialization.LongDeserializer; import org.apache.kafka.common.serialization.LongSerializer; import org.junit.jupiter.api.AfterAll; @@ -25,8 +24,8 @@ import java.time.Duration; import java.util.*; import java.util.concurrent.ExecutionException; import java.util.concurrent.Future; - -import static 
org.junit.jupiter.api.Assertions.*; +import com.google.common.collect.ImmutableList; +import static org.assertj.core.api.AssertionsForInterfaceTypes.assertThat; @Testcontainers public class MultiplePartitionIntegrationTest { @@ -89,7 +88,7 @@ public class MultiplePartitionIntegrationTest { receivedUserEventList.add(userEvent); System.out.println("User Event ID: " + userEvent.getUserEventId()); }); - assertThat(receivedUserEventList) + assertThat(receivedUserEventList) .isNotEqualTo(sentUserEventList) .containsExactlyInAnyOrderElementsOf(sentUserEventList); } diff --git a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/SinglePartitionIntegrationTest.java b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/SinglePartitionIntegrationTest.java index a767133627..0826365f97 100644 --- a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/SinglePartitionIntegrationTest.java +++ b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/SinglePartitionIntegrationTest.java @@ -5,7 +5,6 @@ import com.baeldung.kafka.message.ordering.serialization.JacksonDeserializer; import com.baeldung.kafka.message.ordering.serialization.JacksonSerializer; import org.apache.kafka.clients.admin.Admin; import org.apache.kafka.clients.admin.AdminClientConfig; -import org.apache.kafka.clients.admin.CreateTopicsResult; import org.apache.kafka.clients.admin.NewTopic; import org.apache.kafka.clients.consumer.ConsumerConfig; import org.apache.kafka.clients.consumer.ConsumerRecords; @@ -14,7 +13,6 @@ import org.apache.kafka.clients.producer.KafkaProducer; import org.apache.kafka.clients.producer.ProducerConfig; import org.apache.kafka.clients.producer.ProducerRecord; import org.apache.kafka.clients.producer.RecordMetadata; -import org.apache.kafka.common.KafkaFuture; import org.apache.kafka.common.serialization.LongDeserializer; import org.apache.kafka.common.serialization.LongSerializer; import org.junit.jupiter.api.AfterAll; @@ -29,8 +27,8 @@ import java.time.Duration; import java.util.*; import java.util.concurrent.ExecutionException; import java.util.concurrent.Future; - -import static org.junit.jupiter.api.Assertions.assertTrue; +import com.google.common.collect.ImmutableList; +import static org.assertj.core.api.AssertionsForInterfaceTypes.assertThat; @Testcontainers public class SinglePartitionIntegrationTest { From 2a6e561f769cd38cfa43c2c1d7742a24b43e52d7 Mon Sep 17 00:00:00 2001 From: Amol Gote Date: Sat, 4 Nov 2023 18:09:21 -0400 Subject: [PATCH 55/63] Fixed unit test case failures --- .../kafka/message/ordering/payload/UserEvent.java | 10 +++++++--- .../ordering/MultiplePartitionIntegrationTest.java | 6 +++--- .../ordering/SinglePartitionIntegrationTest.java | 3 ++- 3 files changed, 12 insertions(+), 7 deletions(-) diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/payload/UserEvent.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/payload/UserEvent.java index 67e6b70c08..676b469ce8 100644 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/payload/UserEvent.java +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/payload/UserEvent.java @@ -42,15 +42,19 @@ public class UserEvent implements Comparable { @Override public boolean equals(Object obj) { - if (obj == this) { + if (this == obj) { return true; } if (!(obj instanceof UserEvent)) { return false; } UserEvent userEvent = (UserEvent) obj; - return Objects.equals(this.userEventId, userEvent.getUserEventId()) - && 
userEvent.getEventNanoTime() == this.eventNanoTime; + return this.globalSequenceNumber == userEvent.globalSequenceNumber; + } + + @Override + public int hashCode() { + return Objects.hash(globalSequenceNumber); } } diff --git a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/MultiplePartitionIntegrationTest.java b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/MultiplePartitionIntegrationTest.java index 2fde24114c..0405184074 100644 --- a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/MultiplePartitionIntegrationTest.java +++ b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/MultiplePartitionIntegrationTest.java @@ -71,16 +71,16 @@ public class MultiplePartitionIntegrationTest { void givenMultiplePartitions_whenPublishedToKafkaAndConsumed_thenCheckForMessageOrder() throws ExecutionException, InterruptedException { List sentUserEventList = new ArrayList<>(); List receivedUserEventList = new ArrayList<>(); - for (long count = 1; count <= 10 ; count++) { + for (long sequenceNumber = 1; sequenceNumber <= 10; sequenceNumber++) { UserEvent userEvent = new UserEvent(UUID.randomUUID().toString()); + userEvent.setGlobalSequenceNumber(sequenceNumber); userEvent.setEventNanoTime(System.nanoTime()); - Future future = producer.send(new ProducerRecord<>(Config.MULTI_PARTITION_TOPIC, count, userEvent)); + Future future = producer.send(new ProducerRecord<>(Config.MULTI_PARTITION_TOPIC, sequenceNumber, userEvent)); sentUserEventList.add(userEvent); RecordMetadata metadata = future.get(); System.out.println("User Event ID: " + userEvent.getUserEventId() + ", Partition : " + metadata.partition()); } - boolean isOrderMaintained = true; consumer.subscribe(Collections.singletonList(Config.MULTI_PARTITION_TOPIC)); ConsumerRecords records = consumer.poll(TIMEOUT_WAIT_FOR_MESSAGES); records.forEach(record -> { diff --git a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/SinglePartitionIntegrationTest.java b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/SinglePartitionIntegrationTest.java index 0826365f97..39d298826c 100644 --- a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/SinglePartitionIntegrationTest.java +++ b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/SinglePartitionIntegrationTest.java @@ -78,8 +78,9 @@ public class SinglePartitionIntegrationTest { void givenASinglePartition_whenPublishedToKafkaAndConsumed_thenCheckForMessageOrder() throws ExecutionException, InterruptedException { List sentUserEventList = new ArrayList<>(); List receivedUserEventList = new ArrayList<>(); - for (long count = 1; count <= 10; count++) { + for (long sequenceNumber = 1; sequenceNumber <= 10; sequenceNumber++) { UserEvent userEvent = new UserEvent(UUID.randomUUID().toString()); + userEvent.setGlobalSequenceNumber(sequenceNumber); userEvent.setEventNanoTime(System.nanoTime()); ProducerRecord producerRecord = new ProducerRecord<>(Config.SINGLE_PARTITION_TOPIC, userEvent); Future future = producer.send(producerRecord); From 80cd71fff3e37122a22884fa85b394537e709e20 Mon Sep 17 00:00:00 2001 From: Amol Gote Date: Sat, 4 Nov 2023 18:33:09 -0400 Subject: [PATCH 56/63] Removing slf4j-log4j12, deleting log4j.properties and adding a standard src/test/resources/logback.xml, since the Kafka libraries use the logback SLF4J binding. 
--- apache-kafka-2/log4j.properties | 1 - apache-kafka-2/pom.xml | 5 ----- ...ExternalSequenceWithTimeWindowIntegrationTest.java | 9 +++++++-- .../ordering/MultiplePartitionIntegrationTest.java | 8 ++++++-- .../ordering/SinglePartitionIntegrationTest.java | 7 +++++-- apache-kafka-2/src/test/resources/logback.xml | 11 +++++++++++ 6 files changed, 29 insertions(+), 12 deletions(-) delete mode 100644 apache-kafka-2/log4j.properties create mode 100644 apache-kafka-2/src/test/resources/logback.xml diff --git a/apache-kafka-2/log4j.properties b/apache-kafka-2/log4j.properties deleted file mode 100644 index 2173c5d96f..0000000000 --- a/apache-kafka-2/log4j.properties +++ /dev/null @@ -1 +0,0 @@ -log4j.rootLogger=INFO, stdout diff --git a/apache-kafka-2/pom.xml b/apache-kafka-2/pom.xml index d0838a386e..d1f74e8aae 100644 --- a/apache-kafka-2/pom.xml +++ b/apache-kafka-2/pom.xml @@ -23,11 +23,6 @@ slf4j-api ${org.slf4j.version} - - org.slf4j - slf4j-log4j12 - ${org.slf4j.version} - com.google.guava guava diff --git a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/ExternalSequenceWithTimeWindowIntegrationTest.java b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/ExternalSequenceWithTimeWindowIntegrationTest.java index 0c64f663f3..a5ec7a98a3 100644 --- a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/ExternalSequenceWithTimeWindowIntegrationTest.java +++ b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/ExternalSequenceWithTimeWindowIntegrationTest.java @@ -1,5 +1,6 @@ package com.baeldung.kafka.message.ordering; +import com.baeldung.kafka.headers.KafkaMessageHeaders; import com.baeldung.kafka.message.ordering.payload.UserEvent; import com.baeldung.kafka.message.ordering.serialization.JacksonDeserializer; import com.baeldung.kafka.message.ordering.serialization.JacksonSerializer; @@ -16,6 +17,8 @@ import org.apache.kafka.common.serialization.LongSerializer; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.testcontainers.containers.KafkaContainer; import org.testcontainers.junit.jupiter.Container; import org.testcontainers.junit.jupiter.Testcontainers; @@ -36,6 +39,8 @@ public class ExternalSequenceWithTimeWindowIntegrationTest { private static final Duration TIMEOUT_WAIT_FOR_MESSAGES = Duration.ofSeconds(5); private static final long BUFFER_PERIOD_NS = Duration.ofSeconds(5).toNanos(); + private static Logger logger = LoggerFactory.getLogger(ExternalSequenceWithTimeWindowIntegrationTest.class); + @Container private static final KafkaContainer KAFKA_CONTAINER = new KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka:latest")); @@ -80,7 +85,7 @@ public class ExternalSequenceWithTimeWindowIntegrationTest { Future future = producer.send(new ProducerRecord<>(Config.MULTI_PARTITION_TOPIC, sequenceNumber, userEvent)); sentUserEventList.add(userEvent); RecordMetadata metadata = future.get(); - System.out.println("User Event ID: " + userEvent.getUserEventId() + ", Partition : " + metadata.partition()); + logger.info("User Event ID: " + userEvent.getUserEventId() + ", Partition : " + metadata.partition()); } consumer.subscribe(Collections.singletonList(Config.MULTI_PARTITION_TOPIC)); @@ -109,7 +114,7 @@ public class ExternalSequenceWithTimeWindowIntegrationTest { Collections.sort(buffer); buffer.forEach(userEvent -> { receivedUserEventList.add(userEvent); - System.out.println("Processing message with 
Global Sequence number: " + userEvent.getGlobalSequenceNumber() + ", User Event Id: " + userEvent.getUserEventId()); + logger.info("Processing message with Global Sequence number: " + userEvent.getGlobalSequenceNumber() + ", User Event Id: " + userEvent.getUserEventId()); }); buffer.clear(); } diff --git a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/MultiplePartitionIntegrationTest.java b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/MultiplePartitionIntegrationTest.java index 0405184074..adfa9a0399 100644 --- a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/MultiplePartitionIntegrationTest.java +++ b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/MultiplePartitionIntegrationTest.java @@ -16,6 +16,8 @@ import org.apache.kafka.common.serialization.LongSerializer; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.testcontainers.containers.KafkaContainer; import org.testcontainers.junit.jupiter.Container; import org.testcontainers.junit.jupiter.Testcontainers; @@ -34,6 +36,8 @@ public class MultiplePartitionIntegrationTest { private static KafkaProducer producer; private static KafkaConsumer consumer; private static final Duration TIMEOUT_WAIT_FOR_MESSAGES = Duration.ofSeconds(5); + + private static Logger logger = LoggerFactory.getLogger(MultiplePartitionIntegrationTest.class); @Container private static final KafkaContainer KAFKA_CONTAINER = new KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka:latest")); @@ -78,7 +82,7 @@ public class MultiplePartitionIntegrationTest { Future future = producer.send(new ProducerRecord<>(Config.MULTI_PARTITION_TOPIC, sequenceNumber, userEvent)); sentUserEventList.add(userEvent); RecordMetadata metadata = future.get(); - System.out.println("User Event ID: " + userEvent.getUserEventId() + ", Partition : " + metadata.partition()); + logger.info("User Event ID: " + userEvent.getUserEventId() + ", Partition : " + metadata.partition()); } consumer.subscribe(Collections.singletonList(Config.MULTI_PARTITION_TOPIC)); @@ -86,7 +90,7 @@ public class MultiplePartitionIntegrationTest { records.forEach(record -> { UserEvent userEvent = record.value(); receivedUserEventList.add(userEvent); - System.out.println("User Event ID: " + userEvent.getUserEventId()); + logger.info("User Event ID: " + userEvent.getUserEventId()); }); assertThat(receivedUserEventList) .isNotEqualTo(sentUserEventList) diff --git a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/SinglePartitionIntegrationTest.java b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/SinglePartitionIntegrationTest.java index 39d298826c..7280a1218a 100644 --- a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/SinglePartitionIntegrationTest.java +++ b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/SinglePartitionIntegrationTest.java @@ -18,6 +18,8 @@ import org.apache.kafka.common.serialization.LongSerializer; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.testcontainers.containers.KafkaContainer; import org.testcontainers.junit.jupiter.Container; import org.testcontainers.junit.jupiter.Testcontainers; @@ -39,6 +41,7 @@ public class SinglePartitionIntegrationTest { private static final Duration 
TIMEOUT_WAIT_FOR_MESSAGES = Duration.ofSeconds(5); + private static Logger logger = LoggerFactory.getLogger(SinglePartitionIntegrationTest.class); @Container private static final KafkaContainer KAFKA_CONTAINER = new KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka:latest")); @@ -86,7 +89,7 @@ public class SinglePartitionIntegrationTest { Future future = producer.send(producerRecord); sentUserEventList.add(userEvent); RecordMetadata metadata = future.get(); - System.out.println("User Event ID: " + userEvent.getUserEventId() + ", Partition : " + metadata.partition()); + logger.info("User Event ID: " + userEvent.getUserEventId() + ", Partition : " + metadata.partition()); } consumer.subscribe(Collections.singletonList(Config.SINGLE_PARTITION_TOPIC)); @@ -94,7 +97,7 @@ records.forEach(record -> { UserEvent userEvent = record.value(); receivedUserEventList.add(userEvent); - System.out.println("User Event ID: " + userEvent.getUserEventId()); + logger.info("User Event ID: " + userEvent.getUserEventId()); }); assertThat(receivedUserEventList) .isEqualTo(sentUserEventList) .containsExactlyElementsOf(sentUserEventList); } }
diff --git a/apache-kafka-2/src/test/resources/logback.xml b/apache-kafka-2/src/test/resources/logback.xml new file mode 100644 index 0000000000..6156c2188e --- /dev/null +++ b/apache-kafka-2/src/test/resources/logback.xml @@ -0,0 +1,11 @@ + <configuration> + <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender"> + <encoder> + <pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n</pattern> + </encoder> + </appender> + + <root level="INFO"> + <appender-ref ref="STDOUT" /> + </root> + </configuration> \ No newline at end of file
From 5d3f08d0e915ca8b077666253fa4b3899475155c Mon Sep 17 00:00:00 2001 From: Amol Gote Date: Sun, 5 Nov 2023 21:32:49 -0500 Subject: [PATCH 57/63] Fix the whitespace and newline issues - with code formatter --- .../message/ordering/payload/UserEvent.java | 3 ++- .../serialization/JacksonDeserializer.java | 3 ++- .../serialization/JacksonSerializer.java | 1 + ...SequenceWithTimeWindowIntegrationTest.java | 22 ++++++++++++------- .../MultiplePartitionIntegrationTest.java | 14 ++++++++---- .../SinglePartitionIntegrationTest.java | 19 +++++++++------- 6 files changed, 40 insertions(+), 22 deletions(-)
diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/payload/UserEvent.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/payload/UserEvent.java index 676b469ce8..99e0cc6c7e 100644 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/payload/UserEvent.java +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/payload/UserEvent.java @@ -1,13 +1,14 @@ package com.baeldung.kafka.message.ordering.payload; import java.util.Objects; + public class UserEvent implements Comparable { private String userEventId; private long eventNanoTime; private long globalSequenceNumber; @SuppressWarnings("unused") - public UserEvent(){ + public UserEvent() { // Required for Jackson Serialization and Deserialization }
diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/serialization/JacksonDeserializer.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/serialization/JacksonDeserializer.java index 4868ecaf2e..cf72ab12df 100644 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/serialization/JacksonDeserializer.java +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/serialization/JacksonDeserializer.java @@ -1,6 +1,8 @@ package com.baeldung.kafka.message.ordering.serialization; + import com.baeldung.kafka.message.ordering.Config; import com.fasterxml.jackson.databind.ObjectMapper; + import 
org.apache.kafka.common.serialization.Deserializer; import java.util.Map; @@ -12,7 +14,6 @@ public class JacksonDeserializer implements Deserializer { private final ObjectMapper objectMapper = new ObjectMapper(); private Class type; - @Override public void configure(Map configs, boolean isKey) { this.type = (Class) configs.get(Config.CONSUMER_VALUE_DESERIALIZER_SERIALIZED_CLASS); diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/serialization/JacksonSerializer.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/serialization/JacksonSerializer.java index 4c081de3cc..b2ace3b8ed 100644 --- a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/serialization/JacksonSerializer.java +++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/serialization/JacksonSerializer.java @@ -1,6 +1,7 @@ package com.baeldung.kafka.message.ordering.serialization; import com.fasterxml.jackson.databind.ObjectMapper; + import org.apache.kafka.common.serialization.Serializer; /** diff --git a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/ExternalSequenceWithTimeWindowIntegrationTest.java b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/ExternalSequenceWithTimeWindowIntegrationTest.java index a5ec7a98a3..caffe12620 100644 --- a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/ExternalSequenceWithTimeWindowIntegrationTest.java +++ b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/ExternalSequenceWithTimeWindowIntegrationTest.java @@ -4,6 +4,7 @@ import com.baeldung.kafka.headers.KafkaMessageHeaders; import com.baeldung.kafka.message.ordering.payload.UserEvent; import com.baeldung.kafka.message.ordering.serialization.JacksonDeserializer; import com.baeldung.kafka.message.ordering.serialization.JacksonSerializer; + import org.apache.kafka.clients.admin.*; import org.apache.kafka.clients.consumer.ConsumerConfig; import org.apache.kafka.clients.consumer.ConsumerRecords; @@ -23,11 +24,14 @@ import org.testcontainers.containers.KafkaContainer; import org.testcontainers.junit.jupiter.Container; import org.testcontainers.junit.jupiter.Testcontainers; import org.testcontainers.utility.DockerImageName; + import java.time.Duration; import java.util.*; import java.util.concurrent.ExecutionException; import java.util.concurrent.Future; + import com.google.common.collect.ImmutableList; + import static org.assertj.core.api.AssertionsForInterfaceTypes.assertThat; @Testcontainers @@ -37,8 +41,8 @@ public class ExternalSequenceWithTimeWindowIntegrationTest { private static KafkaProducer producer; private static KafkaConsumer consumer; private static final Duration TIMEOUT_WAIT_FOR_MESSAGES = Duration.ofSeconds(5); - private static final long BUFFER_PERIOD_NS = Duration.ofSeconds(5).toNanos(); - + private static final long BUFFER_PERIOD_NS = Duration.ofSeconds(5) + .toNanos(); private static Logger logger = LoggerFactory.getLogger(ExternalSequenceWithTimeWindowIntegrationTest.class); @Container @@ -66,7 +70,9 @@ public class ExternalSequenceWithTimeWindowIntegrationTest { admin = Admin.create(adminProperties); producer = new KafkaProducer<>(producerProperties); consumer = new KafkaConsumer<>(consumerProperties); - admin.createTopics(ImmutableList.of(new NewTopic(Config.MULTI_PARTITION_TOPIC, Config.MULTIPLE_PARTITIONS, Config.REPLICATION_FACTOR))).all().get(); + admin.createTopics(ImmutableList.of(new NewTopic(Config.MULTI_PARTITION_TOPIC, Config.MULTIPLE_PARTITIONS, 
Config.REPLICATION_FACTOR))) + .all() + .get(); } @AfterAll @@ -78,8 +84,9 @@ public class ExternalSequenceWithTimeWindowIntegrationTest { void givenMultiplePartitions_whenPublishedToKafkaAndConsumedWithExtSeqNumberAndTimeWindow_thenCheckForMessageOrder() throws ExecutionException, InterruptedException { List sentUserEventList = new ArrayList<>(); List receivedUserEventList = new ArrayList<>(); - for (long sequenceNumber = 1; sequenceNumber <= 10 ; sequenceNumber++) { - UserEvent userEvent = new UserEvent(UUID.randomUUID().toString()); + for (long sequenceNumber = 1; sequenceNumber <= 10; sequenceNumber++) { + UserEvent userEvent = new UserEvent(UUID.randomUUID() + .toString()); userEvent.setEventNanoTime(System.nanoTime()); userEvent.setGlobalSequenceNumber(sequenceNumber); Future future = producer.send(new ProducerRecord<>(Config.MULTI_PARTITION_TOPIC, sequenceNumber, userEvent)); @@ -105,9 +112,8 @@ public class ExternalSequenceWithTimeWindowIntegrationTest { buffer.add(record.value()); }); } - assertThat(receivedUserEventList) - .isEqualTo(sentUserEventList) - .containsExactlyElementsOf(sentUserEventList); + assertThat(receivedUserEventList).isEqualTo(sentUserEventList) + .containsExactlyElementsOf(sentUserEventList); } private static void processBuffer(List buffer, List receivedUserEventList) { diff --git a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/MultiplePartitionIntegrationTest.java b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/MultiplePartitionIntegrationTest.java index adfa9a0399..bb25486f00 100644 --- a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/MultiplePartitionIntegrationTest.java +++ b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/MultiplePartitionIntegrationTest.java @@ -3,6 +3,7 @@ package com.baeldung.kafka.message.ordering; import com.baeldung.kafka.message.ordering.payload.UserEvent; import com.baeldung.kafka.message.ordering.serialization.JacksonDeserializer; import com.baeldung.kafka.message.ordering.serialization.JacksonSerializer; + import org.apache.kafka.clients.admin.*; import org.apache.kafka.clients.consumer.ConsumerConfig; import org.apache.kafka.clients.consumer.ConsumerRecords; @@ -22,11 +23,14 @@ import org.testcontainers.containers.KafkaContainer; import org.testcontainers.junit.jupiter.Container; import org.testcontainers.junit.jupiter.Testcontainers; import org.testcontainers.utility.DockerImageName; + import java.time.Duration; import java.util.*; import java.util.concurrent.ExecutionException; import java.util.concurrent.Future; + import com.google.common.collect.ImmutableList; + import static org.assertj.core.api.AssertionsForInterfaceTypes.assertThat; @Testcontainers @@ -63,7 +67,9 @@ public class MultiplePartitionIntegrationTest { admin = Admin.create(adminProperties); producer = new KafkaProducer<>(producerProperties); consumer = new KafkaConsumer<>(consumerProperties); - admin.createTopics(ImmutableList.of(new NewTopic(Config.MULTI_PARTITION_TOPIC, Config.MULTIPLE_PARTITIONS, Config.REPLICATION_FACTOR))).all().get(); + admin.createTopics(ImmutableList.of(new NewTopic(Config.MULTI_PARTITION_TOPIC, Config.MULTIPLE_PARTITIONS, Config.REPLICATION_FACTOR))) + .all() + .get(); } @AfterAll @@ -76,7 +82,8 @@ public class MultiplePartitionIntegrationTest { List sentUserEventList = new ArrayList<>(); List receivedUserEventList = new ArrayList<>(); for (long sequenceNumber = 1; sequenceNumber <= 10; sequenceNumber++) { - UserEvent userEvent = new 
UserEvent(UUID.randomUUID().toString()); + UserEvent userEvent = new UserEvent(UUID.randomUUID() + .toString()); userEvent.setGlobalSequenceNumber(sequenceNumber); userEvent.setEventNanoTime(System.nanoTime()); Future future = producer.send(new ProducerRecord<>(Config.MULTI_PARTITION_TOPIC, sequenceNumber, userEvent)); @@ -92,8 +99,7 @@ public class MultiplePartitionIntegrationTest { receivedUserEventList.add(userEvent); logger.info("User Event ID: " + userEvent.getUserEventId()); }); - assertThat(receivedUserEventList) - .isNotEqualTo(sentUserEventList) + assertThat(receivedUserEventList).isNotEqualTo(sentUserEventList) .containsExactlyInAnyOrderElementsOf(sentUserEventList); } } diff --git a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/SinglePartitionIntegrationTest.java b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/SinglePartitionIntegrationTest.java index 7280a1218a..8656df1bf3 100644 --- a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/SinglePartitionIntegrationTest.java +++ b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/SinglePartitionIntegrationTest.java @@ -3,6 +3,7 @@ package com.baeldung.kafka.message.ordering; import com.baeldung.kafka.message.ordering.payload.UserEvent; import com.baeldung.kafka.message.ordering.serialization.JacksonDeserializer; import com.baeldung.kafka.message.ordering.serialization.JacksonSerializer; + import org.apache.kafka.clients.admin.Admin; import org.apache.kafka.clients.admin.AdminClientConfig; import org.apache.kafka.clients.admin.NewTopic; @@ -29,7 +30,9 @@ import java.time.Duration; import java.util.*; import java.util.concurrent.ExecutionException; import java.util.concurrent.Future; + import com.google.common.collect.ImmutableList; + import static org.assertj.core.api.AssertionsForInterfaceTypes.assertThat; @Testcontainers @@ -56,7 +59,6 @@ public class SinglePartitionIntegrationTest { producerProperties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, KAFKA_CONTAINER.getBootstrapServers()); producerProperties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, LongSerializer.class.getName()); producerProperties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JacksonSerializer.class.getName()); - producer = new KafkaProducer<>(producerProperties); Properties consumerProperties = new Properties(); consumerProperties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, KAFKA_CONTAINER.getBootstrapServers()); @@ -65,11 +67,12 @@ public class SinglePartitionIntegrationTest { consumerProperties.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); consumerProperties.put(Config.CONSUMER_VALUE_DESERIALIZER_SERIALIZED_CLASS, UserEvent.class); consumerProperties.put(ConsumerConfig.GROUP_ID_CONFIG, "test-group"); - consumer = new KafkaConsumer<>(consumerProperties); admin = Admin.create(adminProperties); - - - admin.createTopics(ImmutableList.of(new NewTopic(Config.SINGLE_PARTITION_TOPIC, Config.SINGLE_PARTITION, Config.REPLICATION_FACTOR))).all().get(); + producer = new KafkaProducer<>(producerProperties); + consumer = new KafkaConsumer<>(consumerProperties); + admin.createTopics(ImmutableList.of(new NewTopic(Config.SINGLE_PARTITION_TOPIC, Config.SINGLE_PARTITION, Config.REPLICATION_FACTOR))) + .all() + .get(); } @AfterAll @@ -82,7 +85,8 @@ public class SinglePartitionIntegrationTest { List sentUserEventList = new ArrayList<>(); List receivedUserEventList = new ArrayList<>(); for (long sequenceNumber = 1; sequenceNumber <= 10; sequenceNumber++) { - UserEvent userEvent = new 
UserEvent(UUID.randomUUID().toString()); + UserEvent userEvent = new UserEvent(UUID.randomUUID() + .toString()); userEvent.setGlobalSequenceNumber(sequenceNumber); userEvent.setEventNanoTime(System.nanoTime()); ProducerRecord producerRecord = new ProducerRecord<>(Config.SINGLE_PARTITION_TOPIC, userEvent); @@ -99,8 +103,7 @@ public class SinglePartitionIntegrationTest { receivedUserEventList.add(userEvent); logger.info("User Event ID: " + userEvent.getUserEventId()); }); - assertThat(receivedUserEventList) - .isEqualTo(sentUserEventList) + assertThat(receivedUserEventList).isEqualTo(sentUserEventList) .containsExactlyElementsOf(sentUserEventList); } } From 6e758b8438de2bd367ccf741fa2a53a01c9399ec Mon Sep 17 00:00:00 2001 From: Amol Gote Date: Sun, 5 Nov 2023 21:39:02 -0500 Subject: [PATCH 58/63] The test names should be LiveTest not IntegrationTest, as per the other similar tests in this module. This naming is used to mark these tests as manually runnable only, since they are heavy tests that spin up a container, so we don't want them to be run as part of the automatic CI build. Same for the other tests. --- ...Test.java => ExternalSequenceWithTimeWindowLiveTest.java} | 5 ++--- ...onIntegrationTest.java => MultiplePartitionLiveTest.java} | 4 ++-- ...tionIntegrationTest.java => SinglePartitionLiveTest.java} | 4 ++-- 3 files changed, 6 insertions(+), 7 deletions(-) rename apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/{ExternalSequenceWithTimeWindowIntegrationTest.java => ExternalSequenceWithTimeWindowLiveTest.java} (97%) rename apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/{MultiplePartitionIntegrationTest.java => MultiplePartitionLiveTest.java} (98%) rename apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/{SinglePartitionIntegrationTest.java => SinglePartitionLiveTest.java} (98%) diff --git a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/ExternalSequenceWithTimeWindowIntegrationTest.java b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/ExternalSequenceWithTimeWindowLiveTest.java similarity index 97% rename from apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/ExternalSequenceWithTimeWindowIntegrationTest.java rename to apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/ExternalSequenceWithTimeWindowLiveTest.java index caffe12620..f36c6ebd63 100644 --- a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/ExternalSequenceWithTimeWindowIntegrationTest.java +++ b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/ExternalSequenceWithTimeWindowLiveTest.java @@ -1,6 +1,5 @@ package com.baeldung.kafka.message.ordering; -import com.baeldung.kafka.headers.KafkaMessageHeaders; import com.baeldung.kafka.message.ordering.payload.UserEvent; import com.baeldung.kafka.message.ordering.serialization.JacksonDeserializer; import com.baeldung.kafka.message.ordering.serialization.JacksonSerializer; @@ -35,7 +34,7 @@ import com.google.common.collect.ImmutableList; import static org.assertj.core.api.AssertionsForInterfaceTypes.assertThat; @Testcontainers -public class ExternalSequenceWithTimeWindowIntegrationTest { +public class ExternalSequenceWithTimeWindowLiveTest { private static Admin admin; private static KafkaProducer producer; @@ -43,7 +42,7 @@ public class ExternalSequenceWithTimeWindowIntegrationTest { private static final Duration TIMEOUT_WAIT_FOR_MESSAGES = Duration.ofSeconds(5); private static final long BUFFER_PERIOD_NS = Duration.ofSeconds(5) 
.toNanos(); - private static Logger logger = LoggerFactory.getLogger(ExternalSequenceWithTimeWindowIntegrationTest.class); + private static Logger logger = LoggerFactory.getLogger(ExternalSequenceWithTimeWindowLiveTest.class); @Container private static final KafkaContainer KAFKA_CONTAINER = new KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka:latest")); diff --git a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/MultiplePartitionIntegrationTest.java b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/MultiplePartitionLiveTest.java similarity index 98% rename from apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/MultiplePartitionIntegrationTest.java rename to apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/MultiplePartitionLiveTest.java index bb25486f00..407b4d52a9 100644 --- a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/MultiplePartitionIntegrationTest.java +++ b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/MultiplePartitionLiveTest.java @@ -34,14 +34,14 @@ import com.google.common.collect.ImmutableList; import static org.assertj.core.api.AssertionsForInterfaceTypes.assertThat; @Testcontainers -public class MultiplePartitionIntegrationTest { +public class MultiplePartitionLiveTest { private static Admin admin; private static KafkaProducer producer; private static KafkaConsumer consumer; private static final Duration TIMEOUT_WAIT_FOR_MESSAGES = Duration.ofSeconds(5); - private static Logger logger = LoggerFactory.getLogger(MultiplePartitionIntegrationTest.class); + private static Logger logger = LoggerFactory.getLogger(MultiplePartitionLiveTest.class); @Container private static final KafkaContainer KAFKA_CONTAINER = new KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka:latest")); diff --git a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/SinglePartitionIntegrationTest.java b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/SinglePartitionLiveTest.java similarity index 98% rename from apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/SinglePartitionIntegrationTest.java rename to apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/SinglePartitionLiveTest.java index 8656df1bf3..9c6a15ebeb 100644 --- a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/SinglePartitionIntegrationTest.java +++ b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/SinglePartitionLiveTest.java @@ -36,7 +36,7 @@ import com.google.common.collect.ImmutableList; import static org.assertj.core.api.AssertionsForInterfaceTypes.assertThat; @Testcontainers -public class SinglePartitionIntegrationTest { +public class SinglePartitionLiveTest { private static Admin admin; private static KafkaProducer producer; @@ -44,7 +44,7 @@ public class SinglePartitionIntegrationTest { private static final Duration TIMEOUT_WAIT_FOR_MESSAGES = Duration.ofSeconds(5); - private static Logger logger = LoggerFactory.getLogger(SinglePartitionIntegrationTest.class); + private static Logger logger = LoggerFactory.getLogger(SinglePartitionLiveTest.class); @Container private static final KafkaContainer KAFKA_CONTAINER = new KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka:latest")); From b38b5327fd82481b3828e453286e1b2eddc7324f Mon Sep 17 00:00:00 2001 From: panos-kakos Date: Tue, 14 Nov 2023 07:30:13 +0200 Subject: [PATCH 59/63] [JAVA-26731] Upgraded bucket4j-spring-boot-starter version --- 
spring-boot-modules/spring-boot-libraries/pom.xml | 10 +++++++--- .../ratelimiting/application-bucket4j-starter.yml | 6 +++--- 2 files changed, 10 insertions(+), 6 deletions(-) diff --git a/spring-boot-modules/spring-boot-libraries/pom.xml b/spring-boot-modules/spring-boot-libraries/pom.xml index ed9a414a60..b0f0c780aa 100644 --- a/spring-boot-modules/spring-boot-libraries/pom.xml +++ b/spring-boot-modules/spring-boot-libraries/pom.xml @@ -18,6 +18,10 @@ org.springframework.boot spring-boot-starter-web + + org.springframework.boot + spring-boot-starter-validation + org.springframework.boot spring-boot-starter-security @@ -95,7 +99,7 @@ - com.github.vladimir-bukhtoyarov + com.bucket4j bucket4j-core ${bucket4j.version} @@ -226,8 +230,8 @@ 2.1 2.6.0 3.3.0 - 7.6.0 - 0.7.0 + 8.1.0 + 0.8.1 3.1.8 diff --git a/spring-boot-modules/spring-boot-libraries/src/main/resources/ratelimiting/application-bucket4j-starter.yml b/spring-boot-modules/spring-boot-libraries/src/main/resources/ratelimiting/application-bucket4j-starter.yml index ecc9f22e0a..efff65555b 100644 --- a/spring-boot-modules/spring-boot-libraries/src/main/resources/ratelimiting/application-bucket4j-starter.yml +++ b/spring-boot-modules/spring-boot-libraries/src/main/resources/ratelimiting/application-bucket4j-starter.yml @@ -21,19 +21,19 @@ bucket4j: url: /api/v1/area.* http-response-body: "{ \"status\": 429, \"error\": \"Too Many Requests\", \"message\": \"You have exhausted your API Request Quota\" }" rate-limits: - - expression: "getHeader('X-api-key')" + - cache-key: "getHeader('X-api-key')" execute-condition: "getHeader('X-api-key').startsWith('PX001-')" bandwidths: - capacity: 100 time: 1 unit: hours - - expression: "getHeader('X-api-key')" + - cache-key: "getHeader('X-api-key')" execute-condition: "getHeader('X-api-key').startsWith('BX001-')" bandwidths: - capacity: 40 time: 1 unit: hours - - expression: "getHeader('X-api-key')" + - cache-key: "getHeader('X-api-key')" bandwidths: - capacity: 20 time: 1 From 4d9a5ea8b46689b1c7f3a1f224a30c9207b431c5 Mon Sep 17 00:00:00 2001 From: Gaetano Piazzolla Date: Wed, 15 Nov 2023 10:55:56 +0100 Subject: [PATCH 60/63] JAVA-12712 | intelliJ Module (#15152) --- intelliJ-modules/.gitignore | 1 + .../gradle/wrapper/gradle-wrapper.properties | 7 + intelliJ-modules/gradlew | 248 ++++++++++++++++++ intelliJ-modules/gradlew.bat | 92 +++++++ intelliJ-modules/settings.gradle | 3 + .../stackoverflow-plugin-gradle/build.gradle | 10 +- .../stackoverflow-plugin/build.gradle | 5 + .../stackoverflow-plugin/settings.gradle | 1 + 8 files changed, 363 insertions(+), 4 deletions(-) create mode 100644 intelliJ-modules/.gitignore create mode 100644 intelliJ-modules/gradle/wrapper/gradle-wrapper.properties create mode 100644 intelliJ-modules/gradlew create mode 100644 intelliJ-modules/gradlew.bat create mode 100644 intelliJ-modules/settings.gradle create mode 100644 intelliJ-modules/stackoverflow-plugin/build.gradle create mode 100644 intelliJ-modules/stackoverflow-plugin/settings.gradle diff --git a/intelliJ-modules/.gitignore b/intelliJ-modules/.gitignore new file mode 100644 index 0000000000..d16386367f --- /dev/null +++ b/intelliJ-modules/.gitignore @@ -0,0 +1 @@ +build/ \ No newline at end of file diff --git a/intelliJ-modules/gradle/wrapper/gradle-wrapper.properties b/intelliJ-modules/gradle/wrapper/gradle-wrapper.properties new file mode 100644 index 0000000000..62f495dfed --- /dev/null +++ b/intelliJ-modules/gradle/wrapper/gradle-wrapper.properties @@ -0,0 +1,7 @@ +distributionBase=GRADLE_USER_HOME 
+distributionPath=wrapper/dists
+distributionUrl=https\://services.gradle.org/distributions/gradle-8.2-bin.zip
+networkTimeout=10000
+validateDistributionUrl=true
+zipStoreBase=GRADLE_USER_HOME
+zipStorePath=wrapper/dists
diff --git a/intelliJ-modules/gradlew b/intelliJ-modules/gradlew
new file mode 100644
index 0000000000..fcb6fca147
--- /dev/null
+++ b/intelliJ-modules/gradlew
@@ -0,0 +1,248 @@
+#!/bin/sh
+
+#
+# Copyright © 2015-2021 the original authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+##############################################################################
+#
+#   Gradle start up script for POSIX generated by Gradle.
+#
+#   Important for running:
+#
+#   (1) You need a POSIX-compliant shell to run this script. If your /bin/sh is
+#       noncompliant, but you have some other compliant shell such as ksh or
+#       bash, then to run this script, type that shell name before the whole
+#       command line, like:
+#
+#           ksh Gradle
+#
+#       Busybox and similar reduced shells will NOT work, because this script
+#       requires all of these POSIX shell features:
+#         * functions;
+#         * expansions «$var», «${var}», «${var:-default}», «${var+SET}»,
+#           «${var#prefix}», «${var%suffix}», and «$( cmd )»;
+#         * compound commands having a testable exit status, especially «case»;
+#         * various built-in commands including «command», «set», and «ulimit».
+#
+#   Important for patching:
+#
+#   (2) This script targets any POSIX shell, so it avoids extensions provided
+#       by Bash, Ksh, etc; in particular arrays are avoided.
+#
+#       The "traditional" practice of packing multiple parameters into a
+#       space-separated string is a well documented source of bugs and security
+#       problems, so this is (mostly) avoided, by progressively accumulating
+#       options in "$@", and eventually passing that to Java.
+#
+#       Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS,
+#       and GRADLE_OPTS) rely on word-splitting, this is performed explicitly;
+#       see the in-line comments for details.
+#
+#       There are tweaks for specific operating systems such as AIX, CygWin,
+#       Darwin, MinGW, and NonStop.
+#
+#   (3) This script is generated from the Groovy template
+#       https://github.com/gradle/gradle/blob/HEAD/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt
+#       within the Gradle project.
+#
+#       You can find Gradle at https://github.com/gradle/gradle/.
+#
+##############################################################################
+
+# Attempt to set APP_HOME
+
+# Resolve links: $0 may be a link
+app_path=$0
+
+# Need this for daisy-chained symlinks.
+while
+    APP_HOME=${app_path%"${app_path##*/}"}  # leaves a trailing /; empty if no leading path
+    [ -h "$app_path" ]
+do
+    ls=$( ls -ld "$app_path" )
+    link=${ls#*' -> '}
+    case $link in             #(
+      /*)   app_path=$link ;; #(
+      *)    app_path=$APP_HOME$link ;;
+    esac
+done
+
+# This is normally unused
+# shellcheck disable=SC2034
+APP_BASE_NAME=${0##*/}
+APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit
+
+# Use the maximum available, or set MAX_FD != -1 to use that value.
+MAX_FD=maximum
+
+warn () {
+    echo "$*"
+} >&2
+
+die () {
+    echo
+    echo "$*"
+    echo
+    exit 1
+} >&2
+
+# OS specific support (must be 'true' or 'false').
+cygwin=false
+msys=false
+darwin=false
+nonstop=false
+case "$( uname )" in                #(
+  CYGWIN* )         cygwin=true  ;; #(
+  Darwin* )         darwin=true  ;; #(
+  MSYS* | MINGW* )  msys=true    ;; #(
+  NONSTOP* )        nonstop=true ;;
+esac
+
+CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
+
+
+# Determine the Java command to use to start the JVM.
+if [ -n "$JAVA_HOME" ] ; then
+    if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
+        # IBM's JDK on AIX uses strange locations for the executables
+        JAVACMD=$JAVA_HOME/jre/sh/java
+    else
+        JAVACMD=$JAVA_HOME/bin/java
+    fi
+    if [ ! -x "$JAVACMD" ] ; then
+        die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
+
+Please set the JAVA_HOME variable in your environment to match the
+location of your Java installation."
+    fi
+else
+    JAVACMD=java
+    if ! command -v java >/dev/null 2>&1
+    then
+        die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
+
+Please set the JAVA_HOME variable in your environment to match the
+location of your Java installation."
+    fi
+fi
+
+# Increase the maximum file descriptors if we can.
+if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then
+    case $MAX_FD in #(
+      max*)
+        # In POSIX sh, ulimit -H is undefined. That's why the result is checked to see if it worked.
+        # shellcheck disable=SC3045
+        MAX_FD=$( ulimit -H -n ) ||
+            warn "Could not query maximum file descriptor limit"
+    esac
+    case $MAX_FD in  #(
+      '' | soft) :;; #(
+      *)
+        # In POSIX sh, ulimit -n is undefined. That's why the result is checked to see if it worked.
+        # shellcheck disable=SC3045
+        ulimit -n "$MAX_FD" ||
+            warn "Could not set maximum file descriptor limit to $MAX_FD"
+    esac
+fi
+
+# Collect all arguments for the java command, stacking in reverse order:
+#   * args from the command line
+#   * the main class name
+#   * -classpath
+#   * -D...appname settings
+#   * --module-path (only if needed)
+#   * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables.
+
+# For Cygwin or MSYS, switch paths to Windows format before running java
+if "$cygwin" || "$msys" ; then
+    APP_HOME=$( cygpath --path --mixed "$APP_HOME" )
+    CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" )
+
+    JAVACMD=$( cygpath --unix "$JAVACMD" )
+
+    # Now convert the arguments - kludge to limit ourselves to /bin/sh
+    for arg do
+        if
+            case $arg in                                #(
+              -*)   false ;;                            # don't mess with options #(
+              /?*)  t=${arg#/} t=/${t%%/*}              # looks like a POSIX filepath
+                    [ -e "$t" ] ;;                      #(
+              *)    false ;;
+            esac
+        then
+            arg=$( cygpath --path --ignore --mixed "$arg" )
+        fi
+        # Roll the args list around exactly as many times as the number of
+        # args, so each arg winds up back in the position where it started, but
+        # possibly modified.
+        #
+        # NB: a `for` loop captures its iteration list before it begins, so
+        # changing the positional parameters here affects neither the number of
+        # iterations, nor the values presented in `arg`.
+        shift                   # remove old arg
+        set -- "$@" "$arg"      # push replacement arg
+    done
+fi
+
+
+# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
+DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
+
+# Collect all arguments for the java command;
+#   * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of
+#     shell script including quotes and variable substitutions, so put them in
+#     double quotes to make sure that they get re-expanded; and
+#   * put everything else in single quotes, so that it's not re-expanded.
+
+set -- \
+        "-Dorg.gradle.appname=$APP_BASE_NAME" \
+        -classpath "$CLASSPATH" \
+        org.gradle.wrapper.GradleWrapperMain \
+        "$@"
+
+# Stop when "xargs" is not available.
+if ! command -v xargs >/dev/null 2>&1
+then
+    die "xargs is not available"
+fi
+
+# Use "xargs" to parse quoted args.
+#
+# With -n1 it outputs one arg per line, with the quotes and backslashes removed.
+#
+# In Bash we could simply go:
+#
+#   readarray ARGS < <( xargs -n1 <<<"$var" ) &&
+#   set -- "${ARGS[@]}" "$@"
+#
+# but POSIX shell has neither arrays nor command substitution, so instead we
+# post-process each arg (as a line of input to sed) to backslash-escape any
+# character that might be a shell metacharacter, then use eval to reverse
+# that process (while maintaining the separation between arguments), and wrap
+# the whole thing up as a single "set" statement.
+#
+# This will of course break if any of these variables contains a newline or
+# an unmatched quote.
+#
+
+eval "set -- $(
+        printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" |
+        xargs -n1 |
+        sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' |
+        tr '\n' ' '
+    )" '"$@"'
+
+exec "$JAVACMD" "$@"
diff --git a/intelliJ-modules/gradlew.bat b/intelliJ-modules/gradlew.bat
new file mode 100644
index 0000000000..93e3f59f13
--- /dev/null
+++ b/intelliJ-modules/gradlew.bat
@@ -0,0 +1,92 @@
+@rem
+@rem Copyright 2015 the original author or authors.
+@rem
+@rem Licensed under the Apache License, Version 2.0 (the "License");
+@rem you may not use this file except in compliance with the License.
+@rem You may obtain a copy of the License at
+@rem
+@rem      https://www.apache.org/licenses/LICENSE-2.0
+@rem
+@rem Unless required by applicable law or agreed to in writing, software
+@rem distributed under the License is distributed on an "AS IS" BASIS,
+@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+@rem See the License for the specific language governing permissions and
+@rem limitations under the License.
+@rem
+
+@if "%DEBUG%"=="" @echo off
+@rem ##########################################################################
+@rem
+@rem  Gradle startup script for Windows
+@rem
+@rem ##########################################################################
+
+@rem Set local scope for the variables with windows NT shell
+if "%OS%"=="Windows_NT" setlocal
+
+set DIRNAME=%~dp0
+if "%DIRNAME%"=="" set DIRNAME=.
+@rem This is normally unused
+set APP_BASE_NAME=%~n0
+set APP_HOME=%DIRNAME%
+
+@rem Resolve any "." and ".." in APP_HOME to make it shorter.
+for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi
+
+@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
+set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m"
+
+@rem Find java.exe
+if defined JAVA_HOME goto findJavaFromJavaHome
+
+set JAVA_EXE=java.exe
+%JAVA_EXE% -version >NUL 2>&1
+if %ERRORLEVEL% equ 0 goto execute
+
+echo.
+echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
+echo.
+echo Please set the JAVA_HOME variable in your environment to match the
+echo location of your Java installation.
+
+goto fail
+
+:findJavaFromJavaHome
+set JAVA_HOME=%JAVA_HOME:"=%
+set JAVA_EXE=%JAVA_HOME%/bin/java.exe
+
+if exist "%JAVA_EXE%" goto execute
+
+echo.
+echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
+echo.
+echo Please set the JAVA_HOME variable in your environment to match the
+echo location of your Java installation.
+
+goto fail
+
+:execute
+@rem Setup the command line
+
+set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
+
+
+@rem Execute Gradle
+"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %*
+
+:end
+@rem End local scope for the variables with windows NT shell
+if %ERRORLEVEL% equ 0 goto mainEnd
+
+:fail
+rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
+rem the _cmd.exe /c_ return code!
+set EXIT_CODE=%ERRORLEVEL%
+if %EXIT_CODE% equ 0 set EXIT_CODE=1
+if not ""=="%GRADLE_EXIT_CONSOLE%" exit %EXIT_CODE%
+exit /b %EXIT_CODE%
+
+:mainEnd
+if "%OS%"=="Windows_NT" endlocal
+
+:omega
diff --git a/intelliJ-modules/settings.gradle b/intelliJ-modules/settings.gradle
new file mode 100644
index 0000000000..ff10e09b3c
--- /dev/null
+++ b/intelliJ-modules/settings.gradle
@@ -0,0 +1,3 @@
+rootProject.name = 'intellij-modules'
+include 'stackoverflow-plugin'
+include 'stackoverflow-plugin-gradle'
diff --git a/intelliJ-modules/stackoverflow-plugin-gradle/build.gradle b/intelliJ-modules/stackoverflow-plugin-gradle/build.gradle
index cd0cc258bf..3967a1f075 100644
--- a/intelliJ-modules/stackoverflow-plugin-gradle/build.gradle
+++ b/intelliJ-modules/stackoverflow-plugin-gradle/build.gradle
@@ -1,6 +1,6 @@
 plugins {
     id 'java'
-    id 'org.jetbrains.intellij' version '0.4.21'
+    id 'org.jetbrains.intellij' version '1.16.0'
 }
 
 group 'com.baeldung'
@@ -11,15 +11,17 @@ repositories {
 }
 
 dependencies {
-    testCompile group: 'junit', name: 'junit', version: '4.12'
+    testImplementation('junit:junit:4.12')
 }
 
 // See https://github.com/JetBrains/gradle-intellij-plugin/
 intellij {
-    version '2020.1.1'
+    version = "2022.2.5"
+    type = "IC"
 }
+
 patchPluginXml {
-    changeNotes """
+    changeNotes = """
       Add change notes here.<br>
      <em>most HTML tags may be used</em>"""
 }
\ No newline at end of file
diff --git a/intelliJ-modules/stackoverflow-plugin/build.gradle b/intelliJ-modules/stackoverflow-plugin/build.gradle
new file mode 100644
index 0000000000..81962e0e0a
--- /dev/null
+++ b/intelliJ-modules/stackoverflow-plugin/build.gradle
@@ -0,0 +1,5 @@
+// this project is not supposed to be built. We check only if it compiles fine.
+
+plugins {
+    id 'java'
+}
\ No newline at end of file
diff --git a/intelliJ-modules/stackoverflow-plugin/settings.gradle b/intelliJ-modules/stackoverflow-plugin/settings.gradle
new file mode 100644
index 0000000000..4ebea35284
--- /dev/null
+++ b/intelliJ-modules/stackoverflow-plugin/settings.gradle
@@ -0,0 +1 @@
+rootProject.name = 'stackoverflow-plugin'

From de50130c7461ddb7c12ee4b5832eb4244f852c1d Mon Sep 17 00:00:00 2001
From: Michael Olayemi
Date: Wed, 15 Nov 2023 14:30:00 +0100
Subject: [PATCH 61/63] Create Table using ASCII in a Console in Java (#15158)

---
 core-java-modules/core-java-console/pom.xml   |  6 ++
 .../consoletableoutput/BodyMassIndex.java     | 44 +++++++++++++
 .../BodyMassIndexApplication.java             | 62 +++++++++++++++++++
 3 files changed, 112 insertions(+)
 create mode 100644 core-java-modules/core-java-console/src/main/java/com/baeldung/consoletableoutput/BodyMassIndex.java
 create mode 100644 core-java-modules/core-java-console/src/main/java/com/baeldung/consoletableoutput/BodyMassIndexApplication.java

diff --git a/core-java-modules/core-java-console/pom.xml b/core-java-modules/core-java-console/pom.xml
index 1b56f1f27c..8677b672ea 100644
--- a/core-java-modules/core-java-console/pom.xml
+++ b/core-java-modules/core-java-console/pom.xml
@@ -29,6 +29,11 @@
+        <dependency>
+            <groupId>de.vandermeer</groupId>
+            <artifactId>asciitable</artifactId>
+            <version>${ascii.version}</version>
+        </dependency>
@@ -157,6 +162,7 @@
         3.0.0-M1
         1.8
         1.8
+        <ascii.version>0.3.2</ascii.version>
\ No newline at end of file
diff --git a/core-java-modules/core-java-console/src/main/java/com/baeldung/consoletableoutput/BodyMassIndex.java b/core-java-modules/core-java-console/src/main/java/com/baeldung/consoletableoutput/BodyMassIndex.java
new file mode 100644
index 0000000000..96cede7020
--- /dev/null
+++ b/core-java-modules/core-java-console/src/main/java/com/baeldung/consoletableoutput/BodyMassIndex.java
@@ -0,0 +1,44 @@
+package com.baeldung.consoletableoutput;
+
+public class BodyMassIndex {
+
+    private String name;
+    private double height;
+    private double weight;
+
+    public BodyMassIndex(String name, double height, double weight) {
+        this.name = name;
+        this.height = height;
+        this.weight = weight;
+    }
+
+    public String getName() {
+        return name;
+    }
+
+    public void setName(String name) {
+        this.name = name;
+    }
+
+    public double getHeight() {
+        return height;
+    }
+
+    public void setHeight(double height) {
+        this.height = height;
+    }
+
+    public double getWeight() {
+        return weight;
+    }
+
+    public void setWeight(double weight) {
+        this.weight = weight;
+    }
+
+    public double calculate() {
+        double bmi = weight / (height * height);
+        String formattedBmi = String.format("%.2f", bmi);
+        return Double.parseDouble(formattedBmi);
+    }
+}
diff --git a/core-java-modules/core-java-console/src/main/java/com/baeldung/consoletableoutput/BodyMassIndexApplication.java b/core-java-modules/core-java-console/src/main/java/com/baeldung/consoletableoutput/BodyMassIndexApplication.java
new file mode 100644
index 0000000000..cb340256aa
--- /dev/null
+++ b/core-java-modules/core-java-console/src/main/java/com/baeldung/consoletableoutput/BodyMassIndexApplication.java
@@ -0,0 +1,62 @@
+package com.baeldung.consoletableoutput;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import de.vandermeer.asciitable.AsciiTable;
+import de.vandermeer.skb.interfaces.transformers.textformat.TextAlignment;
+
+public class BodyMassIndexApplication {
+
+    public static void main(String[] args) {
+        stringFormat();
+        asciiTable();
+
+    }
+
+    public static void stringFormat() {
+        List<BodyMassIndex> bodyMassIndices = new ArrayList<>();
+        bodyMassIndices.add(new BodyMassIndex("Tom", 1.8, 80));
+        bodyMassIndices.add(new BodyMassIndex("Elton", 1.9, 90));
+        bodyMassIndices.add(new BodyMassIndex("Harry", 1.9, 90));
+        bodyMassIndices.add(new BodyMassIndex("Hannah", 1.9, 90));
+
+        String leftAlignment = "| %-7s | %-7.2f | %-7.2f | %-5.2f |%n";
+
+        System.out.format("+---------+---------+---------+-------+%n");
+        System.out.format("| Name    | Height  | Weight  | BMI   |%n");
+        System.out.format("+---------+---------+---------+-------+%n");
+
+        for (BodyMassIndex bodyMassIndex : bodyMassIndices) {
+            System.out.format(leftAlignment, bodyMassIndex.getName(), bodyMassIndex.getHeight(), bodyMassIndex.getWeight(), bodyMassIndex.calculate());
+            System.out.format("+---------+---------+---------+-------+%n");
+        }
+
+    }
+
+    public static void asciiTable() {
+        List<BodyMassIndex> bodyMassIndices = new ArrayList<>();
+        bodyMassIndices.add(new BodyMassIndex("Tom", 1.8, 80));
+        bodyMassIndices.add(new BodyMassIndex("Elton", 1.9, 90));
+        bodyMassIndices.add(new BodyMassIndex("Harry", 1.9, 90));
+        bodyMassIndices.add(new BodyMassIndex("Hannah", 1.9, 90));
+
+        AsciiTable asciiTable = new AsciiTable();
+        asciiTable.addRule();
+        asciiTable.addRow("Name", "Height", "Weight", "BMI");
+        asciiTable.addRule();
+
+        for (BodyMassIndex bodyMassIndex : bodyMassIndices) {
+
+            asciiTable.addRow(bodyMassIndex.getName(), bodyMassIndex.getHeight(), bodyMassIndex.getWeight(), bodyMassIndex.calculate());
+            asciiTable.addRule();
+
+        }
+
+        asciiTable.setTextAlignment(TextAlignment.CENTER);
+        String render = asciiTable.render();
+        System.out.println(render);
+
+    }
+
+}

From 5787183a36f8a30196f5630f4b702010db1b0042 Mon Sep 17 00:00:00 2001
From: Wynn Teo <49014791+wynnteo@users.noreply.github.com>
Date: Wed, 15 Nov 2023 22:55:03 +0800
Subject: [PATCH 62/63] BAEL-7109 - added code to demonstrate how to get max date in java (#14944)

* BAEL-7109 - added code to demonstrate how to get max date in java

* BAEL-7109 Fix test case typo

* BAEL-7109 Fix test case file name

* Fix the test method

* Fix the test method

* Fix the test method

* Remove public

* add new line

---
 .../com/baeldung/maxdate/DateComparison.java  | 18 ++++++++++++++++++
 .../com/baeldung/maxdate/MaxDateDisplay.java  | 18 ++++++++++++++++++
 .../maxdate/DateComparisonUnitTest.java       | 16 ++++++++++++++++
 .../maxdate/MaxDateDisplayUnitTest.java       | 18 ++++++++++++++++++
 4 files changed, 70 insertions(+)
 create mode 100644 core-java-modules/core-java-8-datetime-2/src/main/java/com/baeldung/maxdate/DateComparison.java
 create mode 100644 core-java-modules/core-java-8-datetime-2/src/main/java/com/baeldung/maxdate/MaxDateDisplay.java
 create mode 100644 core-java-modules/core-java-8-datetime-2/src/test/java/com/baeldung/maxdate/DateComparisonUnitTest.java
 create mode 100644 core-java-modules/core-java-8-datetime-2/src/test/java/com/baeldung/maxdate/MaxDateDisplayUnitTest.java

diff --git a/core-java-modules/core-java-8-datetime-2/src/main/java/com/baeldung/maxdate/DateComparison.java b/core-java-modules/core-java-8-datetime-2/src/main/java/com/baeldung/maxdate/DateComparison.java
new file mode 100644
index 0000000000..d6450670a2
--- /dev/null
+++ b/core-java-modules/core-java-8-datetime-2/src/main/java/com/baeldung/maxdate/DateComparison.java
@@ -0,0 +1,18 @@
+package com.baeldung.maxdate;
+
+import java.util.Date;
+
+public class DateComparison {
+    public int compareTodayWithMaxDate() {
+        Date today = new Date();
+        Date maxDate = new Date(Long.MAX_VALUE);
+
+        int comparisonResult = today.compareTo(maxDate);
+        return comparisonResult;
+    }
+
+    public static void main(String[] args) {
+        DateComparison comparator = new DateComparison();
+        System.out.println(comparator.compareTodayWithMaxDate());
+    }
+}
\ No newline at end of file
diff --git a/core-java-modules/core-java-8-datetime-2/src/main/java/com/baeldung/maxdate/MaxDateDisplay.java b/core-java-modules/core-java-8-datetime-2/src/main/java/com/baeldung/maxdate/MaxDateDisplay.java
new file mode 100644
index 0000000000..b79b5f4422
--- /dev/null
+++ b/core-java-modules/core-java-8-datetime-2/src/main/java/com/baeldung/maxdate/MaxDateDisplay.java
@@ -0,0 +1,18 @@
+package com.baeldung.maxdate;
+
+import java.util.Date;
+import java.text.SimpleDateFormat;
+
+public class MaxDateDisplay {
+    public String getMaxDateValue() {
+        Date maxDate = new Date(Long.MAX_VALUE);
+        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS");
+        return "The maximum date value in Java is: " + sdf.format(maxDate);
+    }
+
+    public static void main(String[] args) {
+        MaxDateDisplay display = new MaxDateDisplay();
+        System.out.println(display.getMaxDateValue());
+    }
+}
+
diff --git a/core-java-modules/core-java-8-datetime-2/src/test/java/com/baeldung/maxdate/DateComparisonUnitTest.java b/core-java-modules/core-java-8-datetime-2/src/test/java/com/baeldung/maxdate/DateComparisonUnitTest.java
new file mode 100644
index 0000000000..70aef126da
--- /dev/null
+++ b/core-java-modules/core-java-8-datetime-2/src/test/java/com/baeldung/maxdate/DateComparisonUnitTest.java
@@ -0,0 +1,16 @@
+package com.baeldung.maxdate;
+
+import static org.junit.jupiter.api.Assertions.assertTrue;
+
+import org.junit.jupiter.api.Test;
+
+class DateComparisonUnitTest {
+
+    @Test
+    void whenCompareTodayWithMaxDate_thenCorrectResult() {
+        DateComparison comparator = new DateComparison();
+        int result = comparator.compareTodayWithMaxDate();
+
+        assertTrue(result < 0);
+    }
+}
diff --git a/core-java-modules/core-java-8-datetime-2/src/test/java/com/baeldung/maxdate/MaxDateDisplayUnitTest.java b/core-java-modules/core-java-8-datetime-2/src/test/java/com/baeldung/maxdate/MaxDateDisplayUnitTest.java
new file mode 100644
index 0000000000..dd7000bd29
--- /dev/null
+++ b/core-java-modules/core-java-8-datetime-2/src/test/java/com/baeldung/maxdate/MaxDateDisplayUnitTest.java
@@ -0,0 +1,18 @@
+package com.baeldung.maxdate;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
+import org.junit.jupiter.api.Test;
+
+class MaxDateDisplayUnitTest {
+
+    @Test
+    void whenGetMaxDate_thenCorrectResult() {
+        MaxDateDisplay display = new MaxDateDisplay();
+        String result = display.getMaxDateValue();
+        assertEquals(
+            "The maximum date value in Java is: 292278994-08-17 07:12:55.807",
+            result
+        );
+    }
+}

From 0838279c6afa9bf92a2a89cdb2edcafaddd1a039 Mon Sep 17 00:00:00 2001
From: ACHRAF TAITAI <43656331+achraftt@users.noreply.github.com>
Date: Wed, 15 Nov 2023 22:16:28 +0100
Subject: [PATCH 63/63] BAEL-7092: How to get the size of a file in MB, KB & GB in java (#15117)

* BAEL-7092: How to get the size of a file in MB, KB & GB in java

* Update JavaFileSizeUnitTest.java

* BAEL-7092: How to get the size of a file in MB, KB & GB in java

---------

Co-authored-by: Grzegorz Piwowarek
---
 .../java/com/baeldung/size/FileSizeUtils.java | 28 +++++++++++++++++
 .../baeldung/size/JavaFileSizeUnitTest.java   | 31 ++++++++++++++++---
 2 files changed, 54 insertions(+), 5 deletions(-)
 create mode 100644 core-java-modules/core-java-io/src/main/java/com/baeldung/size/FileSizeUtils.java

diff --git a/core-java-modules/core-java-io/src/main/java/com/baeldung/size/FileSizeUtils.java b/core-java-modules/core-java-io/src/main/java/com/baeldung/size/FileSizeUtils.java
new file mode 100644
index 0000000000..18c8687043
--- /dev/null
+++ b/core-java-modules/core-java-io/src/main/java/com/baeldung/size/FileSizeUtils.java
@@ -0,0 +1,28 @@
+package com.baeldung.size;
+
+import java.io.File;
+
+public class FileSizeUtils {
+    public static long getFileSizeInBytes(File file) {
+        if (file.exists()) {
+            return file.length();
+        } else {
+            throw new IllegalArgumentException("File not found.");
+        }
+    }
+
+    public static double getFileSizeInKilobytes(File file) {
+        long bytes = getFileSizeInBytes(file);
+        return (double) bytes / 1024;
+    }
+
+    public static double getFileSizeInMegabytes(File file) {
+        double kilobytes = getFileSizeInKilobytes(file);
+        return kilobytes / 1024;
+    }
+
+    public static double getFileSizeInGigabytes(File file) {
+        double megabytes = getFileSizeInMegabytes(file);
+        return megabytes / 1024;
+    }
+}
diff --git a/core-java-modules/core-java-io/src/test/java/com/baeldung/size/JavaFileSizeUnitTest.java b/core-java-modules/core-java-io/src/test/java/com/baeldung/size/JavaFileSizeUnitTest.java
index d015f2602e..d8f4361260 100644
--- a/core-java-modules/core-java-io/src/test/java/com/baeldung/size/JavaFileSizeUnitTest.java
+++ b/core-java-modules/core-java-io/src/test/java/com/baeldung/size/JavaFileSizeUnitTest.java
@@ -1,6 +1,8 @@
 package com.baeldung.size;
 
-import static org.junit.Assert.assertEquals;
+import org.apache.commons.io.FileUtils;
+import org.junit.Before;
+import org.junit.Test;
 
 import java.io.File;
 import java.io.FileInputStream;
@@ -10,9 +12,9 @@ import java.net.URL;
 import java.nio.channels.FileChannel;
 import java.nio.file.Path;
 import java.nio.file.Paths;
-import org.apache.commons.io.FileUtils;
-import org.junit.Before;
-import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
 
 public class JavaFileSizeUnitTest {
     private static final long EXPECTED_FILE_SIZE_IN_BYTES = 11;
@@ -85,4 +87,23 @@ public class JavaFileSizeUnitTest {
             assertEquals(EXPECTED_FILE_SIZE_IN_BYTES, stream.available());
         }
     }
-}
\ No newline at end of file
+
+    @Test
+    public void whenGetFileSizeInDifferentUnits_thenCorrect(){
+        filePath = String.join(File.separator, new String[] { "src", "test", "resources", "size", "sample_file_1.in" });
+        File file = new File(filePath);
+        if (file.exists()) {
+            long expectedBytes = file.length();
+            double expectedKilobytes = (double) expectedBytes / 1024;
+            double expectedMegabytes = expectedKilobytes / 1024;
+            double expectedGigabytes = expectedMegabytes / 1024;
+
+            assertEquals(expectedBytes, FileSizeUtils.getFileSizeInBytes(file));
+            assertEquals(expectedKilobytes, FileSizeUtils.getFileSizeInKilobytes(file), 0.01);
+            assertEquals(expectedMegabytes, FileSizeUtils.getFileSizeInMegabytes(file), 0.01);
+            assertEquals(expectedGigabytes, FileSizeUtils.getFileSizeInGigabytes(file), 0.01);
+        } else {
+            fail("File not found.");
+        }
+    }
+}
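
A note for readers trying out the new FileSizeUtils helper from the last patch: the short driver below is an illustrative sketch only, not part of the patch series. The class name and the file path are placeholders, and the two cross-check lines assume java.nio and commons-io are available on the classpath (JavaFileSizeUnitTest above already imports org.apache.commons.io.FileUtils).

package com.baeldung.size;

import java.io.File;
import java.io.IOException;
import java.nio.file.Files;

import org.apache.commons.io.FileUtils;

// Hypothetical demo class; it assumes it sits next to the FileSizeUtils class added above.
public class FileSizeUtilsDemo {

    public static void main(String[] args) throws IOException {
        // Placeholder path - point this at any file that exists locally.
        File file = new File("src/test/resources/size/sample_file_1.in");

        System.out.println("Bytes:      " + FileSizeUtils.getFileSizeInBytes(file));
        System.out.printf("Kilobytes:  %.2f%n", FileSizeUtils.getFileSizeInKilobytes(file));
        System.out.printf("Megabytes:  %.2f%n", FileSizeUtils.getFileSizeInMegabytes(file));
        System.out.printf("Gigabytes:  %.2f%n", FileSizeUtils.getFileSizeInGigabytes(file));

        // Cross-checks with existing APIs: java.nio returns the raw byte count,
        // and Commons IO formats a human-readable size such as "11 bytes" or "2 KB".
        System.out.println("Files.size:     " + Files.size(file.toPath()));
        System.out.println("Human-readable: " + FileUtils.byteCountToDisplaySize(file.length()));
    }
}

FileUtils.byteCountToDisplaySize rounds down to whole units, so it is a readability aid rather than a substitute for the exact double values the new helper returns.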