diff --git a/apache-kafka-2/log4j.properties b/apache-kafka-2/log4j.properties
deleted file mode 100644
index 2173c5d96f..0000000000
--- a/apache-kafka-2/log4j.properties
+++ /dev/null
@@ -1 +0,0 @@
-log4j.rootLogger=INFO, stdout
diff --git a/apache-kafka-2/pom.xml b/apache-kafka-2/pom.xml
index 067dedef8a..d1f74e8aae 100644
--- a/apache-kafka-2/pom.xml
+++ b/apache-kafka-2/pom.xml
@@ -23,11 +23,6 @@
<artifactId>slf4j-api</artifactId>
<version>${org.slf4j.version}</version>
</dependency>
- <dependency>
- <groupId>org.slf4j</groupId>
- <artifactId>slf4j-log4j12</artifactId>
- <version>${org.slf4j.version}</version>
- </dependency>
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
@@ -57,6 +52,11 @@
<version>${lombok.version}</version>
<scope>provided</scope>
</dependency>
+ <dependency>
+ <groupId>com.fasterxml.jackson.core</groupId>
+ <artifactId>jackson-databind</artifactId>
+ <version>${jackson.databind.version}</version>
+ </dependency>
@@ -64,6 +64,7 @@
2.8.0
1.15.3
1.15.3
+ <jackson.databind.version>2.15.2</jackson.databind.version>
\ No newline at end of file
diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/Config.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/Config.java
new file mode 100644
index 0000000000..7fae8403b5
--- /dev/null
+++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/Config.java
@@ -0,0 +1,11 @@
+package com.baeldung.kafka.message.ordering;
+
+public class Config {
+ public static final String CONSUMER_VALUE_DESERIALIZER_SERIALIZED_CLASS = "value.deserializer.serializedClass";
+ public static final String MULTI_PARTITION_TOPIC = "multi_partition_topic";
+ public static final String SINGLE_PARTITION_TOPIC = "single_partition_topic";
+
+ public static final int MULTIPLE_PARTITIONS = 5;
+ public static final int SINGLE_PARTITION = 1;
+ public static final short REPLICATION_FACTOR = 1;
+}
diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/payload/UserEvent.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/payload/UserEvent.java
new file mode 100644
index 0000000000..99e0cc6c7e
--- /dev/null
+++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/payload/UserEvent.java
@@ -0,0 +1,61 @@
+package com.baeldung.kafka.message.ordering.payload;
+
+import java.util.Objects;
+
+public class UserEvent implements Comparable<UserEvent> {
+ private String userEventId;
+ private long eventNanoTime;
+ private long globalSequenceNumber;
+
+ @SuppressWarnings("unused")
+ public UserEvent() {
+ // Required for Jackson Serialization and Deserialization
+ }
+
+ public UserEvent(String userEventId) {
+ this.userEventId = userEventId;
+ }
+
+ public String getUserEventId() {
+ return userEventId;
+ }
+
+ public long getEventNanoTime() {
+ return eventNanoTime;
+ }
+
+ public void setEventNanoTime(long eventNanoTime) {
+ this.eventNanoTime = eventNanoTime;
+ }
+
+ public long getGlobalSequenceNumber() {
+ return globalSequenceNumber;
+ }
+
+ public void setGlobalSequenceNumber(long globalSequenceNumber) {
+ this.globalSequenceNumber = globalSequenceNumber;
+ }
+
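+ // Natural ordering follows the producer-assigned global sequence number,
+ // which lets consumers re-establish publish order across partitions.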
+ @Override
+ public int compareTo(UserEvent other) {
+ return Long.compare(this.globalSequenceNumber, other.globalSequenceNumber);
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (this == obj) {
+ return true;
+ }
+ if (!(obj instanceof UserEvent)) {
+ return false;
+ }
+ UserEvent userEvent = (UserEvent) obj;
+ return this.globalSequenceNumber == userEvent.globalSequenceNumber;
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(globalSequenceNumber);
+ }
+}
+
diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/serialization/JacksonDeserializer.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/serialization/JacksonDeserializer.java
new file mode 100644
index 0000000000..cf72ab12df
--- /dev/null
+++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/serialization/JacksonDeserializer.java
@@ -0,0 +1,34 @@
+package com.baeldung.kafka.message.ordering.serialization;
+
+import com.baeldung.kafka.message.ordering.Config;
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+import org.apache.kafka.common.serialization.Deserializer;
+
+import java.util.Map;
+
+/**
+ * Configured via {@link org.apache.kafka.clients.consumer.ConsumerConfig#VALUE_DESERIALIZER_CLASS_CONFIG}
+ */
+public class JacksonDeserializer<T> implements Deserializer<T> {
+ private final ObjectMapper objectMapper = new ObjectMapper();
+ private Class<T> type;
+
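+ // The target type is taken from the custom consumer property
+ // Config.CONSUMER_VALUE_DESERIALIZER_SERIALIZED_CLASS supplied at configuration time.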
+ @Override
+ public void configure(Map<String, ?> configs, boolean isKey) {
+ this.type = (Class<T>) configs.get(Config.CONSUMER_VALUE_DESERIALIZER_SERIALIZED_CLASS);
+ }
+
+ @Override
+ public T deserialize(String topic, byte[] bytes) {
+ if (bytes == null) {
+ return null;
+ }
+ try {
+ return objectMapper.readValue(bytes, type);
+ } catch (Exception e) {
+ throw new RuntimeException("Error deserializing value", e);
+ }
+ }
+}
+
diff --git a/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/serialization/JacksonSerializer.java b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/serialization/JacksonSerializer.java
new file mode 100644
index 0000000000..b2ace3b8ed
--- /dev/null
+++ b/apache-kafka-2/src/main/java/com/baeldung/kafka/message/ordering/serialization/JacksonSerializer.java
@@ -0,0 +1,24 @@
+package com.baeldung.kafka.message.ordering.serialization;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+import org.apache.kafka.common.serialization.Serializer;
+
+/**
+ * Configured via {@link org.apache.kafka.clients.producer.ProducerConfig#VALUE_SERIALIZER_CLASS_CONFIG}
+ */
+public class JacksonSerializer<T> implements Serializer<T> {
+ private final ObjectMapper objectMapper = new ObjectMapper();
+
+ @Override
+ public byte[] serialize(String topic, T data) {
+ if (data == null) {
+ return null;
+ }
+ try {
+ return objectMapper.writeValueAsBytes(data);
+ } catch (Exception e) {
+ throw new RuntimeException("Error serializing value", e);
+ }
+ }
+}
diff --git a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/ExternalSequenceWithTimeWindowLiveTest.java b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/ExternalSequenceWithTimeWindowLiveTest.java
new file mode 100644
index 0000000000..f36c6ebd63
--- /dev/null
+++ b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/ExternalSequenceWithTimeWindowLiveTest.java
@@ -0,0 +1,126 @@
+package com.baeldung.kafka.message.ordering;
+
+import com.baeldung.kafka.message.ordering.payload.UserEvent;
+import com.baeldung.kafka.message.ordering.serialization.JacksonDeserializer;
+import com.baeldung.kafka.message.ordering.serialization.JacksonSerializer;
+
+import org.apache.kafka.clients.admin.*;
+import org.apache.kafka.clients.consumer.ConsumerConfig;
+import org.apache.kafka.clients.consumer.ConsumerRecords;
+import org.apache.kafka.clients.consumer.KafkaConsumer;
+import org.apache.kafka.clients.producer.KafkaProducer;
+import org.apache.kafka.clients.producer.ProducerConfig;
+import org.apache.kafka.clients.producer.ProducerRecord;
+import org.apache.kafka.clients.producer.RecordMetadata;
+import org.apache.kafka.common.serialization.LongDeserializer;
+import org.apache.kafka.common.serialization.LongSerializer;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.testcontainers.containers.KafkaContainer;
+import org.testcontainers.junit.jupiter.Container;
+import org.testcontainers.junit.jupiter.Testcontainers;
+import org.testcontainers.utility.DockerImageName;
+
+import java.time.Duration;
+import java.util.*;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.Future;
+
+import com.google.common.collect.ImmutableList;
+
+import static org.assertj.core.api.AssertionsForInterfaceTypes.assertThat;
+
+@Testcontainers
+public class ExternalSequenceWithTimeWindowLiveTest {
+
+ private static Admin admin;
+ private static KafkaProducer<Long, UserEvent> producer;
+ private static KafkaConsumer<Long, UserEvent> consumer;
+ private static final Duration TIMEOUT_WAIT_FOR_MESSAGES = Duration.ofSeconds(5);
+ private static final long BUFFER_PERIOD_NS = Duration.ofSeconds(5)
+ .toNanos();
+ private static Logger logger = LoggerFactory.getLogger(ExternalSequenceWithTimeWindowLiveTest.class);
+
+ @Container
+ private static final KafkaContainer KAFKA_CONTAINER = new KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka:latest"));
+
+ @BeforeAll
+ static void setup() throws ExecutionException, InterruptedException {
+ KAFKA_CONTAINER.addExposedPort(9092);
+
+ Properties adminProperties = new Properties();
+ adminProperties.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, KAFKA_CONTAINER.getBootstrapServers());
+
+ Properties producerProperties = new Properties();
+ producerProperties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, KAFKA_CONTAINER.getBootstrapServers());
+ producerProperties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, LongSerializer.class.getName());
+ producerProperties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JacksonSerializer.class.getName());
+
+ Properties consumerProperties = new Properties();
+ consumerProperties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, KAFKA_CONTAINER.getBootstrapServers());
+ consumerProperties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, LongDeserializer.class.getName());
+ consumerProperties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, JacksonDeserializer.class.getName());
+ consumerProperties.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
+ consumerProperties.put(Config.CONSUMER_VALUE_DESERIALIZER_SERIALIZED_CLASS, UserEvent.class);
+ consumerProperties.put(ConsumerConfig.GROUP_ID_CONFIG, "test-group");
+ admin = Admin.create(adminProperties);
+ producer = new KafkaProducer<>(producerProperties);
+ consumer = new KafkaConsumer<>(consumerProperties);
+ admin.createTopics(ImmutableList.of(new NewTopic(Config.MULTI_PARTITION_TOPIC, Config.MULTIPLE_PARTITIONS, Config.REPLICATION_FACTOR)))
+ .all()
+ .get();
+ }
+
+ @AfterAll
+ static void destroy() {
+ KAFKA_CONTAINER.stop();
+ }
+
+ @Test
+ void givenMultiplePartitions_whenPublishedToKafkaAndConsumedWithExtSeqNumberAndTimeWindow_thenCheckForMessageOrder() throws ExecutionException, InterruptedException {
+ List<UserEvent> sentUserEventList = new ArrayList<>();
+ List<UserEvent> receivedUserEventList = new ArrayList<>();
+ for (long sequenceNumber = 1; sequenceNumber <= 10; sequenceNumber++) {
+ UserEvent userEvent = new UserEvent(UUID.randomUUID()
+ .toString());
+ userEvent.setEventNanoTime(System.nanoTime());
+ userEvent.setGlobalSequenceNumber(sequenceNumber);
+ Future<RecordMetadata> future = producer.send(new ProducerRecord<>(Config.MULTI_PARTITION_TOPIC, sequenceNumber, userEvent));
+ sentUserEventList.add(userEvent);
+ RecordMetadata metadata = future.get();
+ logger.info("User Event ID: " + userEvent.getUserEventId() + ", Partition : " + metadata.partition());
+ }
+
+ consumer.subscribe(Collections.singletonList(Config.MULTI_PARTITION_TOPIC));
+ List<UserEvent> buffer = new ArrayList<>();
+ long lastProcessedTime = System.nanoTime();
+ ConsumerRecords<Long, UserEvent> records = consumer.poll(TIMEOUT_WAIT_FOR_MESSAGES);
+ records.forEach(record -> {
+ buffer.add(record.value());
+ });
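+ // Keep polling and buffering; every BUFFER_PERIOD_NS the buffered events are
+ // sorted by their external sequence number and drained in publish order.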
+ while (!buffer.isEmpty()) {
+ if (System.nanoTime() - lastProcessedTime > BUFFER_PERIOD_NS) {
+ processBuffer(buffer, receivedUserEventList);
+ lastProcessedTime = System.nanoTime();
+ }
+ records = consumer.poll(TIMEOUT_WAIT_FOR_MESSAGES);
+ records.forEach(record -> {
+ buffer.add(record.value());
+ });
+ }
+ assertThat(receivedUserEventList).isEqualTo(sentUserEventList)
+ .containsExactlyElementsOf(sentUserEventList);
+ }
+
+ private static void processBuffer(List<UserEvent> buffer, List<UserEvent> receivedUserEventList) {
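+ // Sorting relies on UserEvent's natural ordering (global sequence number).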
+ Collections.sort(buffer);
+ buffer.forEach(userEvent -> {
+ receivedUserEventList.add(userEvent);
+ logger.info("Processing message with Global Sequence number: " + userEvent.getGlobalSequenceNumber() + ", User Event Id: " + userEvent.getUserEventId());
+ });
+ buffer.clear();
+ }
+}
diff --git a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/MultiplePartitionLiveTest.java b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/MultiplePartitionLiveTest.java
new file mode 100644
index 0000000000..407b4d52a9
--- /dev/null
+++ b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/MultiplePartitionLiveTest.java
@@ -0,0 +1,105 @@
+package com.baeldung.kafka.message.ordering;
+
+import com.baeldung.kafka.message.ordering.payload.UserEvent;
+import com.baeldung.kafka.message.ordering.serialization.JacksonDeserializer;
+import com.baeldung.kafka.message.ordering.serialization.JacksonSerializer;
+
+import org.apache.kafka.clients.admin.*;
+import org.apache.kafka.clients.consumer.ConsumerConfig;
+import org.apache.kafka.clients.consumer.ConsumerRecords;
+import org.apache.kafka.clients.consumer.KafkaConsumer;
+import org.apache.kafka.clients.producer.KafkaProducer;
+import org.apache.kafka.clients.producer.ProducerConfig;
+import org.apache.kafka.clients.producer.ProducerRecord;
+import org.apache.kafka.clients.producer.RecordMetadata;
+import org.apache.kafka.common.serialization.LongDeserializer;
+import org.apache.kafka.common.serialization.LongSerializer;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.testcontainers.containers.KafkaContainer;
+import org.testcontainers.junit.jupiter.Container;
+import org.testcontainers.junit.jupiter.Testcontainers;
+import org.testcontainers.utility.DockerImageName;
+
+import java.time.Duration;
+import java.util.*;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.Future;
+
+import com.google.common.collect.ImmutableList;
+
+import static org.assertj.core.api.AssertionsForInterfaceTypes.assertThat;
+
+@Testcontainers
+public class MultiplePartitionLiveTest {
+
+ private static Admin admin;
+ private static KafkaProducer<Long, UserEvent> producer;
+ private static KafkaConsumer<Long, UserEvent> consumer;
+ private static final Duration TIMEOUT_WAIT_FOR_MESSAGES = Duration.ofSeconds(5);
+
+ private static Logger logger = LoggerFactory.getLogger(MultiplePartitionLiveTest.class);
+ @Container
+ private static final KafkaContainer KAFKA_CONTAINER = new KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka:latest"));
+
+ @BeforeAll
+ static void setup() throws ExecutionException, InterruptedException {
+ KAFKA_CONTAINER.addExposedPort(9092);
+
+ Properties adminProperties = new Properties();
+ adminProperties.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, KAFKA_CONTAINER.getBootstrapServers());
+
+ Properties producerProperties = new Properties();
+ producerProperties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, KAFKA_CONTAINER.getBootstrapServers());
+ producerProperties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, LongSerializer.class.getName());
+ producerProperties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JacksonSerializer.class.getName());
+
+ Properties consumerProperties = new Properties();
+ consumerProperties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, KAFKA_CONTAINER.getBootstrapServers());
+ consumerProperties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, LongDeserializer.class.getName());
+ consumerProperties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, JacksonDeserializer.class.getName());
+ consumerProperties.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
+ consumerProperties.put(Config.CONSUMER_VALUE_DESERIALIZER_SERIALIZED_CLASS, UserEvent.class);
+ consumerProperties.put(ConsumerConfig.GROUP_ID_CONFIG, "test-group");
+ admin = Admin.create(adminProperties);
+ producer = new KafkaProducer<>(producerProperties);
+ consumer = new KafkaConsumer<>(consumerProperties);
+ admin.createTopics(ImmutableList.of(new NewTopic(Config.MULTI_PARTITION_TOPIC, Config.MULTIPLE_PARTITIONS, Config.REPLICATION_FACTOR)))
+ .all()
+ .get();
+ }
+
+ @AfterAll
+ static void destroy() {
+ KAFKA_CONTAINER.stop();
+ }
+
+ @Test
+ void givenMultiplePartitions_whenPublishedToKafkaAndConsumed_thenCheckForMessageOrder() throws ExecutionException, InterruptedException {
+ List<UserEvent> sentUserEventList = new ArrayList<>();
+ List<UserEvent> receivedUserEventList = new ArrayList<>();
+ for (long sequenceNumber = 1; sequenceNumber <= 10; sequenceNumber++) {
+ UserEvent userEvent = new UserEvent(UUID.randomUUID()
+ .toString());
+ userEvent.setGlobalSequenceNumber(sequenceNumber);
+ userEvent.setEventNanoTime(System.nanoTime());
+ Future<RecordMetadata> future = producer.send(new ProducerRecord<>(Config.MULTI_PARTITION_TOPIC, sequenceNumber, userEvent));
+ sentUserEventList.add(userEvent);
+ RecordMetadata metadata = future.get();
+ logger.info("User Event ID: " + userEvent.getUserEventId() + ", Partition : " + metadata.partition());
+ }
+
+ consumer.subscribe(Collections.singletonList(Config.MULTI_PARTITION_TOPIC));
+ ConsumerRecords<Long, UserEvent> records = consumer.poll(TIMEOUT_WAIT_FOR_MESSAGES);
+ records.forEach(record -> {
+ UserEvent userEvent = record.value();
+ receivedUserEventList.add(userEvent);
+ logger.info("User Event ID: " + userEvent.getUserEventId());
+ });
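+ // Kafka only guarantees ordering within a partition, so with five partitions the
+ // consumption order usually differs from the publish order even though no event is lost.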
+ assertThat(receivedUserEventList).isNotEqualTo(sentUserEventList)
+ .containsExactlyInAnyOrderElementsOf(sentUserEventList);
+ }
+}
diff --git a/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/SinglePartitionLiveTest.java b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/SinglePartitionLiveTest.java
new file mode 100644
index 0000000000..9c6a15ebeb
--- /dev/null
+++ b/apache-kafka-2/src/test/java/com/baeldung/kafka/message/ordering/SinglePartitionLiveTest.java
@@ -0,0 +1,109 @@
+package com.baeldung.kafka.message.ordering;
+
+import com.baeldung.kafka.message.ordering.payload.UserEvent;
+import com.baeldung.kafka.message.ordering.serialization.JacksonDeserializer;
+import com.baeldung.kafka.message.ordering.serialization.JacksonSerializer;
+
+import org.apache.kafka.clients.admin.Admin;
+import org.apache.kafka.clients.admin.AdminClientConfig;
+import org.apache.kafka.clients.admin.NewTopic;
+import org.apache.kafka.clients.consumer.ConsumerConfig;
+import org.apache.kafka.clients.consumer.ConsumerRecords;
+import org.apache.kafka.clients.consumer.KafkaConsumer;
+import org.apache.kafka.clients.producer.KafkaProducer;
+import org.apache.kafka.clients.producer.ProducerConfig;
+import org.apache.kafka.clients.producer.ProducerRecord;
+import org.apache.kafka.clients.producer.RecordMetadata;
+import org.apache.kafka.common.serialization.LongDeserializer;
+import org.apache.kafka.common.serialization.LongSerializer;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.testcontainers.containers.KafkaContainer;
+import org.testcontainers.junit.jupiter.Container;
+import org.testcontainers.junit.jupiter.Testcontainers;
+import org.testcontainers.utility.DockerImageName;
+
+import java.time.Duration;
+import java.util.*;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.Future;
+
+import com.google.common.collect.ImmutableList;
+
+import static org.assertj.core.api.AssertionsForInterfaceTypes.assertThat;
+
+@Testcontainers
+public class SinglePartitionLiveTest {
+
+ private static Admin admin;
+ private static KafkaProducer<Long, UserEvent> producer;
+ private static KafkaConsumer<Long, UserEvent> consumer;
+
+ private static final Duration TIMEOUT_WAIT_FOR_MESSAGES = Duration.ofSeconds(5);
+
+ private static Logger logger = LoggerFactory.getLogger(SinglePartitionLiveTest.class);
+ @Container
+ private static final KafkaContainer KAFKA_CONTAINER = new KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka:latest"));
+
+ @BeforeAll
+ static void setup() throws ExecutionException, InterruptedException {
+ KAFKA_CONTAINER.addExposedPort(9092);
+
+ Properties adminProperties = new Properties();
+ adminProperties.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, KAFKA_CONTAINER.getBootstrapServers());
+
+ Properties producerProperties = new Properties();
+ producerProperties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, KAFKA_CONTAINER.getBootstrapServers());
+ producerProperties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, LongSerializer.class.getName());
+ producerProperties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JacksonSerializer.class.getName());
+
+ Properties consumerProperties = new Properties();
+ consumerProperties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, KAFKA_CONTAINER.getBootstrapServers());
+ consumerProperties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, LongDeserializer.class.getName());
+ consumerProperties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, JacksonDeserializer.class.getName());
+ consumerProperties.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
+ consumerProperties.put(Config.CONSUMER_VALUE_DESERIALIZER_SERIALIZED_CLASS, UserEvent.class);
+ consumerProperties.put(ConsumerConfig.GROUP_ID_CONFIG, "test-group");
+ admin = Admin.create(adminProperties);
+ producer = new KafkaProducer<>(producerProperties);
+ consumer = new KafkaConsumer<>(consumerProperties);
+ admin.createTopics(ImmutableList.of(new NewTopic(Config.SINGLE_PARTITION_TOPIC, Config.SINGLE_PARTITION, Config.REPLICATION_FACTOR)))
+ .all()
+ .get();
+ }
+
+ @AfterAll
+ static void destroy() {
+ KAFKA_CONTAINER.stop();
+ }
+
+ @Test
+ void givenASinglePartition_whenPublishedToKafkaAndConsumed_thenCheckForMessageOrder() throws ExecutionException, InterruptedException {
+ List<UserEvent> sentUserEventList = new ArrayList<>();
+ List<UserEvent> receivedUserEventList = new ArrayList<>();
+ for (long sequenceNumber = 1; sequenceNumber <= 10; sequenceNumber++) {
+ UserEvent userEvent = new UserEvent(UUID.randomUUID()
+ .toString());
+ userEvent.setGlobalSequenceNumber(sequenceNumber);
+ userEvent.setEventNanoTime(System.nanoTime());
+ ProducerRecord<Long, UserEvent> producerRecord = new ProducerRecord<>(Config.SINGLE_PARTITION_TOPIC, userEvent);
+ Future<RecordMetadata> future = producer.send(producerRecord);
+ sentUserEventList.add(userEvent);
+ RecordMetadata metadata = future.get();
+ logger.info("User Event ID: " + userEvent.getUserEventId() + ", Partition : " + metadata.partition());
+ }
+
+ consumer.subscribe(Collections.singletonList(Config.SINGLE_PARTITION_TOPIC));
+ ConsumerRecords<Long, UserEvent> records = consumer.poll(TIMEOUT_WAIT_FOR_MESSAGES);
+ records.forEach(record -> {
+ UserEvent userEvent = record.value();
+ receivedUserEventList.add(userEvent);
+ logger.info("User Event ID: " + userEvent.getUserEventId());
+ });
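+ // With a single partition, consumption order matches publish order exactly.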
+ assertThat(receivedUserEventList).isEqualTo(sentUserEventList)
+ .containsExactlyElementsOf(sentUserEventList);
+ }
+}
diff --git a/apache-kafka-2/src/test/resources/logback.xml b/apache-kafka-2/src/test/resources/logback.xml
new file mode 100644
index 0000000000..6156c2188e
--- /dev/null
+++ b/apache-kafka-2/src/test/resources/logback.xml
@@ -0,0 +1,11 @@
+<configuration>
+ <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
+ <encoder>
+ <pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n</pattern>
+ </encoder>
+ </appender>
+
+ <root level="INFO">
+ <appender-ref ref="STDOUT" />
+ </root>
+</configuration>
\ No newline at end of file
diff --git a/core-java-modules/core-java-8-datetime-2/src/main/java/com/baeldung/maxdate/DateComparison.java b/core-java-modules/core-java-8-datetime-2/src/main/java/com/baeldung/maxdate/DateComparison.java
new file mode 100644
index 0000000000..d6450670a2
--- /dev/null
+++ b/core-java-modules/core-java-8-datetime-2/src/main/java/com/baeldung/maxdate/DateComparison.java
@@ -0,0 +1,18 @@
+package com.baeldung.maxdate;
+
+import java.util.Date;
+
+public class DateComparison {
+ public int compareTodayWithMaxDate() {
+ Date today = new Date();
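+ // new Date(Long.MAX_VALUE) is the largest value java.util.Date can represent,
+ // so today always compares as earlier.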
+ Date maxDate = new Date(Long.MAX_VALUE);
+
+ int comparisonResult = today.compareTo(maxDate);
+ return comparisonResult;
+ }
+
+ public static void main(String[] args) {
+ DateComparison comparator = new DateComparison();
+ System.out.println(comparator.compareTodayWithMaxDate());
+ }
+}
\ No newline at end of file
diff --git a/core-java-modules/core-java-8-datetime-2/src/main/java/com/baeldung/maxdate/MaxDateDisplay.java b/core-java-modules/core-java-8-datetime-2/src/main/java/com/baeldung/maxdate/MaxDateDisplay.java
new file mode 100644
index 0000000000..b79b5f4422
--- /dev/null
+++ b/core-java-modules/core-java-8-datetime-2/src/main/java/com/baeldung/maxdate/MaxDateDisplay.java
@@ -0,0 +1,18 @@
+package com.baeldung.maxdate;
+
+import java.util.Date;
+import java.text.SimpleDateFormat;
+
+public class MaxDateDisplay {
+ public String getMaxDateValue() {
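+ // Long.MAX_VALUE milliseconds after the Unix epoch falls in the year 292278994.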
+ Date maxDate = new Date(Long.MAX_VALUE);
+ SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS");
+ return "The maximum date value in Java is: " + sdf.format(maxDate);
+ }
+
+ public static void main(String[] args) {
+ MaxDateDisplay display = new MaxDateDisplay();
+ System.out.println(display.getMaxDateValue());
+ }
+}
+
diff --git a/core-java-modules/core-java-8-datetime-2/src/test/java/com/baeldung/maxdate/DateComparisonUnitTest.java b/core-java-modules/core-java-8-datetime-2/src/test/java/com/baeldung/maxdate/DateComparisonUnitTest.java
new file mode 100644
index 0000000000..70aef126da
--- /dev/null
+++ b/core-java-modules/core-java-8-datetime-2/src/test/java/com/baeldung/maxdate/DateComparisonUnitTest.java
@@ -0,0 +1,16 @@
+package com.baeldung.maxdate;
+
+import static org.junit.jupiter.api.Assertions.assertTrue;
+
+import org.junit.jupiter.api.Test;
+
+class DateComparisonUnitTest {
+
+ @Test
+ void whenCompareTodayWithMaxDate_thenCorrectResult() {
+ DateComparison comparator = new DateComparison();
+ int result = comparator.compareTodayWithMaxDate();
+
+ assertTrue(result < 0);
+ }
+}
diff --git a/core-java-modules/core-java-8-datetime-2/src/test/java/com/baeldung/maxdate/MaxDateDisplayUnitTest.java b/core-java-modules/core-java-8-datetime-2/src/test/java/com/baeldung/maxdate/MaxDateDisplayUnitTest.java
new file mode 100644
index 0000000000..dd7000bd29
--- /dev/null
+++ b/core-java-modules/core-java-8-datetime-2/src/test/java/com/baeldung/maxdate/MaxDateDisplayUnitTest.java
@@ -0,0 +1,18 @@
+package com.baeldung.maxdate;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
+import org.junit.jupiter.api.Test;
+
+class MaxDateDisplayUnitTest {
+
+ @Test
+ void whenGetMaxDate_thenCorrectResult() {
+ MaxDateDisplay display = new MaxDateDisplay();
+ String result = display.getMaxDateValue();
+ assertEquals(
+ "The maximum date value in Java is: 292278994-08-17 07:12:55.807",
+ result
+ );
+ }
+}
diff --git a/core-java-modules/core-java-console/pom.xml b/core-java-modules/core-java-console/pom.xml
index 1b56f1f27c..8677b672ea 100644
--- a/core-java-modules/core-java-console/pom.xml
+++ b/core-java-modules/core-java-console/pom.xml
@@ -29,6 +29,11 @@
+ <dependency>
+ <groupId>de.vandermeer</groupId>
+ <artifactId>asciitable</artifactId>
+ <version>${ascii.version}</version>
+ </dependency>
@@ -157,6 +162,7 @@
3.0.0-M1
1.8
1.8
+ <ascii.version>0.3.2</ascii.version>
\ No newline at end of file
diff --git a/core-java-modules/core-java-console/src/main/java/com/baeldung/consoletableoutput/BodyMassIndex.java b/core-java-modules/core-java-console/src/main/java/com/baeldung/consoletableoutput/BodyMassIndex.java
new file mode 100644
index 0000000000..96cede7020
--- /dev/null
+++ b/core-java-modules/core-java-console/src/main/java/com/baeldung/consoletableoutput/BodyMassIndex.java
@@ -0,0 +1,44 @@
+package com.baeldung.consoletableoutput;
+
+public class BodyMassIndex {
+
+ private String name;
+ private double height;
+ private double weight;
+
+ public BodyMassIndex(String name, double height, double weight) {
+ this.name = name;
+ this.height = height;
+ this.weight = weight;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public void setName(String name) {
+ this.name = name;
+ }
+
+ public double getHeight() {
+ return height;
+ }
+
+ public void setHeight(double height) {
+ this.height = height;
+ }
+
+ public double getWeight() {
+ return weight;
+ }
+
+ public void setWeight(double weight) {
+ this.weight = weight;
+ }
+
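+ // BMI = weight (kg) / height (m)^2, rounded to two decimals for display.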
+ public double calculate() {
+ double bmi = weight / (height * height);
+ String formattedBmi = String.format("%.2f", bmi);
+ return Double.parseDouble(formattedBmi);
+ }
+}
diff --git a/core-java-modules/core-java-console/src/main/java/com/baeldung/consoletableoutput/BodyMassIndexApplication.java b/core-java-modules/core-java-console/src/main/java/com/baeldung/consoletableoutput/BodyMassIndexApplication.java
new file mode 100644
index 0000000000..cb340256aa
--- /dev/null
+++ b/core-java-modules/core-java-console/src/main/java/com/baeldung/consoletableoutput/BodyMassIndexApplication.java
@@ -0,0 +1,62 @@
+package com.baeldung.consoletableoutput;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import de.vandermeer.asciitable.AsciiTable;
+import de.vandermeer.skb.interfaces.transformers.textformat.TextAlignment;
+
+public class BodyMassIndexApplication {
+
+ public static void main(String[] args) {
+ stringFormat();
+ asciiTable();
+
+ }
+
+ public static void stringFormat() {
+ List<BodyMassIndex> bodyMassIndices = new ArrayList<>();
+ bodyMassIndices.add(new BodyMassIndex("Tom", 1.8, 80));
+ bodyMassIndices.add(new BodyMassIndex("Elton", 1.9, 90));
+ bodyMassIndices.add(new BodyMassIndex("Harry", 1.9, 90));
+ bodyMassIndices.add(new BodyMassIndex("Hannah", 1.9, 90));
+
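+ // %-7s and %-7.2f left-align each column within a fixed width so the rows line up.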
+ String leftAlignment = "| %-7s | %-7.2f | %-7.2f | %-5.2f |%n";
+
+ System.out.format("+---------+---------+---------+-------+%n");
+ System.out.format("| Name | Height | Weight | BMI |%n");
+ System.out.format("+---------+---------+---------+-------+%n");
+
+ for (BodyMassIndex bodyMassIndex : bodyMassIndices) {
+ System.out.format(leftAlignment, bodyMassIndex.getName(), bodyMassIndex.getHeight(), bodyMassIndex.getWeight(), bodyMassIndex.calculate());
+ System.out.format("+---------+---------+---------+-------+%n");
+ }
+
+ }
+
+ public static void asciiTable() {
+ List<BodyMassIndex> bodyMassIndices = new ArrayList<>();
+ bodyMassIndices.add(new BodyMassIndex("Tom", 1.8, 80));
+ bodyMassIndices.add(new BodyMassIndex("Elton", 1.9, 90));
+ bodyMassIndices.add(new BodyMassIndex("Harry", 1.9, 90));
+ bodyMassIndices.add(new BodyMassIndex("Hannah", 1.9, 90));
+
+ AsciiTable asciiTable = new AsciiTable();
+ asciiTable.addRule();
+ asciiTable.addRow("Name", "Height", "Weight", "BMI");
+ asciiTable.addRule();
+
+ for (BodyMassIndex bodyMassIndex : bodyMassIndices) {
+
+ asciiTable.addRow(bodyMassIndex.getName(), bodyMassIndex.getHeight(), bodyMassIndex.getWeight(), bodyMassIndex.calculate());
+ asciiTable.addRule();
+
+ }
+
+ asciiTable.setTextAlignment(TextAlignment.CENTER);
+ String render = asciiTable.render();
+ System.out.println(render);
+
+ }
+
+}
diff --git a/core-java-modules/core-java-io/src/main/java/com/baeldung/size/FileSizeUtils.java b/core-java-modules/core-java-io/src/main/java/com/baeldung/size/FileSizeUtils.java
new file mode 100644
index 0000000000..18c8687043
--- /dev/null
+++ b/core-java-modules/core-java-io/src/main/java/com/baeldung/size/FileSizeUtils.java
@@ -0,0 +1,28 @@
+package com.baeldung.size;
+
+import java.io.File;
+
+public class FileSizeUtils {
+ public static long getFileSizeInBytes(File file) {
+ if (file.exists()) {
+ return file.length();
+ } else {
+ throw new IllegalArgumentException("File not found.");
+ }
+ }
+
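+ // The conversions below use binary units: 1 KB = 1024 bytes, 1 MB = 1024 KB, 1 GB = 1024 MB.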
+ public static double getFileSizeInKilobytes(File file) {
+ long bytes = getFileSizeInBytes(file);
+ return (double) bytes / 1024;
+ }
+
+ public static double getFileSizeInMegabytes(File file) {
+ double kilobytes = getFileSizeInKilobytes(file);
+ return kilobytes / 1024;
+ }
+
+ public static double getFileSizeInGigabytes(File file) {
+ double megabytes = getFileSizeInMegabytes(file);
+ return megabytes / 1024;
+ }
+}
diff --git a/core-java-modules/core-java-io/src/test/java/com/baeldung/size/JavaFileSizeUnitTest.java b/core-java-modules/core-java-io/src/test/java/com/baeldung/size/JavaFileSizeUnitTest.java
index d015f2602e..d8f4361260 100644
--- a/core-java-modules/core-java-io/src/test/java/com/baeldung/size/JavaFileSizeUnitTest.java
+++ b/core-java-modules/core-java-io/src/test/java/com/baeldung/size/JavaFileSizeUnitTest.java
@@ -1,6 +1,8 @@
package com.baeldung.size;
-import static org.junit.Assert.assertEquals;
+import org.apache.commons.io.FileUtils;
+import org.junit.Before;
+import org.junit.Test;
import java.io.File;
import java.io.FileInputStream;
@@ -10,9 +12,9 @@ import java.net.URL;
import java.nio.channels.FileChannel;
import java.nio.file.Path;
import java.nio.file.Paths;
-import org.apache.commons.io.FileUtils;
-import org.junit.Before;
-import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
public class JavaFileSizeUnitTest {
private static final long EXPECTED_FILE_SIZE_IN_BYTES = 11;
@@ -85,4 +87,23 @@ public class JavaFileSizeUnitTest {
assertEquals(EXPECTED_FILE_SIZE_IN_BYTES, stream.available());
}
}
-}
\ No newline at end of file
+
+ @Test
+ public void whenGetFileSizeInDifferentUnits_thenCorrect() {
+ filePath = String.join(File.separator, new String[] { "src", "test", "resources", "size", "sample_file_1.in" });
+ File file = new File(filePath);
+ if (file.exists()) {
+ long expectedBytes = file.length();
+ double expectedKilobytes = (double) expectedBytes / 1024;
+ double expectedMegabytes = expectedKilobytes / 1024;
+ double expectedGigabytes = expectedMegabytes / 1024;
+
+ assertEquals(expectedBytes, FileSizeUtils.getFileSizeInBytes(file));
+ assertEquals(expectedKilobytes, FileSizeUtils.getFileSizeInKilobytes(file), 0.01);
+ assertEquals(expectedMegabytes, FileSizeUtils.getFileSizeInMegabytes(file), 0.01);
+ assertEquals(expectedGigabytes, FileSizeUtils.getFileSizeInGigabytes(file), 0.01);
+ } else {
+ fail("File not found.");
+ }
+ }
+}
diff --git a/intelliJ-modules/.gitignore b/intelliJ-modules/.gitignore
new file mode 100644
index 0000000000..d16386367f
--- /dev/null
+++ b/intelliJ-modules/.gitignore
@@ -0,0 +1 @@
+build/
\ No newline at end of file
diff --git a/intelliJ-modules/gradle/wrapper/gradle-wrapper.properties b/intelliJ-modules/gradle/wrapper/gradle-wrapper.properties
new file mode 100644
index 0000000000..62f495dfed
--- /dev/null
+++ b/intelliJ-modules/gradle/wrapper/gradle-wrapper.properties
@@ -0,0 +1,7 @@
+distributionBase=GRADLE_USER_HOME
+distributionPath=wrapper/dists
+distributionUrl=https\://services.gradle.org/distributions/gradle-8.2-bin.zip
+networkTimeout=10000
+validateDistributionUrl=true
+zipStoreBase=GRADLE_USER_HOME
+zipStorePath=wrapper/dists
diff --git a/intelliJ-modules/gradlew b/intelliJ-modules/gradlew
new file mode 100644
index 0000000000..fcb6fca147
--- /dev/null
+++ b/intelliJ-modules/gradlew
@@ -0,0 +1,248 @@
+#!/bin/sh
+
+#
+# Copyright © 2015-2021 the original authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+##############################################################################
+#
+# Gradle start up script for POSIX generated by Gradle.
+#
+# Important for running:
+#
+# (1) You need a POSIX-compliant shell to run this script. If your /bin/sh is
+# noncompliant, but you have some other compliant shell such as ksh or
+# bash, then to run this script, type that shell name before the whole
+# command line, like:
+#
+# ksh Gradle
+#
+# Busybox and similar reduced shells will NOT work, because this script
+# requires all of these POSIX shell features:
+# * functions;
+# * expansions «$var», «${var}», «${var:-default}», «${var+SET}»,
+# «${var#prefix}», «${var%suffix}», and «$( cmd )»;
+# * compound commands having a testable exit status, especially «case»;
+# * various built-in commands including «command», «set», and «ulimit».
+#
+# Important for patching:
+#
+# (2) This script targets any POSIX shell, so it avoids extensions provided
+# by Bash, Ksh, etc; in particular arrays are avoided.
+#
+# The "traditional" practice of packing multiple parameters into a
+# space-separated string is a well documented source of bugs and security
+# problems, so this is (mostly) avoided, by progressively accumulating
+# options in "$@", and eventually passing that to Java.
+#
+# Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS,
+# and GRADLE_OPTS) rely on word-splitting, this is performed explicitly;
+# see the in-line comments for details.
+#
+# There are tweaks for specific operating systems such as AIX, CygWin,
+# Darwin, MinGW, and NonStop.
+#
+# (3) This script is generated from the Groovy template
+# https://github.com/gradle/gradle/blob/HEAD/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt
+# within the Gradle project.
+#
+# You can find Gradle at https://github.com/gradle/gradle/.
+#
+##############################################################################
+
+# Attempt to set APP_HOME
+
+# Resolve links: $0 may be a link
+app_path=$0
+
+# Need this for daisy-chained symlinks.
+while
+ APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path
+ [ -h "$app_path" ]
+do
+ ls=$( ls -ld "$app_path" )
+ link=${ls#*' -> '}
+ case $link in #(
+ /*) app_path=$link ;; #(
+ *) app_path=$APP_HOME$link ;;
+ esac
+done
+
+# This is normally unused
+# shellcheck disable=SC2034
+APP_BASE_NAME=${0##*/}
+APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit
+
+# Use the maximum available, or set MAX_FD != -1 to use that value.
+MAX_FD=maximum
+
+warn () {
+ echo "$*"
+} >&2
+
+die () {
+ echo
+ echo "$*"
+ echo
+ exit 1
+} >&2
+
+# OS specific support (must be 'true' or 'false').
+cygwin=false
+msys=false
+darwin=false
+nonstop=false
+case "$( uname )" in #(
+ CYGWIN* ) cygwin=true ;; #(
+ Darwin* ) darwin=true ;; #(
+ MSYS* | MINGW* ) msys=true ;; #(
+ NONSTOP* ) nonstop=true ;;
+esac
+
+CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
+
+
+# Determine the Java command to use to start the JVM.
+if [ -n "$JAVA_HOME" ] ; then
+ if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
+ # IBM's JDK on AIX uses strange locations for the executables
+ JAVACMD=$JAVA_HOME/jre/sh/java
+ else
+ JAVACMD=$JAVA_HOME/bin/java
+ fi
+ if [ ! -x "$JAVACMD" ] ; then
+ die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
+
+Please set the JAVA_HOME variable in your environment to match the
+location of your Java installation."
+ fi
+else
+ JAVACMD=java
+ if ! command -v java >/dev/null 2>&1
+ then
+ die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
+
+Please set the JAVA_HOME variable in your environment to match the
+location of your Java installation."
+ fi
+fi
+
+# Increase the maximum file descriptors if we can.
+if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then
+ case $MAX_FD in #(
+ max*)
+ # In POSIX sh, ulimit -H is undefined. That's why the result is checked to see if it worked.
+ # shellcheck disable=SC3045
+ MAX_FD=$( ulimit -H -n ) ||
+ warn "Could not query maximum file descriptor limit"
+ esac
+ case $MAX_FD in #(
+ '' | soft) :;; #(
+ *)
+ # In POSIX sh, ulimit -n is undefined. That's why the result is checked to see if it worked.
+ # shellcheck disable=SC3045
+ ulimit -n "$MAX_FD" ||
+ warn "Could not set maximum file descriptor limit to $MAX_FD"
+ esac
+fi
+
+# Collect all arguments for the java command, stacking in reverse order:
+# * args from the command line
+# * the main class name
+# * -classpath
+# * -D...appname settings
+# * --module-path (only if needed)
+# * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables.
+
+# For Cygwin or MSYS, switch paths to Windows format before running java
+if "$cygwin" || "$msys" ; then
+ APP_HOME=$( cygpath --path --mixed "$APP_HOME" )
+ CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" )
+
+ JAVACMD=$( cygpath --unix "$JAVACMD" )
+
+ # Now convert the arguments - kludge to limit ourselves to /bin/sh
+ for arg do
+ if
+ case $arg in #(
+ -*) false ;; # don't mess with options #(
+ /?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath
+ [ -e "$t" ] ;; #(
+ *) false ;;
+ esac
+ then
+ arg=$( cygpath --path --ignore --mixed "$arg" )
+ fi
+ # Roll the args list around exactly as many times as the number of
+ # args, so each arg winds up back in the position where it started, but
+ # possibly modified.
+ #
+ # NB: a `for` loop captures its iteration list before it begins, so
+ # changing the positional parameters here affects neither the number of
+ # iterations, nor the values presented in `arg`.
+ shift # remove old arg
+ set -- "$@" "$arg" # push replacement arg
+ done
+fi
+
+
+# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
+DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
+
+# Collect all arguments for the java command;
+# * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of
+# shell script including quotes and variable substitutions, so put them in
+# double quotes to make sure that they get re-expanded; and
+# * put everything else in single quotes, so that it's not re-expanded.
+
+set -- \
+ "-Dorg.gradle.appname=$APP_BASE_NAME" \
+ -classpath "$CLASSPATH" \
+ org.gradle.wrapper.GradleWrapperMain \
+ "$@"
+
+# Stop when "xargs" is not available.
+if ! command -v xargs >/dev/null 2>&1
+then
+ die "xargs is not available"
+fi
+
+# Use "xargs" to parse quoted args.
+#
+# With -n1 it outputs one arg per line, with the quotes and backslashes removed.
+#
+# In Bash we could simply go:
+#
+# readarray ARGS < <( xargs -n1 <<<"$var" ) &&
+# set -- "${ARGS[@]}" "$@"
+#
+# but POSIX shell has neither arrays nor command substitution, so instead we
+# post-process each arg (as a line of input to sed) to backslash-escape any
+# character that might be a shell metacharacter, then use eval to reverse
+# that process (while maintaining the separation between arguments), and wrap
+# the whole thing up as a single "set" statement.
+#
+# This will of course break if any of these variables contains a newline or
+# an unmatched quote.
+#
+
+eval "set -- $(
+ printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" |
+ xargs -n1 |
+ sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' |
+ tr '\n' ' '
+ )" '"$@"'
+
+exec "$JAVACMD" "$@"
diff --git a/intelliJ-modules/gradlew.bat b/intelliJ-modules/gradlew.bat
new file mode 100644
index 0000000000..93e3f59f13
--- /dev/null
+++ b/intelliJ-modules/gradlew.bat
@@ -0,0 +1,92 @@
+@rem
+@rem Copyright 2015 the original author or authors.
+@rem
+@rem Licensed under the Apache License, Version 2.0 (the "License");
+@rem you may not use this file except in compliance with the License.
+@rem You may obtain a copy of the License at
+@rem
+@rem https://www.apache.org/licenses/LICENSE-2.0
+@rem
+@rem Unless required by applicable law or agreed to in writing, software
+@rem distributed under the License is distributed on an "AS IS" BASIS,
+@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+@rem See the License for the specific language governing permissions and
+@rem limitations under the License.
+@rem
+
+@if "%DEBUG%"=="" @echo off
+@rem ##########################################################################
+@rem
+@rem Gradle startup script for Windows
+@rem
+@rem ##########################################################################
+
+@rem Set local scope for the variables with windows NT shell
+if "%OS%"=="Windows_NT" setlocal
+
+set DIRNAME=%~dp0
+if "%DIRNAME%"=="" set DIRNAME=.
+@rem This is normally unused
+set APP_BASE_NAME=%~n0
+set APP_HOME=%DIRNAME%
+
+@rem Resolve any "." and ".." in APP_HOME to make it shorter.
+for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi
+
+@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
+set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m"
+
+@rem Find java.exe
+if defined JAVA_HOME goto findJavaFromJavaHome
+
+set JAVA_EXE=java.exe
+%JAVA_EXE% -version >NUL 2>&1
+if %ERRORLEVEL% equ 0 goto execute
+
+echo.
+echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
+echo.
+echo Please set the JAVA_HOME variable in your environment to match the
+echo location of your Java installation.
+
+goto fail
+
+:findJavaFromJavaHome
+set JAVA_HOME=%JAVA_HOME:"=%
+set JAVA_EXE=%JAVA_HOME%/bin/java.exe
+
+if exist "%JAVA_EXE%" goto execute
+
+echo.
+echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
+echo.
+echo Please set the JAVA_HOME variable in your environment to match the
+echo location of your Java installation.
+
+goto fail
+
+:execute
+@rem Setup the command line
+
+set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
+
+
+@rem Execute Gradle
+"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %*
+
+:end
+@rem End local scope for the variables with windows NT shell
+if %ERRORLEVEL% equ 0 goto mainEnd
+
+:fail
+rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
+rem the _cmd.exe /c_ return code!
+set EXIT_CODE=%ERRORLEVEL%
+if %EXIT_CODE% equ 0 set EXIT_CODE=1
+if not ""=="%GRADLE_EXIT_CONSOLE%" exit %EXIT_CODE%
+exit /b %EXIT_CODE%
+
+:mainEnd
+if "%OS%"=="Windows_NT" endlocal
+
+:omega
diff --git a/intelliJ-modules/settings.gradle b/intelliJ-modules/settings.gradle
new file mode 100644
index 0000000000..ff10e09b3c
--- /dev/null
+++ b/intelliJ-modules/settings.gradle
@@ -0,0 +1,3 @@
+rootProject.name = 'intellij-modules'
+include 'stackoverflow-plugin'
+include 'stackoverflow-plugin-gradle'
diff --git a/intelliJ-modules/stackoverflow-plugin-gradle/build.gradle b/intelliJ-modules/stackoverflow-plugin-gradle/build.gradle
index cd0cc258bf..3967a1f075 100644
--- a/intelliJ-modules/stackoverflow-plugin-gradle/build.gradle
+++ b/intelliJ-modules/stackoverflow-plugin-gradle/build.gradle
@@ -1,6 +1,6 @@
plugins {
id 'java'
- id 'org.jetbrains.intellij' version '0.4.21'
+ id 'org.jetbrains.intellij' version '1.16.0'
}
group 'com.baeldung'
@@ -11,15 +11,17 @@ repositories {
}
dependencies {
- testCompile group: 'junit', name: 'junit', version: '4.12'
+ testImplementation('junit:junit:4.12')
}
// See https://github.com/JetBrains/gradle-intellij-plugin/
intellij {
- version '2020.1.1'
+ version = "2022.2.5"
+ type = "IC"
}
+
patchPluginXml {
- changeNotes """
+ changeNotes = """
Add change notes here.
most HTML tags may be used"""
}
\ No newline at end of file
diff --git a/intelliJ-modules/stackoverflow-plugin/build.gradle b/intelliJ-modules/stackoverflow-plugin/build.gradle
new file mode 100644
index 0000000000..81962e0e0a
--- /dev/null
+++ b/intelliJ-modules/stackoverflow-plugin/build.gradle
@@ -0,0 +1,5 @@
+// This project is not supposed to be built; we only check that it compiles.
+
+plugins {
+ id 'java'
+}
\ No newline at end of file
diff --git a/intelliJ-modules/stackoverflow-plugin/settings.gradle b/intelliJ-modules/stackoverflow-plugin/settings.gradle
new file mode 100644
index 0000000000..4ebea35284
--- /dev/null
+++ b/intelliJ-modules/stackoverflow-plugin/settings.gradle
@@ -0,0 +1 @@
+rootProject.name = 'stackoverflow-plugin'
diff --git a/spring-boot-modules/spring-boot-libraries/pom.xml b/spring-boot-modules/spring-boot-libraries/pom.xml
index ed9a414a60..b0f0c780aa 100644
--- a/spring-boot-modules/spring-boot-libraries/pom.xml
+++ b/spring-boot-modules/spring-boot-libraries/pom.xml
@@ -18,6 +18,10 @@
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-web</artifactId>
</dependency>
+ <dependency>
+ <groupId>org.springframework.boot</groupId>
+ <artifactId>spring-boot-starter-validation</artifactId>
+ </dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-security</artifactId>
@@ -95,7 +99,7 @@
- <groupId>com.github.vladimir-bukhtoyarov</groupId>
+ <groupId>com.bucket4j</groupId>
<artifactId>bucket4j-core</artifactId>
<version>${bucket4j.version}</version>
@@ -226,8 +230,8 @@
2.1
2.6.0
3.3.0
- <bucket4j.version>7.6.0</bucket4j.version>
- <bucket4j-spring-boot-starter.version>0.7.0</bucket4j-spring-boot-starter.version>
+ <bucket4j.version>8.1.0</bucket4j.version>
+ <bucket4j-spring-boot-starter.version>0.8.1</bucket4j-spring-boot-starter.version>
3.1.8
diff --git a/spring-boot-modules/spring-boot-libraries/src/main/resources/ratelimiting/application-bucket4j-starter.yml b/spring-boot-modules/spring-boot-libraries/src/main/resources/ratelimiting/application-bucket4j-starter.yml
index ecc9f22e0a..efff65555b 100644
--- a/spring-boot-modules/spring-boot-libraries/src/main/resources/ratelimiting/application-bucket4j-starter.yml
+++ b/spring-boot-modules/spring-boot-libraries/src/main/resources/ratelimiting/application-bucket4j-starter.yml
@@ -21,19 +21,19 @@ bucket4j:
url: /api/v1/area.*
http-response-body: "{ \"status\": 429, \"error\": \"Too Many Requests\", \"message\": \"You have exhausted your API Request Quota\" }"
rate-limits:
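+ # Newer bucket4j-spring-boot-starter versions use 'cache-key' instead of 'expression' to derive the bucket key per caller.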
- - expression: "getHeader('X-api-key')"
+ - cache-key: "getHeader('X-api-key')"
execute-condition: "getHeader('X-api-key').startsWith('PX001-')"
bandwidths:
- capacity: 100
time: 1
unit: hours
- - expression: "getHeader('X-api-key')"
+ - cache-key: "getHeader('X-api-key')"
execute-condition: "getHeader('X-api-key').startsWith('BX001-')"
bandwidths:
- capacity: 40
time: 1
unit: hours
- - expression: "getHeader('X-api-key')"
+ - cache-key: "getHeader('X-api-key')"
bandwidths:
- capacity: 20
time: 1