diff --git a/algorithms-miscellaneous-2/src/test/java/com/baeldung/algorithms/astar/RouteFinder.java b/algorithms-miscellaneous-2/src/test/java/com/baeldung/algorithms/astar/RouteFinder.java index 35458093c5..f8b66fec88 100644 --- a/algorithms-miscellaneous-2/src/test/java/com/baeldung/algorithms/astar/RouteFinder.java +++ b/algorithms-miscellaneous-2/src/test/java/com/baeldung/algorithms/astar/RouteFinder.java @@ -8,6 +8,9 @@ import java.util.PriorityQueue; import java.util.Queue; import java.util.stream.Collectors; +import lombok.extern.slf4j.Slf4j; + +@Slf4j public class RouteFinder { private final Graph graph; private final Scorer nextNodeScorer; @@ -28,11 +31,11 @@ public class RouteFinder { openSet.add(start); while (!openSet.isEmpty()) { - System.out.println("Open Set contains: " + openSet.stream().map(RouteNode::getCurrent).collect(Collectors.toSet())); + log.debug("Open Set contains: " + openSet.stream().map(RouteNode::getCurrent).collect(Collectors.toSet())); RouteNode next = openSet.poll(); - System.out.println("Looking at node: " + next); + log.debug("Looking at node: " + next); if (next.getCurrent().equals(to)) { - System.out.println("Found our destination!"); + log.debug("Found our destination!"); List route = new ArrayList<>(); RouteNode current = next; @@ -41,7 +44,7 @@ public class RouteFinder { current = allNodes.get(current.getPrevious()); } while (current != null); - System.out.println("Route: " + route); + log.debug("Route: " + route); return route; } @@ -55,7 +58,7 @@ public class RouteFinder { nextNode.setRouteScore(newScore); nextNode.setEstimatedScore(newScore + targetScorer.computeCost(connection, to)); openSet.add(nextNode); - System.out.println("Found a better route to node: " + nextNode); + log.debug("Found a better route to node: " + nextNode); } }); } diff --git a/algorithms-miscellaneous-2/src/test/java/com/baeldung/algorithms/astar/underground/RouteFinderIntegrationTest.java b/algorithms-miscellaneous-2/src/test/java/com/baeldung/algorithms/astar/underground/RouteFinderIntegrationTest.java index 1e4ad56d94..aba7f149da 100644 --- a/algorithms-miscellaneous-2/src/test/java/com/baeldung/algorithms/astar/underground/RouteFinderIntegrationTest.java +++ b/algorithms-miscellaneous-2/src/test/java/com/baeldung/algorithms/astar/underground/RouteFinderIntegrationTest.java @@ -1,5 +1,7 @@ package com.baeldung.algorithms.astar.underground; +import static org.assertj.core.api.Assertions.assertThat; + import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -10,9 +12,13 @@ import java.util.stream.Stream; import com.baeldung.algorithms.astar.Graph; import com.baeldung.algorithms.astar.RouteFinder; + +import lombok.extern.slf4j.Slf4j; + import org.junit.Before; import org.junit.Test; +@Slf4j public class RouteFinderIntegrationTest { private Graph underground; @@ -637,7 +643,8 @@ public class RouteFinderIntegrationTest { @Test public void findRoute() { List route = routeFinder.findRoute(underground.getNode("74"), underground.getNode("7")); + assertThat(route).size().isPositive(); - System.out.println(route.stream().map(Station::getName).collect(Collectors.toList())); + route.stream().map(Station::getName).collect(Collectors.toList()).forEach(station -> log.debug(station)); } } diff --git a/apache-kafka/README.md b/apache-kafka/README.md new file mode 100644 index 0000000000..5e724f95b6 --- /dev/null +++ b/apache-kafka/README.md @@ -0,0 +1,18 @@ +## Apache Kafka + +This module contains articles about Apache Kafka. 
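A note on the `RouteFinder` changes at the top of this patch: the `System.out.println` calls become `log.debug`, but the messages are still built with string concatenation, which runs even when the DEBUG level is disabled. SLF4J also accepts `{}` placeholders that defer formatting until the level check passes. A minimal sketch of the difference (the class name and sample value are hypothetical, and a plain `LoggerFactory` logger stands in for the field Lombok's `@Slf4j` generates):

```java
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LoggingSketch {
    private static final Logger log = LoggerFactory.getLogger(LoggingSketch.class);

    public static void main(String[] args) {
        Object next = "node-42"; // hypothetical stand-in for a RouteNode

        // Concatenation: the message string is built unconditionally.
        log.debug("Looking at node: " + next);

        // Placeholder: formatting is skipped entirely when DEBUG is off.
        log.debug("Looking at node: {}", next);
    }
}
```

Since `@Slf4j` generates an equivalent `log` field, the call sites in this diff could adopt the placeholder form as-is.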
+ +### Relevant articles +- [Kafka Streams vs Kafka Consumer](https://www.baeldung.com/java-kafka-streams-vs-kafka-consumer) +- [Kafka Topic Creation Using Java](https://www.baeldung.com/kafka-topic-creation) +- [Using Kafka MockConsumer](https://www.baeldung.com/kafka-mockconsumer) +- [Using Kafka MockProducer](https://www.baeldung.com/kafka-mockproducer) +- [Introduction to KafkaStreams in Java](https://www.baeldung.com/java-kafka-streams) +- [Introduction to Kafka Connectors](https://www.baeldung.com/kafka-connectors-guide) +- [Kafka Connect Example with MQTT and MongoDB](https://www.baeldung.com/kafka-connect-mqtt-mongodb) +- [Building a Data Pipeline with Flink and Kafka](https://www.baeldung.com/kafka-flink-data-pipeline) +- [Exactly Once Processing in Kafka with Java](https://www.baeldung.com/kafka-exactly-once) + + +##### Building the project +You can build the project from the command line using: *mvn clean install*, or in an IDE. \ No newline at end of file diff --git a/libraries-data-3/log4j.properties b/apache-kafka/log4j.properties similarity index 100% rename from libraries-data-3/log4j.properties rename to apache-kafka/log4j.properties diff --git a/apache-kafka/pom.xml b/apache-kafka/pom.xml new file mode 100644 index 0000000000..cda91ed92f --- /dev/null +++ b/apache-kafka/pom.xml @@ -0,0 +1,180 @@ + + + 4.0.0 + apache-kafka + apache-kafka + + + com.baeldung + parent-modules + 1.0.0-SNAPSHOT + + + + + org.apache.kafka + kafka-clients + ${kafka.version} + + + org.apache.kafka + kafka-streams + ${kafka.version} + + + org.slf4j + slf4j-api + ${org.slf4j.version} + + + org.slf4j + slf4j-log4j12 + ${org.slf4j.version} + + + org.apache.flink + flink-connector-kafka-0.11_2.11 + ${flink.version} + + + org.apache.flink + flink-streaming-java_2.11 + ${flink.version} + + + org.apache.flink + flink-core + ${flink.version} + + + commons-logging + commons-logging + + + + + org.apache.flink + flink-java + ${flink.version} + + + commons-logging + commons-logging + + + + + org.apache.flink + flink-test-utils_2.11 + ${flink.version} + test + + + com.google.guava + guava + ${guava.version} + + + org.awaitility + awaitility + ${awaitility.version} + test + + + org.awaitility + awaitility-proxy + ${awaitility.version} + test + + + com.fasterxml.jackson.datatype + jackson-datatype-jsr310 + ${jackson.version} + + + com.fasterxml.jackson.core + jackson-databind + ${jackson.version} + + + org.assertj + assertj-core + ${assertj.version} + test + + + org.testcontainers + kafka + ${testcontainers-kafka.version} + test + + + org.testcontainers + junit-jupiter + ${testcontainers-jupiter.version} + test + + + org.apache.spark + spark-core_2.11 + ${org.apache.spark.spark-core.version} + provided + + + org.apache.spark + spark-sql_2.11 + ${org.apache.spark.spark-core.version} + provided + + + org.apache.spark + spark-graphx_2.11 + ${org.apache.spark.spark-core.version} + provided + + + org.apache.spark + spark-streaming_2.11 + ${org.apache.spark.spark-core.version} + provided + + + org.apache.spark + spark-mllib_2.11 + ${org.apache.spark.spark-core.version} + provided + + + org.apache.spark + spark-streaming-kafka-0-10_2.11 + ${org.apache.spark.spark-core.version} + + + com.datastax.spark + spark-cassandra-connector_2.11 + ${com.datastax.spark.spark-cassandra-connector.version} + + + com.datastax.spark + spark-cassandra-connector-java_2.11 + ${com.datastax.spark.spark-cassandra-connector-java.version} + + + + + 3.6.2 + 2.8.0 + 1.15.3 + 1.15.3 + 1.5.0 + 3.0.0 + 29.0-jre + 2.4.8 + 0.8.1-spark3.0-s_2.12 + 
2.5.2 + 1.6.0-M1 + + + \ No newline at end of file diff --git a/apache-kafka/src/main/java/com/baeldung/flink/FlinkDataPipeline.java b/apache-kafka/src/main/java/com/baeldung/flink/FlinkDataPipeline.java new file mode 100644 index 0000000000..4502b628b2 --- /dev/null +++ b/apache-kafka/src/main/java/com/baeldung/flink/FlinkDataPipeline.java @@ -0,0 +1,70 @@ +package com.baeldung.flink; + +import com.baeldung.flink.model.Backup; +import com.baeldung.flink.model.InputMessage; +import com.baeldung.flink.operator.BackupAggregator; +import com.baeldung.flink.operator.InputMessageTimestampAssigner; +import com.baeldung.flink.operator.WordsCapitalizer; +import org.apache.flink.streaming.api.TimeCharacteristic; +import org.apache.flink.streaming.api.datastream.DataStream; +import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; +import org.apache.flink.streaming.api.windowing.time.Time; +import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer011; +import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer011; + +import static com.baeldung.flink.connector.Consumers.*; +import static com.baeldung.flink.connector.Producers.*; + +public class FlinkDataPipeline { + + public static void capitalize() throws Exception { + String inputTopic = "flink_input"; + String outputTopic = "flink_output"; + String consumerGroup = "baeldung"; + String address = "localhost:9092"; + + StreamExecutionEnvironment environment = StreamExecutionEnvironment.getExecutionEnvironment(); + + FlinkKafkaConsumer011<String> flinkKafkaConsumer = createStringConsumerForTopic(inputTopic, address, consumerGroup); + flinkKafkaConsumer.setStartFromEarliest(); + + DataStream<String> stringInputStream = environment.addSource(flinkKafkaConsumer); + + FlinkKafkaProducer011<String> flinkKafkaProducer = createStringProducer(outputTopic, address); + + stringInputStream.map(new WordsCapitalizer()) + .addSink(flinkKafkaProducer); + + environment.execute(); + } + + public static void createBackup() throws Exception { + String inputTopic = "flink_input"; + String outputTopic = "flink_output"; + String consumerGroup = "baeldung"; + String kafkaAddress = "localhost:9092"; + + StreamExecutionEnvironment environment = StreamExecutionEnvironment.getExecutionEnvironment(); + + environment.setStreamTimeCharacteristic(TimeCharacteristic.EventTime); + + FlinkKafkaConsumer011<InputMessage> flinkKafkaConsumer = createInputMessageConsumer(inputTopic, kafkaAddress, consumerGroup); + flinkKafkaConsumer.setStartFromEarliest(); + + flinkKafkaConsumer.assignTimestampsAndWatermarks(new InputMessageTimestampAssigner()); + FlinkKafkaProducer011<Backup> flinkKafkaProducer = createBackupProducer(outputTopic, kafkaAddress); + + DataStream<InputMessage> inputMessagesStream = environment.addSource(flinkKafkaConsumer); + + inputMessagesStream.timeWindowAll(Time.hours(24)) + .aggregate(new BackupAggregator()) + .addSink(flinkKafkaProducer); + + environment.execute(); + } + + public static void main(String[] args) throws Exception { + createBackup(); + } + +} diff --git a/libraries-data-2/src/main/java/com/baeldung/flink/connector/Consumers.java b/apache-kafka/src/main/java/com/baeldung/flink/connector/Consumers.java similarity index 53% rename from libraries-data-2/src/main/java/com/baeldung/flink/connector/Consumers.java rename to apache-kafka/src/main/java/com/baeldung/flink/connector/Consumers.java index 514085f9c4..c72cb8a2d6 100644 --- a/libraries-data-2/src/main/java/com/baeldung/flink/connector/Consumers.java +++
b/apache-kafka/src/main/java/com/baeldung/flink/connector/Consumers.java @@ -9,23 +9,20 @@ import java.util.Properties; public class Consumers { -public static FlinkKafkaConsumer011<String> createStringConsumerForTopic( - String topic, String kafkaAddress, String kafkaGroup ) { - Properties props = new Properties(); - props.setProperty("bootstrap.servers", kafkaAddress); - props.setProperty("group.id",kafkaGroup); - FlinkKafkaConsumer011<String> consumer = - new FlinkKafkaConsumer011<>(topic, new SimpleStringSchema(),props); + public static FlinkKafkaConsumer011<String> createStringConsumerForTopic(String topic, String kafkaAddress, String kafkaGroup) { + Properties props = new Properties(); + props.setProperty("bootstrap.servers", kafkaAddress); + props.setProperty("group.id", kafkaGroup); + FlinkKafkaConsumer011<String> consumer = new FlinkKafkaConsumer011<>(topic, new SimpleStringSchema(), props); - return consumer; -} + return consumer; + } - public static FlinkKafkaConsumer011<InputMessage> createInputMessageConsumer(String topic, String kafkaAddress, String kafkaGroup ) { + public static FlinkKafkaConsumer011<InputMessage> createInputMessageConsumer(String topic, String kafkaAddress, String kafkaGroup) { Properties properties = new Properties(); properties.setProperty("bootstrap.servers", kafkaAddress); - properties.setProperty("group.id",kafkaGroup); - FlinkKafkaConsumer011<InputMessage> consumer = new FlinkKafkaConsumer011( - topic, new InputMessageDeserializationSchema(),properties); + properties.setProperty("group.id", kafkaGroup); + FlinkKafkaConsumer011<InputMessage> consumer = new FlinkKafkaConsumer011(topic, new InputMessageDeserializationSchema(), properties); return consumer; } diff --git a/libraries-data-2/src/main/java/com/baeldung/flink/connector/Producers.java b/apache-kafka/src/main/java/com/baeldung/flink/connector/Producers.java similarity index 100% rename from libraries-data-2/src/main/java/com/baeldung/flink/connector/Producers.java rename to apache-kafka/src/main/java/com/baeldung/flink/connector/Producers.java diff --git a/libraries-data-2/src/main/java/com/baeldung/flink/model/Backup.java b/apache-kafka/src/main/java/com/baeldung/flink/model/Backup.java similarity index 100% rename from libraries-data-2/src/main/java/com/baeldung/flink/model/Backup.java rename to apache-kafka/src/main/java/com/baeldung/flink/model/Backup.java diff --git a/libraries-data-2/src/main/java/com/baeldung/flink/model/InputMessage.java b/apache-kafka/src/main/java/com/baeldung/flink/model/InputMessage.java similarity index 82% rename from libraries-data-2/src/main/java/com/baeldung/flink/model/InputMessage.java rename to apache-kafka/src/main/java/com/baeldung/flink/model/InputMessage.java index b3f75256ae..d33eb5a9ac 100644 --- a/libraries-data-2/src/main/java/com/baeldung/flink/model/InputMessage.java +++ b/apache-kafka/src/main/java/com/baeldung/flink/model/InputMessage.java @@ -18,6 +18,7 @@ public class InputMessage { public String getSender() { return sender; } + public void setSender(String sender) { this.sender = sender; } @@ -55,12 +56,14 @@ public class InputMessage { @Override public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; InputMessage message1 = (InputMessage) o; - return Objects.equal(sender, message1.sender) && - Objects.equal(recipient, message1.recipient) && - Objects.equal(sentAt, message1.sentAt) && + return Objects.equal(sender, message1.sender) && + Objects.equal(recipient,
message1.recipient) && + Objects.equal(sentAt, message1.sentAt) && Objects.equal(message, message1.message); } diff --git a/apache-kafka/src/main/java/com/baeldung/flink/operator/BackupAggregator.java b/apache-kafka/src/main/java/com/baeldung/flink/operator/BackupAggregator.java new file mode 100644 index 0000000000..bac1c8c705 --- /dev/null +++ b/apache-kafka/src/main/java/com/baeldung/flink/operator/BackupAggregator.java @@ -0,0 +1,34 @@ +package com.baeldung.flink.operator; + +import com.baeldung.flink.model.Backup; +import com.baeldung.flink.model.InputMessage; +import org.apache.flink.api.common.functions.AggregateFunction; + +import java.time.LocalDateTime; +import java.util.ArrayList; +import java.util.List; + +public class BackupAggregator implements AggregateFunction<InputMessage, List<InputMessage>, Backup> { + @Override + public List<InputMessage> createAccumulator() { + return new ArrayList<>(); + } + + @Override + public List<InputMessage> add(InputMessage inputMessage, List<InputMessage> inputMessages) { + inputMessages.add(inputMessage); + return inputMessages; + } + + @Override + public Backup getResult(List<InputMessage> inputMessages) { + Backup backup = new Backup(inputMessages, LocalDateTime.now()); + return backup; + } + + @Override + public List<InputMessage> merge(List<InputMessage> inputMessages, List<InputMessage> acc1) { + inputMessages.addAll(acc1); + return inputMessages; + } +} diff --git a/libraries-data-2/src/main/java/com/baeldung/flink/operator/InputMessageTimestampAssigner.java b/apache-kafka/src/main/java/com/baeldung/flink/operator/InputMessageTimestampAssigner.java similarity index 88% rename from libraries-data-2/src/main/java/com/baeldung/flink/operator/InputMessageTimestampAssigner.java rename to apache-kafka/src/main/java/com/baeldung/flink/operator/InputMessageTimestampAssigner.java index 05828d9588..995fe41717 100644 --- a/libraries-data-2/src/main/java/com/baeldung/flink/operator/InputMessageTimestampAssigner.java +++ b/apache-kafka/src/main/java/com/baeldung/flink/operator/InputMessageTimestampAssigner.java @@ -12,7 +12,9 @@ public class InputMessageTimestampAssigner implements AssignerWithPunctuatedWatermarks<InputMessage> { @Override public long extractTimestamp(InputMessage element, long previousElementTimestamp) { ZoneId zoneId = ZoneId.systemDefault(); - return element.getSentAt().atZone(zoneId).toEpochSecond() * 1000; + return element.getSentAt() + .atZone(zoneId) + .toEpochSecond() * 1000; } @Nullable diff --git a/libraries-data-2/src/main/java/com/baeldung/flink/operator/WordsCapitalizer.java b/apache-kafka/src/main/java/com/baeldung/flink/operator/WordsCapitalizer.java similarity index 100% rename from libraries-data-2/src/main/java/com/baeldung/flink/operator/WordsCapitalizer.java rename to apache-kafka/src/main/java/com/baeldung/flink/operator/WordsCapitalizer.java diff --git a/libraries-data-2/src/main/java/com/baeldung/flink/schema/BackupSerializationSchema.java b/apache-kafka/src/main/java/com/baeldung/flink/schema/BackupSerializationSchema.java similarity index 90% rename from libraries-data-2/src/main/java/com/baeldung/flink/schema/BackupSerializationSchema.java rename to apache-kafka/src/main/java/com/baeldung/flink/schema/BackupSerializationSchema.java index 967b266bb6..d4b7b0955a 100644 --- a/libraries-data-2/src/main/java/com/baeldung/flink/schema/BackupSerializationSchema.java +++ b/apache-kafka/src/main/java/com/baeldung/flink/schema/BackupSerializationSchema.java @@ -9,8 +9,7 @@ import org.apache.flink.api.common.serialization.SerializationSchema; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -public class BackupSerializationSchema - implements
SerializationSchema<Backup> { +public class BackupSerializationSchema implements SerializationSchema<Backup> { static ObjectMapper objectMapper = new ObjectMapper().registerModule(new JavaTimeModule()); @@ -18,7 +17,7 @@ public class BackupSerializationSchema @Override public byte[] serialize(Backup backupMessage) { - if(objectMapper == null) { + if (objectMapper == null) { objectMapper.setVisibility(PropertyAccessor.FIELD, JsonAutoDetect.Visibility.ANY); objectMapper = new ObjectMapper().registerModule(new JavaTimeModule()); } diff --git a/libraries-data-2/src/main/java/com/baeldung/flink/schema/InputMessageDeserializationSchema.java b/apache-kafka/src/main/java/com/baeldung/flink/schema/InputMessageDeserializationSchema.java similarity index 89% rename from libraries-data-2/src/main/java/com/baeldung/flink/schema/InputMessageDeserializationSchema.java rename to apache-kafka/src/main/java/com/baeldung/flink/schema/InputMessageDeserializationSchema.java index 9aaf8b9877..e521af7c2d 100644 --- a/libraries-data-2/src/main/java/com/baeldung/flink/schema/InputMessageDeserializationSchema.java +++ b/apache-kafka/src/main/java/com/baeldung/flink/schema/InputMessageDeserializationSchema.java @@ -8,12 +8,10 @@ import org.apache.flink.api.common.typeinfo.TypeInformation; import java.io.IOException; -public class InputMessageDeserializationSchema implements - DeserializationSchema<InputMessage> { +public class InputMessageDeserializationSchema implements DeserializationSchema<InputMessage> { static ObjectMapper objectMapper = new ObjectMapper().registerModule(new JavaTimeModule()); - @Override public InputMessage deserialize(byte[] bytes) throws IOException { diff --git a/libraries-data-3/src/main/java/com/baeldung/kafka/admin/KafkaTopicApplication.java b/apache-kafka/src/main/java/com/baeldung/kafka/admin/KafkaTopicApplication.java similarity index 77% rename from libraries-data-3/src/main/java/com/baeldung/kafka/admin/KafkaTopicApplication.java rename to apache-kafka/src/main/java/com/baeldung/kafka/admin/KafkaTopicApplication.java index 0d74e27d4e..25d621166d 100644 --- a/libraries-data-3/src/main/java/com/baeldung/kafka/admin/KafkaTopicApplication.java +++ b/apache-kafka/src/main/java/com/baeldung/kafka/admin/KafkaTopicApplication.java @@ -27,11 +27,11 @@ public class KafkaTopicApplication { short replicationFactor = 1; NewTopic newTopic = new NewTopic(topicName, partitions, replicationFactor); - CreateTopicsResult result = admin.createTopics( - Collections.singleton(newTopic)); + CreateTopicsResult result = admin.createTopics(Collections.singleton(newTopic)); // get the async result for the new topic creation - KafkaFuture<Void> future = result.values().get(topicName); + KafkaFuture<Void> future = result.values() + .get(topicName); // call get() to block until topic creation has completed or failed future.get(); @@ -47,15 +47,13 @@ public class KafkaTopicApplication { short replicationFactor = 1; NewTopic newTopic = new NewTopic(topicName, partitions, replicationFactor); - CreateTopicsOptions topicOptions = new CreateTopicsOptions() - .validateOnly(true) - .retryOnQuotaViolation(true); + CreateTopicsOptions topicOptions = new CreateTopicsOptions().validateOnly(true) + .retryOnQuotaViolation(true); - CreateTopicsResult result = admin.createTopics( - Collections.singleton(newTopic), topicOptions - ); + CreateTopicsResult result = admin.createTopics(Collections.singleton(newTopic), topicOptions); - KafkaFuture<Void> future = result.values().get(topicName); + KafkaFuture<Void> future = result.values() + .get(topicName); future.get(); } } @@ -72,14 +70,12 @@ public
class KafkaTopicApplication { Map<String, String> newTopicConfig = new HashMap<>(); newTopicConfig.put(TopicConfig.CLEANUP_POLICY_CONFIG, TopicConfig.CLEANUP_POLICY_COMPACT); newTopicConfig.put(TopicConfig.COMPRESSION_TYPE_CONFIG, "lz4"); - NewTopic newTopic = new NewTopic(topicName, partitions, replicationFactor) - .configs(newTopicConfig); + NewTopic newTopic = new NewTopic(topicName, partitions, replicationFactor).configs(newTopicConfig); - CreateTopicsResult result = admin.createTopics( - Collections.singleton(newTopic) - ); + CreateTopicsResult result = admin.createTopics(Collections.singleton(newTopic)); - KafkaFuture<Void> future = result.values().get(topicName); + KafkaFuture<Void> future = result.values() + .get(topicName); future.get(); } } diff --git a/libraries-data-2/src/main/java/com/baeldung/kafka/consumer/CountryPopulation.java b/apache-kafka/src/main/java/com/baeldung/kafka/consumer/CountryPopulation.java similarity index 100% rename from libraries-data-2/src/main/java/com/baeldung/kafka/consumer/CountryPopulation.java rename to apache-kafka/src/main/java/com/baeldung/kafka/consumer/CountryPopulation.java diff --git a/libraries-data-2/src/main/java/com/baeldung/kafka/consumer/CountryPopulationConsumer.java b/apache-kafka/src/main/java/com/baeldung/kafka/consumer/CountryPopulationConsumer.java similarity index 89% rename from libraries-data-2/src/main/java/com/baeldung/kafka/consumer/CountryPopulationConsumer.java rename to apache-kafka/src/main/java/com/baeldung/kafka/consumer/CountryPopulationConsumer.java index ba4dfe6f3b..a67d3a581c 100644 --- a/libraries-data-2/src/main/java/com/baeldung/kafka/consumer/CountryPopulationConsumer.java +++ b/apache-kafka/src/main/java/com/baeldung/kafka/consumer/CountryPopulationConsumer.java @@ -19,9 +19,7 @@ public class CountryPopulationConsumer { private java.util.function.Consumer<Throwable> exceptionConsumer; private java.util.function.Consumer<CountryPopulation> countryPopulationConsumer; - public CountryPopulationConsumer( - Consumer<String, Integer> consumer, java.util.function.Consumer<Throwable> exceptionConsumer, - java.util.function.Consumer<CountryPopulation> countryPopulationConsumer) { + public CountryPopulationConsumer(Consumer<String, Integer> consumer, java.util.function.Consumer<Throwable> exceptionConsumer, java.util.function.Consumer<CountryPopulation> countryPopulationConsumer) { this.consumer = consumer; this.exceptionConsumer = exceptionConsumer; this.countryPopulationConsumer = countryPopulationConsumer; diff --git a/libraries-6/src/main/java/com/baeldung/kafka/TransactionalMessageProducer.java b/apache-kafka/src/main/java/com/baeldung/kafka/exactlyonce/TransactionalMessageProducer.java similarity index 87% rename from libraries-6/src/main/java/com/baeldung/kafka/TransactionalMessageProducer.java rename to apache-kafka/src/main/java/com/baeldung/kafka/exactlyonce/TransactionalMessageProducer.java index 15488bbaf4..8f2fe6e309 100644 --- a/libraries-6/src/main/java/com/baeldung/kafka/TransactionalMessageProducer.java +++ b/apache-kafka/src/main/java/com/baeldung/kafka/exactlyonce/TransactionalMessageProducer.java @@ -1,4 +1,4 @@ -package com.baeldung.kafka; +package com.baeldung.kafka.exactlyonce; import org.apache.kafka.clients.producer.KafkaProducer; import org.apache.kafka.clients.producer.ProducerRecord; @@ -24,16 +24,16 @@ public class TransactionalMessageProducer { producer.initTransactions(); - try{ + try { producer.beginTransaction(); - Stream.of(DATA_MESSAGE_1, DATA_MESSAGE_2).forEach(s -> producer.send( - new ProducerRecord("input", null, s))); + Stream.of(DATA_MESSAGE_1, DATA_MESSAGE_2) + .forEach(s -> producer.send(new ProducerRecord("input", null,
s))); producer.commitTransaction(); - }catch (KafkaException e){ + } catch (KafkaException e) { producer.abortTransaction(); diff --git a/libraries-6/src/main/java/com/baeldung/kafka/TransactionalWordCount.java b/apache-kafka/src/main/java/com/baeldung/kafka/exactlyonce/TransactionalWordCount.java similarity index 90% rename from libraries-6/src/main/java/com/baeldung/kafka/TransactionalWordCount.java rename to apache-kafka/src/main/java/com/baeldung/kafka/exactlyonce/TransactionalWordCount.java index 0563ba6684..b9a2cb9f85 100644 --- a/libraries-6/src/main/java/com/baeldung/kafka/TransactionalWordCount.java +++ b/apache-kafka/src/main/java/com/baeldung/kafka/exactlyonce/TransactionalWordCount.java @@ -1,4 +1,4 @@ -package com.baeldung.kafka; +package com.baeldung.kafka.exactlyonce; import org.apache.kafka.clients.consumer.ConsumerRecord; import org.apache.kafka.clients.consumer.ConsumerRecords; @@ -43,10 +43,11 @@ public class TransactionalWordCount { ConsumerRecords records = consumer.poll(ofSeconds(60)); Map wordCountMap = records.records(new TopicPartition(INPUT_TOPIC, 0)) - .stream() - .flatMap(record -> Stream.of(record.value().split(" "))) - .map(word -> Tuple.of(word, 1)) - .collect(Collectors.toMap(tuple -> tuple.getKey(), t1 -> t1.getValue(), (v1, v2) -> v1 + v2)); + .stream() + .flatMap(record -> Stream.of(record.value() + .split(" "))) + .map(word -> Tuple.of(word, 1)) + .collect(Collectors.toMap(tuple -> tuple.getKey(), t1 -> t1.getValue(), (v1, v2) -> v1 + v2)); producer.beginTransaction(); @@ -56,7 +57,8 @@ public class TransactionalWordCount { for (TopicPartition partition : records.partitions()) { List> partitionedRecords = records.records(partition); - long offset = partitionedRecords.get(partitionedRecords.size() - 1).offset(); + long offset = partitionedRecords.get(partitionedRecords.size() - 1) + .offset(); offsetsToCommit.put(partition, new OffsetAndMetadata(offset + 1)); } @@ -72,7 +74,6 @@ public class TransactionalWordCount { } - } private static KafkaConsumer createKafkaConsumer() { diff --git a/libraries-6/src/main/java/com/baeldung/kafka/Tuple.java b/apache-kafka/src/main/java/com/baeldung/kafka/exactlyonce/Tuple.java similarity index 69% rename from libraries-6/src/main/java/com/baeldung/kafka/Tuple.java rename to apache-kafka/src/main/java/com/baeldung/kafka/exactlyonce/Tuple.java index 883de4ba21..ad61e905fd 100644 --- a/libraries-6/src/main/java/com/baeldung/kafka/Tuple.java +++ b/apache-kafka/src/main/java/com/baeldung/kafka/exactlyonce/Tuple.java @@ -1,4 +1,4 @@ -package com.baeldung.kafka; +package com.baeldung.kafka.exactlyonce; public class Tuple { @@ -10,8 +10,8 @@ public class Tuple { this.value = value; } - public static Tuple of(String key, Integer value){ - return new Tuple(key,value); + public static Tuple of(String key, Integer value) { + return new Tuple(key, value); } public String getKey() { diff --git a/libraries-data-2/src/main/java/com/baeldung/kafka/producer/EvenOddPartitioner.java b/apache-kafka/src/main/java/com/baeldung/kafka/producer/EvenOddPartitioner.java similarity index 100% rename from libraries-data-2/src/main/java/com/baeldung/kafka/producer/EvenOddPartitioner.java rename to apache-kafka/src/main/java/com/baeldung/kafka/producer/EvenOddPartitioner.java diff --git a/libraries-data-2/src/main/java/com/baeldung/kafka/producer/KafkaProducer.java b/apache-kafka/src/main/java/com/baeldung/kafka/producer/KafkaProducer.java similarity index 95% rename from libraries-data-2/src/main/java/com/baeldung/kafka/producer/KafkaProducer.java 
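For context on the exactly-once producer moved above: `producer.initTransactions()` only succeeds when the producer was created with a `transactional.id`, which also implies idempotence. A minimal sketch of that wiring, assuming a local broker at `localhost:9092` and the `input` topic used by `TransactionalMessageProducer`; the class name and transactional id are illustrative:

```java
import java.util.Properties;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.KafkaException;
import org.apache.kafka.common.serialization.StringSerializer;

public class TransactionalProducerSketch {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); // assumed broker address
        props.put(ProducerConfig.TRANSACTIONAL_ID_CONFIG, "prod-1"); // required before initTransactions()
        props.put(ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG, "true"); // implied by transactions
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());

        try (KafkaProducer<String, String> producer = new KafkaProducer<>(props)) {
            producer.initTransactions();
            try {
                producer.beginTransaction();
                producer.send(new ProducerRecord<>("input", null, "sample message"));
                producer.commitTransaction();
            } catch (KafkaException e) {
                // Uncommitted records are discarded atomically, mirroring the class above.
                producer.abortTransaction();
            }
        }
    }
}
```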
rename to apache-kafka/src/main/java/com/baeldung/kafka/producer/KafkaProducer.java index 911c9ed3d7..fa508593e0 100644 --- a/libraries-data-2/src/main/java/com/baeldung/kafka/producer/KafkaProducer.java +++ b/apache-kafka/src/main/java/com/baeldung/kafka/producer/KafkaProducer.java @@ -15,8 +15,7 @@ public class KafkaProducer { } public Future send(String key, String value) { - ProducerRecord record = new ProducerRecord("topic_sports_news", - key, value); + ProducerRecord record = new ProducerRecord("topic_sports_news", key, value); return producer.send(record); } @@ -36,5 +35,4 @@ public class KafkaProducer { producer.commitTransaction(); } - } diff --git a/libraries-data/src/main/resources/kafka-connect/01_Quick_Start/connect-file-sink.properties b/apache-kafka/src/main/resources/kafka-connect/01_Quick_Start/connect-file-sink.properties similarity index 100% rename from libraries-data/src/main/resources/kafka-connect/01_Quick_Start/connect-file-sink.properties rename to apache-kafka/src/main/resources/kafka-connect/01_Quick_Start/connect-file-sink.properties diff --git a/libraries-data/src/main/resources/kafka-connect/01_Quick_Start/connect-file-source.properties b/apache-kafka/src/main/resources/kafka-connect/01_Quick_Start/connect-file-source.properties similarity index 100% rename from libraries-data/src/main/resources/kafka-connect/01_Quick_Start/connect-file-source.properties rename to apache-kafka/src/main/resources/kafka-connect/01_Quick_Start/connect-file-source.properties diff --git a/libraries-data/src/main/resources/kafka-connect/01_Quick_Start/connect-standalone.properties b/apache-kafka/src/main/resources/kafka-connect/01_Quick_Start/connect-standalone.properties similarity index 100% rename from libraries-data/src/main/resources/kafka-connect/01_Quick_Start/connect-standalone.properties rename to apache-kafka/src/main/resources/kafka-connect/01_Quick_Start/connect-standalone.properties diff --git a/libraries-data/src/main/resources/kafka-connect/02_Distributed/connect-distributed.properties b/apache-kafka/src/main/resources/kafka-connect/02_Distributed/connect-distributed.properties similarity index 100% rename from libraries-data/src/main/resources/kafka-connect/02_Distributed/connect-distributed.properties rename to apache-kafka/src/main/resources/kafka-connect/02_Distributed/connect-distributed.properties diff --git a/libraries-data/src/main/resources/kafka-connect/02_Distributed/connect-file-sink.json b/apache-kafka/src/main/resources/kafka-connect/02_Distributed/connect-file-sink.json similarity index 100% rename from libraries-data/src/main/resources/kafka-connect/02_Distributed/connect-file-sink.json rename to apache-kafka/src/main/resources/kafka-connect/02_Distributed/connect-file-sink.json diff --git a/libraries-data/src/main/resources/kafka-connect/02_Distributed/connect-file-source.json b/apache-kafka/src/main/resources/kafka-connect/02_Distributed/connect-file-source.json similarity index 100% rename from libraries-data/src/main/resources/kafka-connect/02_Distributed/connect-file-source.json rename to apache-kafka/src/main/resources/kafka-connect/02_Distributed/connect-file-source.json diff --git a/libraries-data/src/main/resources/kafka-connect/03_Transform/connect-distributed.properties b/apache-kafka/src/main/resources/kafka-connect/03_Transform/connect-distributed.properties similarity index 100% rename from libraries-data/src/main/resources/kafka-connect/03_Transform/connect-distributed.properties rename to 
apache-kafka/src/main/resources/kafka-connect/03_Transform/connect-distributed.properties diff --git a/libraries-data/src/main/resources/kafka-connect/03_Transform/connect-file-source-transform.json b/apache-kafka/src/main/resources/kafka-connect/03_Transform/connect-file-source-transform.json similarity index 100% rename from libraries-data/src/main/resources/kafka-connect/03_Transform/connect-file-source-transform.json rename to apache-kafka/src/main/resources/kafka-connect/03_Transform/connect-file-source-transform.json diff --git a/libraries-data/src/main/resources/kafka-connect/04_Custom/connect-mongodb-sink.json b/apache-kafka/src/main/resources/kafka-connect/04_Custom/connect-mongodb-sink.json similarity index 100% rename from libraries-data/src/main/resources/kafka-connect/04_Custom/connect-mongodb-sink.json rename to apache-kafka/src/main/resources/kafka-connect/04_Custom/connect-mongodb-sink.json diff --git a/libraries-data/src/main/resources/kafka-connect/04_Custom/connect-mqtt-source.json b/apache-kafka/src/main/resources/kafka-connect/04_Custom/connect-mqtt-source.json similarity index 100% rename from libraries-data/src/main/resources/kafka-connect/04_Custom/connect-mqtt-source.json rename to apache-kafka/src/main/resources/kafka-connect/04_Custom/connect-mqtt-source.json diff --git a/libraries-data/src/main/resources/kafka-connect/04_Custom/docker-compose.yaml b/apache-kafka/src/main/resources/kafka-connect/04_Custom/docker-compose.yaml similarity index 100% rename from libraries-data/src/main/resources/kafka-connect/04_Custom/docker-compose.yaml rename to apache-kafka/src/main/resources/kafka-connect/04_Custom/docker-compose.yaml diff --git a/libraries-data-2/src/test/java/com/baeldung/flink/BackupCreatorIntegrationTest.java b/apache-kafka/src/test/java/com/baeldung/flink/BackupCreatorIntegrationTest.java similarity index 100% rename from libraries-data-2/src/test/java/com/baeldung/flink/BackupCreatorIntegrationTest.java rename to apache-kafka/src/test/java/com/baeldung/flink/BackupCreatorIntegrationTest.java diff --git a/libraries-data-2/src/test/java/com/baeldung/flink/WordCapitalizerIntegrationTest.java b/apache-kafka/src/test/java/com/baeldung/flink/WordCapitalizerIntegrationTest.java similarity index 100% rename from libraries-data-2/src/test/java/com/baeldung/flink/WordCapitalizerIntegrationTest.java rename to apache-kafka/src/test/java/com/baeldung/flink/WordCapitalizerIntegrationTest.java diff --git a/libraries-data-3/src/test/java/com/baeldung/kafka/admin/KafkaTopicApplicationIntegrationTest.java b/apache-kafka/src/test/java/com/baeldung/kafka/admin/KafkaTopicApplicationIntegrationTest.java similarity index 100% rename from libraries-data-3/src/test/java/com/baeldung/kafka/admin/KafkaTopicApplicationIntegrationTest.java rename to apache-kafka/src/test/java/com/baeldung/kafka/admin/KafkaTopicApplicationIntegrationTest.java diff --git a/libraries-data-2/src/test/java/com/baeldung/kafka/consumer/CountryPopulationConsumerUnitTest.java b/apache-kafka/src/test/java/com/baeldung/kafka/consumer/CountryPopulationConsumerUnitTest.java similarity index 100% rename from libraries-data-2/src/test/java/com/baeldung/kafka/consumer/CountryPopulationConsumerUnitTest.java rename to apache-kafka/src/test/java/com/baeldung/kafka/consumer/CountryPopulationConsumerUnitTest.java diff --git a/libraries-data-2/src/test/java/com/baeldung/kafka/producer/KafkaProducerUnitTest.java b/apache-kafka/src/test/java/com/baeldung/kafka/producer/KafkaProducerUnitTest.java similarity index 100% rename 
from libraries-data-2/src/test/java/com/baeldung/kafka/producer/KafkaProducerUnitTest.java rename to apache-kafka/src/test/java/com/baeldung/kafka/producer/KafkaProducerUnitTest.java diff --git a/libraries-data-3/src/test/java/com/baeldung/kafka/streams/KafkaStreamsLiveTest.java b/apache-kafka/src/test/java/com/baeldung/kafka/streamsvsconsumer/KafkaStreamsLiveTest.java similarity index 99% rename from libraries-data-3/src/test/java/com/baeldung/kafka/streams/KafkaStreamsLiveTest.java rename to apache-kafka/src/test/java/com/baeldung/kafka/streamsvsconsumer/KafkaStreamsLiveTest.java index 0d4c0606e3..88de6101dc 100644 --- a/libraries-data-3/src/test/java/com/baeldung/kafka/streams/KafkaStreamsLiveTest.java +++ b/apache-kafka/src/test/java/com/baeldung/kafka/streamsvsconsumer/KafkaStreamsLiveTest.java @@ -1,4 +1,4 @@ -package com.baeldung.kafka.streams; +package com.baeldung.kafka.streamsvsconsumer; import org.apache.kafka.clients.consumer.ConsumerConfig; import org.apache.kafka.clients.producer.KafkaProducer; diff --git a/libraries-data/src/test/java/com/baeldung/kafkastreams/KafkaStreamsLiveTest.java b/apache-kafka/src/test/java/com/baeldung/kafkastreams/KafkaStreamsLiveTest.java similarity index 53% rename from libraries-data/src/test/java/com/baeldung/kafkastreams/KafkaStreamsLiveTest.java rename to apache-kafka/src/test/java/com/baeldung/kafkastreams/KafkaStreamsLiveTest.java index 32568e9ea5..3b559b619e 100644 --- a/libraries-data/src/test/java/com/baeldung/kafkastreams/KafkaStreamsLiveTest.java +++ b/apache-kafka/src/test/java/com/baeldung/kafkastreams/KafkaStreamsLiveTest.java @@ -1,61 +1,78 @@ package com.baeldung.kafkastreams; -import org.apache.kafka.clients.consumer.ConsumerConfig; -import org.apache.kafka.common.serialization.Serde; -import org.apache.kafka.common.serialization.Serdes; -import org.apache.kafka.streams.KafkaStreams; -import org.apache.kafka.streams.StreamsConfig; -import org.apache.kafka.streams.kstream.KStream; -import org.apache.kafka.streams.kstream.KStreamBuilder; -import org.apache.kafka.streams.kstream.KTable; -import org.apache.kafka.test.TestUtils; -import org.junit.Ignore; -import org.junit.Test; - +import java.io.IOException; +import java.io.UncheckedIOException; +import java.nio.file.Files; +import java.nio.file.Path; import java.util.Arrays; import java.util.Properties; import java.util.regex.Pattern; +import org.apache.kafka.clients.consumer.ConsumerConfig; +import org.apache.kafka.common.serialization.Serdes; +import org.apache.kafka.streams.KafkaStreams; +import org.apache.kafka.streams.StreamsBuilder; +import org.apache.kafka.streams.StreamsConfig; +import org.apache.kafka.streams.Topology; +import org.apache.kafka.streams.kstream.KStream; +import org.apache.kafka.streams.kstream.KTable; +import org.apache.kafka.streams.kstream.Produced; +import org.junit.Ignore; +import org.junit.Test; + public class KafkaStreamsLiveTest { private String bootstrapServers = "localhost:9092"; + private Path stateDirectory; @Test @Ignore("it needs to have kafka broker running on local") public void shouldTestKafkaStreams() throws InterruptedException { - //given + // given String inputTopic = "inputTopic"; Properties streamsConfiguration = new Properties(); streamsConfiguration.put(StreamsConfig.APPLICATION_ID_CONFIG, "wordcount-live-test"); streamsConfiguration.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers); - streamsConfiguration.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName()); - 
streamsConfiguration.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName()); + streamsConfiguration.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String() + .getClass() + .getName()); + streamsConfiguration.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String() + .getClass() + .getName()); streamsConfiguration.put(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG, 1000); streamsConfiguration.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); + // Use a temporary directory for storing state, which will be automatically removed after the test. - streamsConfiguration.put(StreamsConfig.STATE_DIR_CONFIG, TestUtils.tempDirectory().getAbsolutePath()); + try { + this.stateDirectory = Files.createTempDirectory("kafka-streams"); + streamsConfiguration.put(StreamsConfig.STATE_DIR_CONFIG, this.stateDirectory.toAbsolutePath() + .toString()); + } catch (final IOException e) { + throw new UncheckedIOException("Cannot create temporary directory", e); + } - //when - KStreamBuilder builder = new KStreamBuilder(); + // when + final StreamsBuilder builder = new StreamsBuilder(); KStream textLines = builder.stream(inputTopic); Pattern pattern = Pattern.compile("\\W+", Pattern.UNICODE_CHARACTER_CLASS); - KTable wordCounts = textLines - .flatMapValues(value -> Arrays.asList(pattern.split(value.toLowerCase()))) - .groupBy((key, word) -> word) - .count(); + KTable wordCounts = textLines.flatMapValues(value -> Arrays.asList(pattern.split(value.toLowerCase()))) + .groupBy((key, word) -> word) + .count(); - wordCounts.foreach((word, count) -> System.out.println("word: " + word + " -> " + count)); + wordCounts.toStream() + .foreach((word, count) -> System.out.println("word: " + word + " -> " + count)); String outputTopic = "outputTopic"; - final Serde stringSerde = Serdes.String(); - final Serde longSerde = Serdes.Long(); - wordCounts.to(stringSerde, longSerde, outputTopic); - KafkaStreams streams = new KafkaStreams(builder, streamsConfiguration); + wordCounts.toStream() + .to(outputTopic, Produced.with(Serdes.String(), Serdes.Long())); + + final Topology topology = builder.build(); + KafkaStreams streams = new KafkaStreams(topology, streamsConfiguration); streams.start(); - //then + // then Thread.sleep(30000); streams.close(); } diff --git a/core-java-modules/core-java-16/pom.xml b/core-java-modules/core-java-16/pom.xml index 230e342f01..a8a84511db 100644 --- a/core-java-modules/core-java-16/pom.xml +++ b/core-java-modules/core-java-16/pom.xml @@ -1,48 +1,53 @@ - 4.0.0 - core-java-16 - 0.1.0-SNAPSHOT - core-java-16 - jar - http://maven.apache.org + xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" + xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> + 4.0.0 + core-java-16 + 0.1.0-SNAPSHOT + core-java-16 + jar + http://maven.apache.org - - com.baeldung - parent-modules - 1.0.0-SNAPSHOT - ../../ - + + com.baeldung + parent-modules + 1.0.0-SNAPSHOT + ../../ + - - - org.assertj - assertj-core - ${assertj.version} - test - - + + + org.assertj + assertj-core + ${assertj.version} + test + + + org.apache.commons + commons-lang3 + 3.12.0 + + - - - - org.apache.maven.plugins - maven-compiler-plugin - ${maven-compiler-plugin.version} - - ${maven.compiler.source.version} - ${maven.compiler.target.version} - - - - + + + + org.apache.maven.plugins + maven-compiler-plugin + ${maven-compiler-plugin.version} + + ${maven.compiler.source.version} + ${maven.compiler.target.version} + + + + - - 16 - 16 - 3.6.1 - + + 16 + 16 + 
3.6.1 + \ No newline at end of file diff --git a/core-java-modules/core-java-16/src/main/java/com/baeldung/java_16_features/groupingby/BlogPost.java b/core-java-modules/core-java-16/src/main/java/com/baeldung/java_16_features/groupingby/BlogPost.java new file mode 100644 index 0000000000..960a75a58e --- /dev/null +++ b/core-java-modules/core-java-16/src/main/java/com/baeldung/java_16_features/groupingby/BlogPost.java @@ -0,0 +1,43 @@ +package com.baeldung.java_16_features.groupingby; + +import java.util.IntSummaryStatistics; + + +public class BlogPost { + + private String title; + private String author; + private BlogPostType type; + private int likes; + record AuthPostTypesLikes(String author, BlogPostType type, int likes) {}; + record PostcountTitlesLikesStats(long postCount, String titles, IntSummaryStatistics likesStats){}; + record TitlesBoundedSumOfLikes(String titles, int boundedSumOfLikes) {}; + + public BlogPost(String title, String author, BlogPostType type, int likes) { + this.title = title; + this.author = author; + this.type = type; + this.likes = likes; + } + + public String getTitle() { + return title; + } + + public String getAuthor() { + return author; + } + + public BlogPostType getType() { + return type; + } + + public int getLikes() { + return likes; + } + + @Override + public String toString() { + return "BlogPost{" + "title='" + title + '\'' + ", type=" + type + ", likes=" + likes + '}'; + } +} diff --git a/core-java-modules/core-java-16/src/main/java/com/baeldung/java_16_features/groupingby/BlogPostType.java b/core-java-modules/core-java-16/src/main/java/com/baeldung/java_16_features/groupingby/BlogPostType.java new file mode 100644 index 0000000000..df38b7e1c4 --- /dev/null +++ b/core-java-modules/core-java-16/src/main/java/com/baeldung/java_16_features/groupingby/BlogPostType.java @@ -0,0 +1,5 @@ +package com.baeldung.java_16_features.groupingby; + +public enum BlogPostType { + NEWS, REVIEW, GUIDE +} diff --git a/core-java-modules/core-java-16/src/main/java/com/baeldung/java_16_features/groupingby/Tuple.java b/core-java-modules/core-java-16/src/main/java/com/baeldung/java_16_features/groupingby/Tuple.java new file mode 100644 index 0000000000..ad41207aa4 --- /dev/null +++ b/core-java-modules/core-java-16/src/main/java/com/baeldung/java_16_features/groupingby/Tuple.java @@ -0,0 +1,41 @@ +package com.baeldung.java_16_features.groupingby; + +import java.util.Objects; + +public class Tuple { + private final BlogPostType type; + private final String author; + + public Tuple(BlogPostType type, String author) { + this.type = type; + this.author = author; + } + + public BlogPostType getType() { + return type; + } + + public String getAuthor() { + return author; + } + + @Override + public boolean equals(Object o) { + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + Tuple tuple = (Tuple) o; + return type == tuple.type && author.equals(tuple.author); + } + + @Override + public int hashCode() { + return Objects.hash(type, author); + } + + @Override + public String toString() { + return "Tuple{" + "type=" + type + ", author='" + author + '\'' + '}'; + } +} diff --git a/core-java-modules/core-java-16/src/test/java/com/baeldung/java_16_features/groupingby/JavaGroupingByCollectorUnitTest.java b/core-java-modules/core-java-16/src/test/java/com/baeldung/java_16_features/groupingby/JavaGroupingByCollectorUnitTest.java new file mode 100644 index 0000000000..0dea142658 --- /dev/null +++ 
b/core-java-modules/core-java-16/src/test/java/com/baeldung/java_16_features/groupingby/JavaGroupingByCollectorUnitTest.java @@ -0,0 +1,302 @@ +package com.baeldung.java_16_features.groupingby; + +import static java.util.Comparator.comparingInt; +import static java.util.stream.Collectors.averagingInt; +import static java.util.stream.Collectors.counting; +import static java.util.stream.Collectors.groupingBy; +import static java.util.stream.Collectors.groupingByConcurrent; +import static java.util.stream.Collectors.collectingAndThen; +import static java.util.stream.Collectors.toMap; +import static java.util.stream.Collectors.joining; +import static java.util.stream.Collectors.mapping; +import static java.util.stream.Collectors.maxBy; +import static java.util.stream.Collectors.summarizingInt; +import static java.util.stream.Collectors.summingInt; +import static java.util.stream.Collectors.toList; +import static java.util.stream.Collectors.toSet; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.offset; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import java.util.Arrays; +import java.util.EnumMap; +import java.util.IntSummaryStatistics; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.Set; +import java.util.concurrent.ConcurrentMap; + +import org.apache.commons.lang3.tuple.ImmutablePair; +import org.apache.commons.lang3.tuple.Pair; +import org.junit.jupiter.api.Test; + +public class JavaGroupingByCollectorUnitTest { + + private static final List posts = Arrays.asList(new BlogPost("News item 1", "Author 1", BlogPostType.NEWS, 15), new BlogPost("Tech review 1", "Author 2", BlogPostType.REVIEW, 5), + new BlogPost("Programming guide", "Author 1", BlogPostType.GUIDE, 20), new BlogPost("News item 2", "Author 2", BlogPostType.NEWS, 35), new BlogPost("Tech review 2", "Author 1", BlogPostType.REVIEW, 15)); + + @Test + public void givenAListOfPosts_whenGroupedByType_thenGetAMapBetweenTypeAndPosts() { + Map> postsPerType = posts.stream() + .collect(groupingBy(BlogPost::getType)); + + assertEquals(2, postsPerType.get(BlogPostType.NEWS) + .size()); + assertEquals(1, postsPerType.get(BlogPostType.GUIDE) + .size()); + assertEquals(2, postsPerType.get(BlogPostType.REVIEW) + .size()); + } + + @Test + public void givenAListOfPosts_whenGroupedByTypeAndTheirTitlesAreJoinedInAString_thenGetAMapBetweenTypeAndCsvTitles() { + Map postsPerType = posts.stream() + .collect(groupingBy(BlogPost::getType, mapping(BlogPost::getTitle, joining(", ", "Post titles: [", "]")))); + + assertEquals("Post titles: [News item 1, News item 2]", postsPerType.get(BlogPostType.NEWS)); + assertEquals("Post titles: [Programming guide]", postsPerType.get(BlogPostType.GUIDE)); + assertEquals("Post titles: [Tech review 1, Tech review 2]", postsPerType.get(BlogPostType.REVIEW)); + } + + @Test + public void givenAListOfPosts_whenGroupedByTypeAndSumTheLikes_thenGetAMapBetweenTypeAndPostLikes() { + Map likesPerType = posts.stream() + .collect(groupingBy(BlogPost::getType, summingInt(BlogPost::getLikes))); + + assertEquals(50, likesPerType.get(BlogPostType.NEWS) + .intValue()); + assertEquals(20, likesPerType.get(BlogPostType.REVIEW) + .intValue()); + assertEquals(20, likesPerType.get(BlogPostType.GUIDE) + .intValue()); + } + + @Test + public void 
givenAListOfPosts_whenGroupedByTypeInAnEnumMap_thenGetAnEnumMapBetweenTypeAndPosts() { + EnumMap> postsPerType = posts.stream() + .collect(groupingBy(BlogPost::getType, () -> new EnumMap<>(BlogPostType.class), toList())); + + assertEquals(2, postsPerType.get(BlogPostType.NEWS) + .size()); + assertEquals(1, postsPerType.get(BlogPostType.GUIDE) + .size()); + assertEquals(2, postsPerType.get(BlogPostType.REVIEW) + .size()); + } + + @Test + public void givenAListOfPosts_whenGroupedByTypeInSets_thenGetAMapBetweenTypesAndSetsOfPosts() { + Map> postsPerType = posts.stream() + .collect(groupingBy(BlogPost::getType, toSet())); + + assertEquals(2, postsPerType.get(BlogPostType.NEWS) + .size()); + assertEquals(1, postsPerType.get(BlogPostType.GUIDE) + .size()); + assertEquals(2, postsPerType.get(BlogPostType.REVIEW) + .size()); + } + + @Test + public void givenAListOfPosts_whenGroupedByTypeConcurrently_thenGetAMapBetweenTypeAndPosts() { + ConcurrentMap> postsPerType = posts.parallelStream() + .collect(groupingByConcurrent(BlogPost::getType)); + + assertEquals(2, postsPerType.get(BlogPostType.NEWS) + .size()); + assertEquals(1, postsPerType.get(BlogPostType.GUIDE) + .size()); + assertEquals(2, postsPerType.get(BlogPostType.REVIEW) + .size()); + } + + @Test + public void givenAListOfPosts_whenGroupedByTypeAndAveragingLikes_thenGetAMapBetweenTypeAndAverageNumberOfLikes() { + Map averageLikesPerType = posts.stream() + .collect(groupingBy(BlogPost::getType, averagingInt(BlogPost::getLikes))); + + assertEquals(25, averageLikesPerType.get(BlogPostType.NEWS) + .intValue()); + assertEquals(20, averageLikesPerType.get(BlogPostType.GUIDE) + .intValue()); + assertEquals(10, averageLikesPerType.get(BlogPostType.REVIEW) + .intValue()); + } + + @Test + public void givenAListOfPosts_whenGroupedByTypeAndCounted_thenGetAMapBetweenTypeAndNumberOfPosts() { + Map numberOfPostsPerType = posts.stream() + .collect(groupingBy(BlogPost::getType, counting())); + + assertEquals(2, numberOfPostsPerType.get(BlogPostType.NEWS) + .intValue()); + assertEquals(1, numberOfPostsPerType.get(BlogPostType.GUIDE) + .intValue()); + assertEquals(2, numberOfPostsPerType.get(BlogPostType.REVIEW) + .intValue()); + } + + @Test + public void givenAListOfPosts_whenGroupedByTypeAndMaxingLikes_thenGetAMapBetweenTypeAndMaximumNumberOfLikes() { + Map> maxLikesPerPostType = posts.stream() + .collect(groupingBy(BlogPost::getType, maxBy(comparingInt(BlogPost::getLikes)))); + + assertTrue(maxLikesPerPostType.get(BlogPostType.NEWS) + .isPresent()); + assertEquals(35, maxLikesPerPostType.get(BlogPostType.NEWS) + .get() + .getLikes()); + + assertTrue(maxLikesPerPostType.get(BlogPostType.GUIDE) + .isPresent()); + assertEquals(20, maxLikesPerPostType.get(BlogPostType.GUIDE) + .get() + .getLikes()); + + assertTrue(maxLikesPerPostType.get(BlogPostType.REVIEW) + .isPresent()); + assertEquals(15, maxLikesPerPostType.get(BlogPostType.REVIEW) + .get() + .getLikes()); + } + + @Test + public void givenAListOfPosts_whenGroupedByAuthorAndThenByType_thenGetAMapBetweenAuthorAndMapsBetweenTypeAndBlogPosts() { + Map>> map = posts.stream() + .collect(groupingBy(BlogPost::getAuthor, groupingBy(BlogPost::getType))); + + assertEquals(1, map.get("Author 1") + .get(BlogPostType.NEWS) + .size()); + assertEquals(1, map.get("Author 1") + .get(BlogPostType.GUIDE) + .size()); + assertEquals(1, map.get("Author 1") + .get(BlogPostType.REVIEW) + .size()); + + assertEquals(1, map.get("Author 2") + .get(BlogPostType.NEWS) + .size()); + assertEquals(1, map.get("Author 2") + 
.get(BlogPostType.REVIEW) + .size()); + assertNull(map.get("Author 2") + .get(BlogPostType.GUIDE)); + } + + @Test + public void givenAListOfPosts_whenGroupedByTypeAndSummarizingLikes_thenGetAMapBetweenTypeAndSummary() { + Map likeStatisticsPerType = posts.stream() + .collect(groupingBy(BlogPost::getType, summarizingInt(BlogPost::getLikes))); + + IntSummaryStatistics newsLikeStatistics = likeStatisticsPerType.get(BlogPostType.NEWS); + + assertEquals(2, newsLikeStatistics.getCount()); + assertEquals(50, newsLikeStatistics.getSum()); + assertEquals(25.0, newsLikeStatistics.getAverage(), 0.001); + assertEquals(35, newsLikeStatistics.getMax()); + assertEquals(15, newsLikeStatistics.getMin()); + } + + @Test + public void givenAListOfPosts_whenGroupedByComplexMapPairKeyType_thenGetAMapBetweenPairAndList() { + + Map, List> postsPerTypeAndAuthor = posts.stream() + .collect(groupingBy(post -> new ImmutablePair<>(post.getType(), post.getAuthor()))); + + List result = postsPerTypeAndAuthor.get(new ImmutablePair<>(BlogPostType.GUIDE, "Author 1")); + + assertThat(result.size()).isEqualTo(1); + + BlogPost blogPost = result.get(0); + + assertThat(blogPost.getTitle()).isEqualTo("Programming guide"); + assertThat(blogPost.getType()).isEqualTo(BlogPostType.GUIDE); + assertThat(blogPost.getAuthor()).isEqualTo("Author 1"); + } + + @Test + public void givenAListOfPosts_whenGroupedByComplexMapKeyType_thenGetAMapBetweenTupleAndList() { + + Map> postsPerTypeAndAuthor = posts.stream() + .collect(groupingBy(post -> new Tuple(post.getType(), post.getAuthor()))); + + List result = postsPerTypeAndAuthor.get(new Tuple(BlogPostType.GUIDE, "Author 1")); + + assertThat(result.size()).isEqualTo(1); + + BlogPost blogPost = result.get(0); + + assertThat(blogPost.getTitle()).isEqualTo("Programming guide"); + assertThat(blogPost.getType()).isEqualTo(BlogPostType.GUIDE); + assertThat(blogPost.getAuthor()).isEqualTo("Author 1"); + } + + @Test + public void givenAListOfPosts_whenGroupedByRecord_thenGetAMapBetweenRecordAndList() { + + Map> postsPerTypeAndAuthor = posts.stream() + .collect(groupingBy(post -> new BlogPost.AuthPostTypesLikes(post.getAuthor(), post.getType(), post.getLikes()))); + + List result = postsPerTypeAndAuthor.get(new BlogPost.AuthPostTypesLikes("Author 1", BlogPostType.GUIDE, 20)); + + assertThat(result.size()).isEqualTo(1); + + BlogPost blogPost = result.get(0); + + assertThat(blogPost.getTitle()).isEqualTo("Programming guide"); + assertThat(blogPost.getType()).isEqualTo(BlogPostType.GUIDE); + assertThat(blogPost.getAuthor()).isEqualTo("Author 1"); + assertThat(blogPost.getLikes()).isEqualTo(20); + } + + @Test + public void givenListOfPosts_whenGroupedByAuthor_thenGetAMapUsingCollectingAndThen() { + + Map postsPerAuthor = posts.stream() + .collect(groupingBy(BlogPost::getAuthor, collectingAndThen(toList(), list -> { + long count = list.stream() + .map(BlogPost::getTitle) + .collect(counting()); + String titles = list.stream() + .map(BlogPost::getTitle) + .collect(joining(" : ")); + IntSummaryStatistics summary = list.stream() + .collect(summarizingInt(BlogPost::getLikes)); + return new BlogPost.PostcountTitlesLikesStats(count, titles, summary); + }))); + + BlogPost.PostcountTitlesLikesStats result = postsPerAuthor.get("Author 1"); + assertThat(result.postCount()).isEqualTo(3L); + assertThat(result.titles()).isEqualTo("News item 1 : Programming guide : Tech review 2"); + assertThat(result.likesStats().getMax()).isEqualTo(20); + assertThat(result.likesStats().getMin()).isEqualTo(15); + 
assertThat(result.likesStats().getAverage()).isEqualTo(16.666d, offset(0.001d)); + } + + @Test + public void givenListOfPosts_whenGroupedByAuthor_thenGetAMapUsingToMap() { + + int maxValLikes = 17; + Map postsPerAuthor = posts.stream() + .collect(toMap(BlogPost::getAuthor, post -> { + int likes = (post.getLikes() > maxValLikes) ? maxValLikes : post.getLikes(); + return new BlogPost.TitlesBoundedSumOfLikes(post.getTitle(), likes); + }, (u1, u2) -> { + int likes = (u2.boundedSumOfLikes() > maxValLikes) ? maxValLikes : u2.boundedSumOfLikes(); + return new BlogPost.TitlesBoundedSumOfLikes(u1.titles() + .toUpperCase() + " : " + + u2.titles() + .toUpperCase(), + u1.boundedSumOfLikes() + likes); + })); + + BlogPost.TitlesBoundedSumOfLikes result = postsPerAuthor.get("Author 1"); + assertThat(result.titles()).isEqualTo("NEWS ITEM 1 : PROGRAMMING GUIDE : TECH REVIEW 2"); + assertThat(result.boundedSumOfLikes()).isEqualTo(47); + } +} diff --git a/core-java-modules/core-java-concurrency-basic-2/src/main/java/com/baeldung/concurrent/stopexecution/StopExecution.java b/core-java-modules/core-java-concurrency-basic-2/src/main/java/com/baeldung/concurrent/stopexecution/StopExecution.java index 20f66da5da..a3c7dc02db 100644 --- a/core-java-modules/core-java-concurrency-basic-2/src/main/java/com/baeldung/concurrent/stopexecution/StopExecution.java +++ b/core-java-modules/core-java-concurrency-basic-2/src/main/java/com/baeldung/concurrent/stopexecution/StopExecution.java @@ -189,21 +189,8 @@ public class StopExecution { longRunningSort(); } - private void longRunningOperation() { - LOG.info("long Running operation started"); - - try { - //Thread.sleep(500); - longFileRead(); - LOG.info("long running operation finished"); - } catch (InterruptedException e) { - LOG.info("long Running operation interrupted"); - } - } - private void longRunningSort() { - LOG.info("long Running task started"); - // Do you long running calculation here + LOG.info("Long running task started"); int len = 100000; List numbers = new ArrayList<>(); try { @@ -229,25 +216,7 @@ public class StopExecution { LOG.info("Index position: " + i); LOG.info("Long running task finished"); } catch (InterruptedException e) { - LOG.info("long Running operation interrupted"); - } - } - - private void longFileRead() throws InterruptedException { - String file = "input.txt"; - ClassLoader classloader = getClass().getClassLoader(); - - try (InputStream inputStream = classloader.getResourceAsStream(file)) { - Reader inputStreamReader = new InputStreamReader(inputStream); - - int data = inputStreamReader.read(); - while (data != -1) { - char theChar = (char) data; - data = inputStreamReader.read(); - throwExceptionOnThreadInterrupt(); - } - } catch (IOException e) { - LOG.error("Exception: ", e); + LOG.info("Long running operation interrupted"); } } diff --git a/core-java-modules/core-java-date-operations-1/src/main/java/com/baeldung/java9/time/TimeApi.java b/core-java-modules/core-java-date-operations-1/src/main/java/com/baeldung/java9/time/TimeApi.java index ee4e35a77b..dee3135391 100644 --- a/core-java-modules/core-java-date-operations-1/src/main/java/com/baeldung/java9/time/TimeApi.java +++ b/core-java-modules/core-java-date-operations-1/src/main/java/com/baeldung/java9/time/TimeApi.java @@ -13,12 +13,9 @@ import java.util.stream.IntStream; public class TimeApi { public static List getDatesBetweenUsingJava7(Date startDate, Date endDate) { - List datesInRange = new ArrayList(); - Calendar calendar = new GregorianCalendar(); - calendar.setTime(startDate); 
diff --git a/core-java-modules/core-java-concurrency-basic-2/src/main/java/com/baeldung/concurrent/stopexecution/StopExecution.java b/core-java-modules/core-java-concurrency-basic-2/src/main/java/com/baeldung/concurrent/stopexecution/StopExecution.java
index 20f66da5da..a3c7dc02db 100644
--- a/core-java-modules/core-java-concurrency-basic-2/src/main/java/com/baeldung/concurrent/stopexecution/StopExecution.java
+++ b/core-java-modules/core-java-concurrency-basic-2/src/main/java/com/baeldung/concurrent/stopexecution/StopExecution.java
@@ -189,21 +189,8 @@ public class StopExecution {
         longRunningSort();
     }
 
-    private void longRunningOperation() {
-        LOG.info("long Running operation started");
-
-        try {
-            //Thread.sleep(500);
-            longFileRead();
-            LOG.info("long running operation finished");
-        } catch (InterruptedException e) {
-            LOG.info("long Running operation interrupted");
-        }
-    }
-
     private void longRunningSort() {
-        LOG.info("long Running task started");
-        // Do you long running calculation here
+        LOG.info("Long running task started");
         int len = 100000;
         List<Integer> numbers = new ArrayList<>();
         try {
@@ -229,25 +216,7 @@ public class StopExecution {
             LOG.info("Index position: " + i);
             LOG.info("Long running task finished");
         } catch (InterruptedException e) {
-            LOG.info("long Running operation interrupted");
-        }
-    }
-
-    private void longFileRead() throws InterruptedException {
-        String file = "input.txt";
-        ClassLoader classloader = getClass().getClassLoader();
-
-        try (InputStream inputStream = classloader.getResourceAsStream(file)) {
-            Reader inputStreamReader = new InputStreamReader(inputStream);
-
-            int data = inputStreamReader.read();
-            while (data != -1) {
-                char theChar = (char) data;
-                data = inputStreamReader.read();
-                throwExceptionOnThreadInterrupt();
-            }
-        } catch (IOException e) {
-            LOG.error("Exception: ", e);
+            LOG.info("Long running operation interrupted");
         }
     }
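For context, the surviving longRunningSort stops cooperatively: it only notices interruption at the points where it checks the interrupt flag or handles InterruptedException. A minimal standalone sketch of that pattern (class and method names here are illustrative, not taken from the module):

// Cooperative interruption: poll the flag on each iteration and unwind via
// InterruptedException, restoring the flag for callers.
public class InterruptibleTask implements Runnable {

    @Override
    public void run() {
        try {
            for (int i = 0; i < 1_000_000; i++) {
                throwIfInterrupted(); // check once per unit of work
                // ... one unit of long-running work ...
            }
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt(); // preserve the interrupt status
        }
    }

    private static void throwIfInterrupted() throws InterruptedException {
        if (Thread.interrupted()) {
            throw new InterruptedException("task interrupted");
        }
    }
}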
diff --git a/core-java-modules/core-java-date-operations-1/src/main/java/com/baeldung/java9/time/TimeApi.java b/core-java-modules/core-java-date-operations-1/src/main/java/com/baeldung/java9/time/TimeApi.java
index ee4e35a77b..dee3135391 100644
--- a/core-java-modules/core-java-date-operations-1/src/main/java/com/baeldung/java9/time/TimeApi.java
+++ b/core-java-modules/core-java-date-operations-1/src/main/java/com/baeldung/java9/time/TimeApi.java
@@ -13,12 +13,9 @@ import java.util.stream.IntStream;
 public class TimeApi {
 
     public static List<Date> getDatesBetweenUsingJava7(Date startDate, Date endDate) {
-        List<Date> datesInRange = new ArrayList<Date>();
-        Calendar calendar = new GregorianCalendar();
-        calendar.setTime(startDate);
-
-        Calendar endCalendar = new GregorianCalendar();
-        endCalendar.setTime(endDate);
+        List<Date> datesInRange = new ArrayList<>();
+        Calendar calendar = getCalendarWithoutTime(startDate);
+        Calendar endCalendar = getCalendarWithoutTime(endDate);
 
         while (calendar.before(endCalendar)) {
             Date result = calendar.getTime();
@@ -40,4 +37,15 @@ public class TimeApi {
         return startDate.datesUntil(endDate).collect(Collectors.toList());
     }
 
+    private static Calendar getCalendarWithoutTime(Date date) {
+        Calendar calendar = new GregorianCalendar();
+        calendar.setTime(date);
+        calendar.set(Calendar.HOUR, 0);
+        calendar.set(Calendar.HOUR_OF_DAY, 0);
+        calendar.set(Calendar.MINUTE, 0);
+        calendar.set(Calendar.SECOND, 0);
+        calendar.set(Calendar.MILLISECOND, 0);
+        return calendar;
+    }
+
 }
diff --git a/core-java-modules/core-java-date-operations-1/src/test/java/com/baeldung/java9/time/TimeApiUnitTest.java b/core-java-modules/core-java-date-operations-1/src/test/java/com/baeldung/java9/time/TimeApiUnitTest.java
index 8813870c2b..416a621286 100644
--- a/core-java-modules/core-java-date-operations-1/src/test/java/com/baeldung/java9/time/TimeApiUnitTest.java
+++ b/core-java-modules/core-java-date-operations-1/src/test/java/com/baeldung/java9/time/TimeApiUnitTest.java
@@ -1,12 +1,13 @@
 package com.baeldung.java9.time;
 
+import org.junit.Test;
+
 import java.time.LocalDate;
 import java.util.Calendar;
 import java.util.Date;
 import java.util.List;
 
-import static org.junit.Assert.assertEquals;
-import org.junit.Test;
+import static org.assertj.core.api.Assertions.assertThat;
 
 public class TimeApiUnitTest {
 
@@ -18,19 +19,18 @@ public class TimeApiUnitTest {
         Date endDate = endCalendar.getTime();
 
         List<Date> dates = TimeApi.getDatesBetweenUsingJava7(startDate, endDate);
-        assertEquals(dates.size(), 2);
+
+        assertThat(dates).hasSize(2);
 
         Calendar calendar = Calendar.getInstance();
-        Date date1 = calendar.getTime();
-        assertEquals(dates.get(0).getDay(), date1.getDay());
-        assertEquals(dates.get(0).getMonth(), date1.getMonth());
-        assertEquals(dates.get(0).getYear(), date1.getYear());
+        Date expectedDate1 = calendar.getTime();
+        assertThat(dates.get(0)).isInSameDayAs(expectedDate1);
+        assertThatTimeFieldsAreZero(dates.get(0));
 
         calendar.add(Calendar.DATE, 1);
-        Date date2 = calendar.getTime();
-        assertEquals(dates.get(1).getDay(), date2.getDay());
-        assertEquals(dates.get(1).getMonth(), date2.getMonth());
-        assertEquals(dates.get(1).getYear(), date2.getYear());
+        Date expectedDate2 = calendar.getTime();
+        assertThat(dates.get(1)).isInSameDayAs(expectedDate2);
+        assertThatTimeFieldsAreZero(dates.get(1));
     }
 
     @Test
@@ -39,9 +39,8 @@ public class TimeApiUnitTest {
         LocalDate endDate = LocalDate.now().plusDays(2);
 
         List<LocalDate> dates = TimeApi.getDatesBetweenUsingJava8(startDate, endDate);
-        assertEquals(dates.size(), 2);
-        assertEquals(dates.get(0), LocalDate.now());
-        assertEquals(dates.get(1), LocalDate.now().plusDays(1));
+
+        assertThat(dates).containsExactly(LocalDate.now(), LocalDate.now().plusDays(1));
     }
 
     @Test
@@ -50,9 +49,15 @@ public class TimeApiUnitTest {
         LocalDate endDate = LocalDate.now().plusDays(2);
 
         List<LocalDate> dates = TimeApi.getDatesBetweenUsingJava9(startDate, endDate);
-        assertEquals(dates.size(), 2);
-        assertEquals(dates.get(0), LocalDate.now());
-        assertEquals(dates.get(1), LocalDate.now().plusDays(1));
+
+        assertThat(dates).containsExactly(LocalDate.now(), LocalDate.now().plusDays(1));
+    }
+
+    private static void assertThatTimeFieldsAreZero(Date date) {
+        assertThat(date).hasHourOfDay(0);
+        assertThat(date).hasMinute(0);
+        assertThat(date).hasSecond(0);
+        assertThat(date).hasMillisecond(0);
     }
 }
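All three getDatesBetween variants treat the range as start-inclusive and end-exclusive, which is why a two-day window yields exactly two dates in the assertions above. A standalone illustration using the Java 9 API:

// datesUntil produces a half-open range: the start date is included, the end date is not.
import java.time.LocalDate;
import java.util.List;
import java.util.stream.Collectors;

public class DatesUntilDemo {
    public static void main(String[] args) {
        LocalDate start = LocalDate.of(2021, 1, 1);
        LocalDate end = start.plusDays(3);

        List<LocalDate> dates = start.datesUntil(end).collect(Collectors.toList());

        // [2021-01-01, 2021-01-02, 2021-01-03]: three dates, end excluded
        System.out.println(dates);
    }
}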
diff --git a/core-java-modules/core-java-lang-oop-constructors/README.md b/core-java-modules/core-java-lang-oop-constructors/README.md
index 4bec8db256..69ade3e25a 100644
--- a/core-java-modules/core-java-lang-oop-constructors/README.md
+++ b/core-java-modules/core-java-lang-oop-constructors/README.md
@@ -7,3 +7,4 @@ This module contains article about constructors in Java
 - [Java Copy Constructor](https://www.baeldung.com/java-copy-constructor)
 - [Cannot Reference “X” Before Supertype Constructor Has Been Called](https://www.baeldung.com/java-cannot-reference-x-before-supertype-constructor-error)
 - [Private Constructors in Java](https://www.baeldung.com/java-private-constructors)
+- [Throwing Exceptions in Constructors](https://www.baeldung.com/java-constructors-exceptions)
diff --git a/core-java-modules/core-java-os/src/test/java/com/baeldung/grep/GrepWithUnix4JIntegrationTest.java b/core-java-modules/core-java-os/src/test/java/com/baeldung/grep/GrepWithUnix4JIntegrationTest.java
index 3ea7acf620..5c9da0cc9e 100644
--- a/core-java-modules/core-java-os/src/test/java/com/baeldung/grep/GrepWithUnix4JIntegrationTest.java
+++ b/core-java-modules/core-java-os/src/test/java/com/baeldung/grep/GrepWithUnix4JIntegrationTest.java
@@ -25,9 +25,9 @@ public class GrepWithUnix4JIntegrationTest {
 
     @Test
     public void whenGrepWithSimpleString_thenCorrect() {
-        int expectedLineCount = 4;
+        int expectedLineCount = 5;
 
-        // grep "NINETEEN" dictionary.txt
+        // grep "NINETEEN" dictionary.in
         List<Line> lines = Unix4j.grep("NINETEEN", fileToGrep).toLineList();
 
         assertEquals(expectedLineCount, lines.size());
@@ -35,9 +35,9 @@ public class GrepWithUnix4JIntegrationTest {
 
     @Test
     public void whenInverseGrepWithSimpleString_thenCorrect() {
-        int expectedLineCount = 178687;
+        int expectedLineCount = 8;
 
-        // grep -v "NINETEEN" dictionary.txt
+        // grep -v "NINETEEN" dictionary.in
         List<Line> lines = grep(Options.v, "NINETEEN", fileToGrep).toLineList();
 
         assertEquals(expectedLineCount, lines.size());
@@ -45,9 +45,9 @@ public class GrepWithUnix4JIntegrationTest {
 
     @Test
     public void whenGrepWithRegex_thenCorrect() {
-        int expectedLineCount = 151;
+        int expectedLineCount = 5;
 
-        // grep -c ".*?NINE.*?" dictionary.txt
+        // grep -c ".*?NINE.*?" dictionary.in
         String patternCount = grep(Options.c, ".*?NINE.*?", fileToGrep).cut(fields, ":", 1).toStringResult();
 
         assertEquals(expectedLineCount, Integer.parseInt(patternCount));
diff --git a/core-java-modules/core-java-os/src/test/resources/dictionary.in b/core-java-modules/core-java-os/src/test/resources/dictionary.in
new file mode 100644
index 0000000000..9e6c74ecdb
--- /dev/null
+++ b/core-java-modules/core-java-os/src/test/resources/dictionary.in
@@ -0,0 +1,13 @@
+EIGHTTEEN
+EIGHTTEENS
+EIGHTTEENTH
+EIGHTTEENTHS
+NINETEEN
+NINETEENS
+NINETEENTH
+NINETEENTHS
+TWENTY
+TWENTHIES
+TWENTHIETH
+TWENTHIETHS
+TWENTYNINETEEN
\ No newline at end of file
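With this 13-line fixture, the expected counts can be verified by hand: 5 lines contain NINETEEN (including TWENTYNINETEEN), so the inverse match must find 13 - 5 = 8, and the same 5 lines satisfy .*?NINE.*?. A rough standalone cross-check; the fixture path is illustrative, and Unix4j's cat builder is assumed to behave like the grep builder used in the test:

// Sanity-check the fixture arithmetic with Unix4j (path below is an assumption).
import java.io.File;
import java.util.List;

import org.unix4j.Unix4j;
import org.unix4j.line.Line;

public class DictionaryCountsDemo {
    public static void main(String[] args) {
        File fixture = new File("src/test/resources/dictionary.in");

        List<Line> all = Unix4j.cat(fixture).toLineList();
        List<Line> matches = Unix4j.grep("NINETEEN", fixture).toLineList();

        // 13 lines total, 5 containing NINETEEN, so grep -v must match 13 - 5 = 8.
        System.out.println(all.size() - matches.size()); // 8
    }
}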
diff --git a/core-java-modules/core-java-string-operations-3/README.md b/core-java-modules/core-java-string-operations-3/README.md
index ad4ada3a68..ff6ac51fab 100644
--- a/core-java-modules/core-java-string-operations-3/README.md
+++ b/core-java-modules/core-java-string-operations-3/README.md
@@ -4,3 +4,4 @@
 - [Java (String) or .toString()?](https://www.baeldung.com/java-string-casting-vs-tostring)
 - [Split Java String by Newline](https://www.baeldung.com/java-string-split-by-newline)
 - [Split a String in Java and Keep the Delimiters](https://www.baeldung.com/java-split-string-keep-delimiters)
+- [Validate String as Filename in Java](https://www.baeldung.com/java-validate-filename)
diff --git a/drools/src/main/resources/logback.xml b/drools/src/main/resources/logback.xml
index 7d900d8ea8..b928039804 100644
--- a/drools/src/main/resources/logback.xml
+++ b/drools/src/main/resources/logback.xml
@@ -6,7 +6,8 @@
-
+
+
diff --git a/gradle/gradle-dependency-management/build.gradle b/gradle/gradle-dependency-management/build.gradle
index 88ed84f4b1..787b23c382 100644
--- a/gradle/gradle-dependency-management/build.gradle
+++ b/gradle/gradle-dependency-management/build.gradle
@@ -3,6 +3,11 @@ plugins {
     id 'org.springframework.boot' version '2.3.4.RELEASE'
 }
 
+ext {
+    springBootVersion = '2.3.4.RELEASE'
+    lombokVersion = '1.18.14'
+}
+
 group = 'com.gradle'
 version = '1.0.0'
 sourceCompatibility = '14'
@@ -12,19 +17,16 @@ repositories {
 }
 
 dependencies {
-    implementation 'org.springframework.boot:spring-boot-starter:2.3.4.RELEASE'
+    implementation "org.springframework.boot:spring-boot-starter:${springBootVersion}"
 
-    testImplementation 'org.springframework.boot:spring-boot-starter-test:2.3.4.RELEASE'
-
-    compileOnly 'org.projectlombok:lombok:1.18.14'
-
-    testCompileOnly 'org.projectlombok:lombok:1.18.14'
+    compileOnly "org.projectlombok:lombok:${lombokVersion}"
 
     runtimeOnly files('libs/sampleOne.jar', 'libs/sampleTwo.jar')
-
-    runtimeOnly fileTree('libs') { include '*.jar' }
+    runtimeOnly fileTree("libs") { include "*.jar" }
 
-//    implementation gradleApi()
+    testImplementation "org.springframework.boot:spring-boot-starter-test:${springBootVersion}"
+
+    testCompileOnly "org.projectlombok:lombok:${lombokVersion}"
 }
 
 test {
diff --git a/gradle/gradle-dependency-management/gradle/wrapper/gradle-wrapper.jar b/gradle/gradle-dependency-management/gradle/wrapper/gradle-wrapper.jar
new file mode 100644
index 0000000000..7454180f2a
Binary files /dev/null and b/gradle/gradle-dependency-management/gradle/wrapper/gradle-wrapper.jar differ
diff --git a/gradle/gradle-dependency-management/gradlew b/gradle/gradle-dependency-management/gradlew
new file mode 100755
index 0000000000..1b6c787337
--- /dev/null
+++ b/gradle/gradle-dependency-management/gradlew
@@ -0,0 +1,234 @@
+#!/bin/sh
+
+#
+# Copyright © 2015-2021 the original authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+##############################################################################
+#
+#   Gradle start up script for POSIX generated by Gradle.
+#
+#   Important for running:
+#
+#   (1) You need a POSIX-compliant shell to run this script. If your /bin/sh is
+#       noncompliant, but you have some other compliant shell such as ksh or
+#       bash, then to run this script, type that shell name before the whole
+#       command line, like:
+#
+#           ksh Gradle
+#
+#       Busybox and similar reduced shells will NOT work, because this script
+#       requires all of these POSIX shell features:
+#         * functions;
+#         * expansions «$var», «${var}», «${var:-default}», «${var+SET}»,
+#           «${var#prefix}», «${var%suffix}», and «$( cmd )»;
+#         * compound commands having a testable exit status, especially «case»;
+#         * various built-in commands including «command», «set», and «ulimit».
+#
+#   Important for patching:
+#
+#   (2) This script targets any POSIX shell, so it avoids extensions provided
+#       by Bash, Ksh, etc; in particular arrays are avoided.
+#
+#       The "traditional" practice of packing multiple parameters into a
+#       space-separated string is a well documented source of bugs and security
+#       problems, so this is (mostly) avoided, by progressively accumulating
+#       options in "$@", and eventually passing that to Java.
+#
+#       Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS,
+#       and GRADLE_OPTS) rely on word-splitting, this is performed explicitly;
+#       see the in-line comments for details.
+#
+#       There are tweaks for specific operating systems such as AIX, CygWin,
+#       Darwin, MinGW, and NonStop.
+#
+#   (3) This script is generated from the Groovy template
+#       https://github.com/gradle/gradle/blob/master/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt
+#       within the Gradle project.
+#
+#       You can find Gradle at https://github.com/gradle/gradle/.
+#
+##############################################################################
+
+# Attempt to set APP_HOME
+
+# Resolve links: $0 may be a link
+app_path=$0
+
+# Need this for daisy-chained symlinks.
+while
+    APP_HOME=${app_path%"${app_path##*/}"}  # leaves a trailing /; empty if no leading path
+    [ -h "$app_path" ]
+do
+    ls=$( ls -ld "$app_path" )
+    link=${ls#*' -> '}
+    case $link in             #(
+      /*)   app_path=$link ;; #(
+      *)    app_path=$APP_HOME$link ;;
+    esac
+done
+
+APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit
+
+APP_NAME="Gradle"
+APP_BASE_NAME=${0##*/}
+
+# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
+DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
+
+# Use the maximum available, or set MAX_FD != -1 to use that value.
+MAX_FD=maximum
+
+warn () {
+    echo "$*"
+} >&2
+
+die () {
+    echo
+    echo "$*"
+    echo
+    exit 1
+} >&2
+
+# OS specific support (must be 'true' or 'false').
+cygwin=false
+msys=false
+darwin=false
+nonstop=false
+case "$( uname )" in                #(
+  CYGWIN* )         cygwin=true  ;; #(
+  Darwin* )         darwin=true  ;; #(
+  MSYS* | MINGW* )  msys=true    ;; #(
+  NONSTOP* )        nonstop=true ;;
+esac
+
+CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
+
+
+# Determine the Java command to use to start the JVM.
+if [ -n "$JAVA_HOME" ] ; then
+    if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
+        # IBM's JDK on AIX uses strange locations for the executables
+        JAVACMD=$JAVA_HOME/jre/sh/java
+    else
+        JAVACMD=$JAVA_HOME/bin/java
+    fi
+    if [ ! -x "$JAVACMD" ] ; then
+        die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
+
+Please set the JAVA_HOME variable in your environment to match the
+location of your Java installation."
+    fi
+else
+    JAVACMD=java
+    which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
+
+Please set the JAVA_HOME variable in your environment to match the
+location of your Java installation."
+fi
+
+# Increase the maximum file descriptors if we can.
+if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then
+    case $MAX_FD in #(
+      max*)
+        MAX_FD=$( ulimit -H -n ) ||
+            warn "Could not query maximum file descriptor limit"
+    esac
+    case $MAX_FD in  #(
+      '' | soft) :;; #(
+      *)
+        ulimit -n "$MAX_FD" ||
+            warn "Could not set maximum file descriptor limit to $MAX_FD"
+    esac
+fi
+
+# Collect all arguments for the java command, stacking in reverse order:
+#   * args from the command line
+#   * the main class name
+#   * -classpath
+#   * -D...appname settings
+#   * --module-path (only if needed)
+#   * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables.
+
+# For Cygwin or MSYS, switch paths to Windows format before running java
+if "$cygwin" || "$msys" ; then
+    APP_HOME=$( cygpath --path --mixed "$APP_HOME" )
+    CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" )
+
+    JAVACMD=$( cygpath --unix "$JAVACMD" )
+
+    # Now convert the arguments - kludge to limit ourselves to /bin/sh
+    for arg do
+        if
+            case $arg in                                #(
+              -*)   false ;;                            # don't mess with options #(
+              /?*)  t=${arg#/} t=/${t%%/*}              # looks like a POSIX filepath
+                    [ -e "$t" ] ;;                      #(
+              *)    false ;;
+            esac
+        then
+            arg=$( cygpath --path --ignore --mixed "$arg" )
+        fi
+        # Roll the args list around exactly as many times as the number of
+        # args, so each arg winds up back in the position where it started, but
+        # possibly modified.
+        #
+        # NB: a `for` loop captures its iteration list before it begins, so
+        # changing the positional parameters here affects neither the number of
+        # iterations, nor the values presented in `arg`.
+        shift                   # remove old arg
+        set -- "$@" "$arg"      # push replacement arg
+    done
+fi
+
+# Collect all arguments for the java command;
+#   * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of
+#     shell script including quotes and variable substitutions, so put them in
+#     double quotes to make sure that they get re-expanded; and
+#   * put everything else in single quotes, so that it's not re-expanded.
+
+set -- \
+        "-Dorg.gradle.appname=$APP_BASE_NAME" \
+        -classpath "$CLASSPATH" \
+        org.gradle.wrapper.GradleWrapperMain \
+        "$@"
+
+# Use "xargs" to parse quoted args.
+#
+# With -n1 it outputs one arg per line, with the quotes and backslashes removed.
+#
+# In Bash we could simply go:
+#
+#   readarray ARGS < <( xargs -n1 <<<"$var" ) &&
+#   set -- "${ARGS[@]}" "$@"
+#
+# but POSIX shell has neither arrays nor command substitution, so instead we
+# post-process each arg (as a line of input to sed) to backslash-escape any
+# character that might be a shell metacharacter, then use eval to reverse
+# that process (while maintaining the separation between arguments), and wrap
+# the whole thing up as a single "set" statement.
+#
+# This will of course break if any of these variables contains a newline or
+# an unmatched quote.
+#
+
+eval "set -- $(
+        printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" |
+        xargs -n1 |
+        sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' |
+        tr '\n' ' '
+    )" '"$@"'
+
+exec "$JAVACMD" "$@"
diff --git a/gradle/gradle-dependency-management/gradlew.bat b/gradle/gradle-dependency-management/gradlew.bat
new file mode 100644
index 0000000000..ac1b06f938
--- /dev/null
+++ b/gradle/gradle-dependency-management/gradlew.bat
@@ -0,0 +1,89 @@
+@rem
+@rem Copyright 2015 the original author or authors.
+@rem
+@rem Licensed under the Apache License, Version 2.0 (the "License");
+@rem you may not use this file except in compliance with the License.
+@rem You may obtain a copy of the License at
+@rem
+@rem      https://www.apache.org/licenses/LICENSE-2.0
+@rem
+@rem Unless required by applicable law or agreed to in writing, software
+@rem distributed under the License is distributed on an "AS IS" BASIS,
+@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+@rem See the License for the specific language governing permissions and
+@rem limitations under the License.
+@rem
+
+@if "%DEBUG%" == "" @echo off
+@rem ##########################################################################
+@rem
+@rem  Gradle startup script for Windows
+@rem
+@rem ##########################################################################
+
+@rem Set local scope for the variables with windows NT shell
+if "%OS%"=="Windows_NT" setlocal
+
+set DIRNAME=%~dp0
+if "%DIRNAME%" == "" set DIRNAME=.
+set APP_BASE_NAME=%~n0
+set APP_HOME=%DIRNAME%
+
+@rem Resolve any "." and ".." in APP_HOME to make it shorter.
+for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi
+
+@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
+set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m"
+
+@rem Find java.exe
+if defined JAVA_HOME goto findJavaFromJavaHome
+
+set JAVA_EXE=java.exe
+%JAVA_EXE% -version >NUL 2>&1
+if "%ERRORLEVEL%" == "0" goto execute
+
+echo.
+echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
+echo.
+echo Please set the JAVA_HOME variable in your environment to match the
+echo location of your Java installation.
+
+goto fail
+
+:findJavaFromJavaHome
+set JAVA_HOME=%JAVA_HOME:"=%
+set JAVA_EXE=%JAVA_HOME%/bin/java.exe
+
+if exist "%JAVA_EXE%" goto execute
+
+echo.
+echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
+echo.
+echo Please set the JAVA_HOME variable in your environment to match the
+echo location of your Java installation.
+
+goto fail
+
+:execute
+@rem Setup the command line
+
+set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
+
+
+@rem Execute Gradle
+"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %*
+
+:end
+@rem End local scope for the variables with windows NT shell
+if "%ERRORLEVEL%"=="0" goto mainEnd
+
+:fail
+rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
+rem the _cmd.exe /c_ return code!
+if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
+exit /b 1
+
+:mainEnd
+if "%OS%"=="Windows_NT" endlocal
+
+:omega
diff --git a/gradle/gradle-dependency-management/libs/sampleOne.jar b/gradle/gradle-dependency-management/libs/sampleOne.jar
new file mode 100644
index 0000000000..b8f564975e
Binary files /dev/null and b/gradle/gradle-dependency-management/libs/sampleOne.jar differ
diff --git a/gradle/gradle-dependency-management/libs/sampleTwo.jar b/gradle/gradle-dependency-management/libs/sampleTwo.jar
new file mode 100644
index 0000000000..b8f564975e
Binary files /dev/null and b/gradle/gradle-dependency-management/libs/sampleTwo.jar differ
diff --git a/javafx/src/main/java/com/baeldung/listview/ExampleController.java b/javafx/src/main/java/com/baeldung/listview/ExampleController.java
new file mode 100644
index 0000000000..c02fa68d2e
--- /dev/null
+++ b/javafx/src/main/java/com/baeldung/listview/ExampleController.java
@@ -0,0 +1,38 @@
+package com.baeldung.listview;
+
+import com.baeldung.listview.cellfactory.CheckboxCellFactory;
+import com.baeldung.listview.cellfactory.PersonCellFactory;
+import javafx.collections.FXCollections;
+import javafx.collections.ObservableList;
+import javafx.fxml.FXML;
+import javafx.fxml.Initializable;
+import javafx.scene.control.ListView;
+
+import java.net.URL;
+import java.util.ResourceBundle;
+
+public class ExampleController implements Initializable {
+    @FXML
+    private ListView<Person> listView;
+
+    @Override
+    public void initialize(URL location, ResourceBundle resources) {
+        ObservableList<Person> wordsList = FXCollections.observableArrayList();
+        wordsList.add(new Person("Isaac", "Newton"));
+        wordsList.add(new Person("Albert", "Einstein"));
+        wordsList.add(new Person("Ludwig", "Boltzmann"));
+        listView.setItems(wordsList);
+    }
+
+    public void defaultButtonClick() {
+        listView.setCellFactory(null);
+    }
+
+    public void cellFactoryButtonClick() {
+        listView.setCellFactory(new PersonCellFactory());
+    }
+
+    public void checkboxCellFactoryButtonClick() {
+        listView.setCellFactory(new CheckboxCellFactory());
+    }
+}
diff --git a/javafx/src/main/java/com/baeldung/listview/Main.java b/javafx/src/main/java/com/baeldung/listview/Main.java
new file mode 100644
index 0000000000..a067971758
--- /dev/null
+++ b/javafx/src/main/java/com/baeldung/listview/Main.java
@@ -0,0 +1,28 @@
+package com.baeldung.listview;
+
+import javafx.application.Application;
+import javafx.fxml.FXMLLoader;
+import javafx.scene.Parent;
+import javafx.scene.Scene;
+import javafx.stage.Stage;
+
+import java.net.URL;
+
+public class Main extends Application {
+
+    public static void main(String[] args) {
+        launch(args);
+    }
+
+    @Override
+    public void start(Stage primaryStage) throws Exception {
+        FXMLLoader loader = new FXMLLoader();
+        URL xmlUrl = getClass().getResource("/example.fxml");
+        loader.setLocation(xmlUrl);
+        Parent root = loader.load();
+
+        primaryStage.setTitle("List View Demo");
+        primaryStage.setScene(new Scene(root));
+        primaryStage.show();
+    }
+}
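One caveat in start(): getClass().getResource("/example.fxml") returns null when the resource is missing from the classpath, and the failure then surfaces as an opaque NullPointerException inside loader.load(). An optional hardening sketch; the Resources class and its require method are invented for illustration, not part of the module:

// Hypothetical helper: fail fast with a readable message instead of an NPE in load().
import java.net.URL;
import java.util.Objects;

public final class Resources {

    private Resources() {
    }

    public static URL require(String classpathLocation) {
        return Objects.requireNonNull(
            Resources.class.getResource(classpathLocation),
            classpathLocation + " not found on the classpath");
    }
}

Main.start would then call loader.setLocation(Resources.require("/example.fxml")).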
diff --git a/javafx/src/main/java/com/baeldung/listview/Person.java b/javafx/src/main/java/com/baeldung/listview/Person.java
new file mode 100644
index 0000000000..cdc0ab2dc8
--- /dev/null
+++ b/javafx/src/main/java/com/baeldung/listview/Person.java
@@ -0,0 +1,25 @@
+package com.baeldung.listview;
+
+public class Person {
+
+    private final String firstName;
+    private final String lastName;
+
+    public Person(String firstName, String lastName) {
+        this.firstName = firstName;
+        this.lastName = lastName;
+    }
+
+    public String getFirstName() {
+        return firstName;
+    }
+
+    public String getLastName() {
+        return lastName;
+    }
+
+    @Override
+    public String toString() {
+        return firstName + " " + lastName;
+    }
+}
diff --git a/javafx/src/main/java/com/baeldung/listview/cellfactory/CheckboxCellFactory.java b/javafx/src/main/java/com/baeldung/listview/cellfactory/CheckboxCellFactory.java
new file mode 100644
index 0000000000..522afcb76e
--- /dev/null
+++ b/javafx/src/main/java/com/baeldung/listview/cellfactory/CheckboxCellFactory.java
@@ -0,0 +1,29 @@
+package com.baeldung.listview.cellfactory;
+
+import com.baeldung.listview.Person;
+import javafx.scene.control.CheckBox;
+import javafx.scene.control.ListCell;
+import javafx.scene.control.ListView;
+import javafx.util.Callback;
+
+public class CheckboxCellFactory implements Callback<ListView<Person>, ListCell<Person>> {
+    @Override
+    public ListCell<Person> call(ListView<Person> param) {
+        return new ListCell<Person>() {
+            @Override
+            public void updateItem(Person person, boolean empty) {
+                super.updateItem(person, empty);
+                if (empty) {
+                    setText(null);
+                    setGraphic(null);
+                } else if (person != null) {
+                    setText(null);
+                    setGraphic(new CheckBox(person.getFirstName() + " " + person.getLastName()));
+                } else {
+                    setText("null");
+                    setGraphic(null);
+                }
+            }
+        };
+    }
+}
diff --git a/javafx/src/main/java/com/baeldung/listview/cellfactory/PersonCellFactory.java b/javafx/src/main/java/com/baeldung/listview/cellfactory/PersonCellFactory.java
new file mode 100644
index 0000000000..57866b9ead
--- /dev/null
+++ b/javafx/src/main/java/com/baeldung/listview/cellfactory/PersonCellFactory.java
@@ -0,0 +1,23 @@
+package com.baeldung.listview.cellfactory;
+
+import com.baeldung.listview.Person;
+import javafx.scene.control.ListCell;
+import javafx.scene.control.ListView;
+import javafx.util.Callback;
+
+public class PersonCellFactory implements Callback<ListView<Person>, ListCell<Person>> {
+    @Override
+    public ListCell<Person> call(ListView<Person> param) {
+        return new ListCell<Person>() {
+            @Override
+            public void updateItem(Person person, boolean empty) {
+                super.updateItem(person, empty);
+                if (empty || person == null) {
+                    setText(null);
+                } else {
+                    setText(person.getFirstName() + " " + person.getLastName());
+                }
+            }
+        };
+    }
+}
diff --git a/javafx/src/main/resources/example.fxml b/javafx/src/main/resources/example.fxml
new file mode 100644
index 0000000000..c68df076d0
--- /dev/null
+++ b/javafx/src/main/resources/example.fxml
@@ -0,0 +1,14 @@
+
+
+
+
+
+
+
+
+
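The markup of example.fxml did not survive above; only empty added lines remain. As a hedged stand-in, the view it implies can be built in plain Java from what ExampleController declares: a ListView of Person plus buttons that switch the cell factory. The VBox container, spacing, and button labels below are assumptions, not recovered content:

// Plain-Java approximation of the lost example.fxml; layout details are assumptions.
package com.baeldung.listview;

import javafx.application.Application;
import javafx.scene.Scene;
import javafx.scene.control.Button;
import javafx.scene.control.ListCell;
import javafx.scene.control.ListView;
import javafx.scene.layout.VBox;
import javafx.stage.Stage;

public class ProgrammaticListViewDemo extends Application {

    @Override
    public void start(Stage stage) {
        ListView<Person> listView = new ListView<>();
        listView.getItems().addAll(
            new Person("Isaac", "Newton"),
            new Person("Albert", "Einstein"),
            new Person("Ludwig", "Boltzmann"));

        // Passing null restores the default toString-based cells.
        Button defaultButton = new Button("Default cells");
        defaultButton.setOnAction(e -> listView.setCellFactory(null));

        // Inline equivalent of PersonCellFactory, written as a lambda.
        Button personCellButton = new Button("Person cells");
        personCellButton.setOnAction(e -> listView.setCellFactory(lv -> new ListCell<Person>() {
            @Override
            protected void updateItem(Person person, boolean empty) {
                super.updateItem(person, empty);
                setText(empty || person == null ? null : person.getFirstName() + " " + person.getLastName());
            }
        }));

        stage.setScene(new Scene(new VBox(8, listView, defaultButton, personCellButton)));
        stage.setTitle("List View Demo");
        stage.show();
    }

    public static void main(String[] args) {
        launch(args);
    }
}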