Merge branch 'eugenp:master' into master

Swapan Pramanick 2021-10-30 00:27:31 +02:00 committed by GitHub
commit 2adb9e22a5
2656 changed files with 83160 additions and 23999 deletions

.gitignore

@ -89,5 +89,14 @@ testing-modules/report-*.json
libraries-2/*.db
apache-spark/data/output
logs/
libraries-data-io/*.docx
persistence-modules/spring-hibernate-5/com.*
spring-boot-modules/spring-boot-react/frontend/build
spring-boot-modules/spring-boot-react/frontend/node
spring-boot-modules/spring-boot-react/frontend/yarn.lock
spring-boot-modules/spring-boot-properties-3/*.log
# SDKMan
.sdkmanrc


@ -1,8 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<project
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"
xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<artifactId>akka-http</artifactId>
<name>akka-http</name>


@ -1,6 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<project
xmlns="http://maven.apache.org/POM/4.0.0"
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>


@ -1,6 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<project
xmlns="http://maven.apache.org/POM/4.0.0"
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
@ -44,18 +43,6 @@
</dependency>
</dependencies>
<build>
<pluginManagement>
<plugins>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>exec-maven-plugin</artifactId>
<version>${exec-maven-plugin.version}</version>
</plugin>
</plugins>
</pluginManagement>
</build>
<properties>
<commons-math3.version>3.6.1</commons-math3.version>
<io.jenetics.version>3.7.0</io.jenetics.version>
@ -63,4 +50,4 @@
<commons-codec.version>1.11</commons-codec.version>
</properties>
</project>


@ -1,6 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<project
xmlns="http://maven.apache.org/POM/4.0.0"
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
@ -49,18 +48,6 @@
</dependency>
</dependencies>
<build>
<pluginManagement>
<plugins>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>exec-maven-plugin</artifactId>
<version>${exec-maven-plugin.version}</version>
</plugin>
</plugins>
</pluginManagement>
</build>
<reporting>
<plugins>
<plugin>


@ -1,6 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<project
xmlns="http://maven.apache.org/POM/4.0.0"
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
@ -54,18 +53,6 @@
</dependency>
</dependencies>
<build>
<pluginManagement>
<plugins>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>exec-maven-plugin</artifactId>
<version>${exec-maven-plugin.version}</version>
</plugin>
</plugins>
</pluginManagement>
</build>
<reporting>
<plugins>
<plugin>


@ -8,6 +8,9 @@ import java.util.PriorityQueue;
import java.util.Queue;
import java.util.stream.Collectors;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public class RouteFinder<T extends GraphNode> {
private final Graph<T> graph;
private final Scorer<T> nextNodeScorer;
@ -28,11 +31,11 @@ public class RouteFinder<T extends GraphNode> {
openSet.add(start);
while (!openSet.isEmpty()) {
System.out.println("Open Set contains: " + openSet.stream().map(RouteNode::getCurrent).collect(Collectors.toSet()));
log.debug("Open Set contains: " + openSet.stream().map(RouteNode::getCurrent).collect(Collectors.toSet()));
RouteNode<T> next = openSet.poll();
System.out.println("Looking at node: " + next);
log.debug("Looking at node: " + next);
if (next.getCurrent().equals(to)) {
System.out.println("Found our destination!");
log.debug("Found our destination!");
List<T> route = new ArrayList<>();
RouteNode<T> current = next;
@ -41,7 +44,7 @@ public class RouteFinder<T extends GraphNode> {
current = allNodes.get(current.getPrevious());
} while (current != null);
System.out.println("Route: " + route);
log.debug("Route: " + route);
return route;
}
@ -55,7 +58,7 @@ public class RouteFinder<T extends GraphNode> {
nextNode.setRouteScore(newScore);
nextNode.setEstimatedScore(newScore + targetScorer.computeCost(connection, to));
openSet.add(nextNode);
System.out.println("Found a better route to node: " + nextNode);
log.debug("Found a better route to node: " + nextNode);
}
});
}
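These replacements still assemble each message with string concatenation, which runs even when the DEBUG level is disabled. As a possible further refinement (not applied in this commit), SLF4J's {} placeholders defer the formatting until the level check passes, though argument expressions are still evaluated:

log.debug("Open Set contains: {}", openSet.stream().map(RouteNode::getCurrent).collect(Collectors.toSet()));
log.debug("Looking at node: {}", next);
log.debug("Found a better route to node: {}", nextNode);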


@ -1,5 +1,7 @@
package com.baeldung.algorithms.astar.underground;
import static org.assertj.core.api.Assertions.assertThat;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
@ -10,9 +12,13 @@ import java.util.stream.Stream;
import com.baeldung.algorithms.astar.Graph;
import com.baeldung.algorithms.astar.RouteFinder;
import lombok.extern.slf4j.Slf4j;
import org.junit.Before;
import org.junit.Test;
@Slf4j
public class RouteFinderIntegrationTest {
private Graph<Station> underground;
@ -637,7 +643,8 @@ public class RouteFinderIntegrationTest {
@Test
public void findRoute() {
List<Station> route = routeFinder.findRoute(underground.getNode("74"), underground.getNode("7"));
assertThat(route).size().isPositive();
System.out.println(route.stream().map(Station::getName).collect(Collectors.toList()));
route.stream().map(Station::getName).collect(Collectors.toList()).forEach(station -> log.debug(station));
}
}


@ -1,6 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<project
xmlns="http://maven.apache.org/POM/4.0.0"
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
@ -21,19 +20,16 @@
<version>${org.assertj.core.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-collections4</artifactId>
<version>${commons-collections4.version}</version>
</dependency>
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
<version>${guava.version}</version>
</dependency>
<dependency>
<groupId>com.squareup.retrofit2</groupId>
<artifactId>retrofit</artifactId>
@ -44,13 +40,11 @@
<artifactId>converter-jackson</artifactId>
<version>${retrofit.version}</version>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
<version>${commons.lang3.version}</version>
</dependency>
<dependency>
<groupId>pl.pragmatists</groupId>
<artifactId>JUnitParams</artifactId>
@ -74,18 +68,6 @@
</dependency>
</dependencies>
<build>
<pluginManagement>
<plugins>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>exec-maven-plugin</artifactId>
<version>${exec-maven-plugin.version}</version>
</plugin>
</plugins>
</pluginManagement>
</build>
<properties>
<org.assertj.core.version>3.9.0</org.assertj.core.version>
<commons-collections4.version>4.3</commons-collections4.version>


@ -1,6 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<project
xmlns="http://maven.apache.org/POM/4.0.0"
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
@ -34,18 +33,6 @@
</dependency>
</dependencies>
<build>
<pluginManagement>
<plugins>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>exec-maven-plugin</artifactId>
<version>${exec-maven-plugin.version}</version>
</plugin>
</plugins>
</pluginManagement>
</build>
<properties>
<org.assertj.core.version>3.9.0</org.assertj.core.version>
<guava.version>27.0.1-jre</guava.version>


@ -1,6 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<project
xmlns="http://maven.apache.org/POM/4.0.0"
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
@ -38,9 +37,8 @@
<dependency>
<groupId>org.junit.platform</groupId>
<artifactId>junit-platform-commons</artifactId>
<version>${junit.platform.version}</version>
<version>${junit-platform.version}</version>
</dependency>
<dependency>
<groupId>org.assertj</groupId>
<artifactId>assertj-core</artifactId>
@ -49,25 +47,12 @@
</dependency>
</dependencies>
<build>
<pluginManagement>
<plugins>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>exec-maven-plugin</artifactId>
<version>${exec-maven-plugin.version}</version>
</plugin>
</plugins>
</pluginManagement>
</build>
<properties>
<tradukisto.version>1.0.1</tradukisto.version>
<org.assertj.core.version>3.9.0</org.assertj.core.version>
<commons-codec.version>1.11</commons-codec.version>
<commons-math3.version>3.6.1</commons-math3.version>
<guava.version>28.1-jre</guava.version>
<junit.platform.version>1.6.0</junit.platform.version>
</properties>
</project>


@ -1,10 +1,10 @@
package com.baeldung.algorithms.prim;
import org.junit.Test;
import java.util.ArrayList;
import java.util.List;
import org.junit.Test;
public class PrimUnitTest {
@Test


@ -1,6 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<artifactId>algorithms-miscellaneous-6</artifactId>
<version>0.0.1-SNAPSHOT</version>
@ -11,9 +12,9 @@
<artifactId>parent-modules</artifactId>
<version>1.0.0-SNAPSHOT</version>
</parent>
<dependencies>
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
<version>${guava.version}</version>
@ -21,7 +22,7 @@
<dependency>
<groupId>org.junit.platform</groupId>
<artifactId>junit-platform-commons</artifactId>
<version>${junit.platform.version}</version>
<version>${junit-platform.version}</version>
</dependency>
<dependency>
<groupId>org.assertj</groupId>
@ -41,12 +42,11 @@
<version>${commons-math3.version}</version>
</dependency>
</dependencies>
<properties>
<guava.version>28.1-jre</guava.version>
<org.assertj.core.version>3.9.0</org.assertj.core.version>
<junit.platform.version>1.6.0</junit.platform.version>
<commons-math3.version>3.6.1</commons-math3.version>
</properties>
</project>


@ -1,6 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<project
xmlns="http://maven.apache.org/POM/4.0.0"
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>


@ -12,7 +12,7 @@ public class BinarySearch {
while (low <= high) {
int mid = (low + high) / 2;
int mid = low + ((high - low) / 2);
if (sortedArray[mid] < key) {
low = mid + 1;
@ -28,7 +28,7 @@ public class BinarySearch {
public int runBinarySearchRecursively(int[] sortedArray, int key, int low, int high) {
int middle = (low + high) / 2;
int middle = low + ((high - low) / 2);
if (high < low) {
return -1;
}
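Both formulas produce the same midpoint in ordinary cases, but (low + high) / 2 overflows int when low + high exceeds Integer.MAX_VALUE, yielding a negative index, while low + ((high - low) / 2) cannot overflow for valid bounds. A minimal sketch with hypothetical values:

int low = Integer.MAX_VALUE - 1;
int high = Integer.MAX_VALUE;
int overflowing = (low + high) / 2;  // sum wraps around, result is negative
int safe = low + ((high - low) / 2); // Integer.MAX_VALUE - 1, as expected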


@ -29,11 +29,13 @@ public class Graph {
stack.push(start);
while (!stack.isEmpty()) {
int current = stack.pop();
isVisited[current] = true;
visit(current);
for (int dest : adjVertices.get(current)) {
if (!isVisited[dest])
stack.push(dest);
if(!isVisited[current]){
isVisited[current] = true;
visit(current);
for (int dest : adjVertices.get(current)) {
if (!isVisited[dest])
stack.push(dest);
}
}
}
}
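The guard matters because a node reachable along several paths may be pushed onto the stack more than once before it is first popped; without the pop-time check, each stale entry would trigger another visit and re-push its neighbors. A self-contained sketch of the same idea on a hypothetical three-node graph:

import java.util.ArrayDeque;
import java.util.Deque;
import java.util.List;
import java.util.Map;

class DfsRevisitDemo {
    public static void main(String[] args) {
        // node 2 is reachable from both 0 and 1, so it is pushed twice
        Map<Integer, List<Integer>> adjVertices = Map.of(0, List.of(2, 1), 1, List.of(2), 2, List.of());
        boolean[] isVisited = new boolean[3];
        Deque<Integer> stack = new ArrayDeque<>();
        stack.push(0);
        while (!stack.isEmpty()) {
            int current = stack.pop();
            if (!isVisited[current]) { // skip stack entries already visited via another path
                isVisited[current] = true;
                System.out.println("visit " + current); // prints each node exactly once
                for (int dest : adjVertices.get(current)) {
                    if (!isVisited[dest]) {
                        stack.push(dest);
                    }
                }
            }
        }
    }
}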


@ -8,7 +8,9 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class SuffixTree {
private static final Logger LOGGER = LoggerFactory.getLogger(SuffixTree.class);
private static final String WORD_TERMINATION = "$";
private static final int POSITION_UNDEFINED = -1;
private Node root;
@ -23,7 +25,7 @@ public class SuffixTree {
}
public List<String> searchText(String pattern) {
LOGGER.info("Searching for pattern \"{}\"", pattern);
LOGGER.debug("Searching for pattern \"{}\"", pattern);
List<String> result = new ArrayList<>();
List<Node> nodes = getAllNodesInTraversePath(pattern, root, false);
@ -41,11 +43,11 @@ public class SuffixTree {
}
private void addSuffix(String suffix, int position) {
LOGGER.info(">>>>>>>>>>>> Adding new suffix {}", suffix);
LOGGER.debug(">>>>>>>>>>>> Adding new suffix {}", suffix);
List<Node> nodes = getAllNodesInTraversePath(suffix, root, true);
if (nodes.size() == 0) {
addChildNode(root, suffix, position);
LOGGER.info("{}", printTree());
LOGGER.debug("{}", printTree());
} else {
Node lastNode = nodes.remove(nodes.size() - 1);
String newText = suffix;
@ -58,7 +60,7 @@ public class SuffixTree {
newText = newText.substring(existingSuffixUptoLastNode.length());
}
extendNode(lastNode, newText, position);
LOGGER.info("{}", printTree());
LOGGER.debug("{}", printTree());
}
}


@ -2,7 +2,6 @@ package com.baeldung.algorithms.dfs;
import java.util.List;
import com.baeldung.algorithms.dfs.Graph;
import org.junit.Test;
public class GraphUnitTest {


@ -10,7 +10,7 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class QuadTreeSearchUnitTest {
private static final Logger LOGGER = LoggerFactory.getLogger(QuadTreeSearchUnitTest.class);
private static QuadTree quadTree;
@ -20,41 +20,41 @@ public class QuadTreeSearchUnitTest {
Region area = new Region(0, 0, 400, 400);
quadTree = new QuadTree(area);
float[][] points = new float[][] { { 21, 25 }, { 55, 53 }, { 70, 318 }, { 98, 302 },
{ 49, 229 }, { 135, 229 }, { 224, 292 }, { 206, 321 }, { 197, 258 }, { 245, 238 } };
for (int i = 0; i < points.length; i++) {
Point point = new Point(points[i][0], points[i][1]);
quadTree.addPoint(point);
}
LOGGER.info("\n" + quadTree.printTree(""));
LOGGER.info("==============================================");
LOGGER.debug("\n" + quadTree.printTree(""));
LOGGER.debug("==============================================");
}
@Test
public void givenQuadTree_whenSearchingForRange_thenReturn1MatchingItem() {
Region searchArea = new Region(200, 200, 250, 250);
List<Point> result = quadTree.search(searchArea, null, "");
LOGGER.info(result.toString());
LOGGER.info(quadTree.printSearchTraversePath());
LOGGER.debug(result.toString());
LOGGER.debug(quadTree.printSearchTraversePath());
Assert.assertEquals(1, result.size());
Assert.assertArrayEquals(new float[] { 245, 238 },
new float[]{result.get(0).getX(), result.get(0).getY() }, 0);
}
@Test
public void givenQuadTree_whenSearchingForRange_thenReturn2MatchingItems() {
Region searchArea = new Region(0, 0, 100, 100);
List<Point> result = quadTree.search(searchArea, null, "");
LOGGER.info(result.toString());
LOGGER.info(quadTree.printSearchTraversePath());
LOGGER.debug(result.toString());
LOGGER.debug(quadTree.printSearchTraversePath());
Assert.assertEquals(2, result.size());
Assert.assertArrayEquals(new float[] { 21, 25 },
new float[]{result.get(0).getX(), result.get(0).getY() }, 0);
Assert.assertArrayEquals(new float[] { 55, 53 },
new float[]{result.get(1).getX(), result.get(1).getY() }, 0);
}
}


@ -1,5 +1,4 @@
<project
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns="http://maven.apache.org/POM/4.0.0"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
@ -33,7 +32,7 @@
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-api</artifactId>
<version>${junit-jupiter-api.version}</version>
<version>${junit-jupiter.version}</version>
<scope>test</scope>
</dependency>
<dependency>
@ -44,23 +43,10 @@
</dependency>
</dependencies>
<build>
<pluginManagement>
<plugins>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>exec-maven-plugin</artifactId>
<version>${exec-maven-plugin.version}</version>
</plugin>
</plugins>
</pluginManagement>
</build>
<properties>
<commons-math3.version>3.6.1</commons-math3.version>
<org.assertj.core.version>3.9.0</org.assertj.core.version>
<commons-codec.version>1.11</commons-codec.version>
<junit-jupiter-api.version>5.3.1</junit-jupiter-api.version>
</properties>
</project>


@ -1,6 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<project
xmlns="http://maven.apache.org/POM/4.0.0"
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
@ -34,7 +33,7 @@
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-api</artifactId>
<version>${junit-jupiter-api.version}</version>
<version>${junit-jupiter.version}</version>
<scope>test</scope>
</dependency>
<dependency>
@ -45,23 +44,10 @@
</dependency>
</dependencies>
<build>
<pluginManagement>
<plugins>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>exec-maven-plugin</artifactId>
<version>${exec-maven-plugin.version}</version>
</plugin>
</plugins>
</pluginManagement>
</build>
<properties>
<commons-math3.version>3.6.1</commons-math3.version>
<org.assertj.core.version>3.9.0</org.assertj.core.version>
<commons-codec.version>1.11</commons-codec.version>
<junit-jupiter-api.version>5.3.1</junit-jupiter-api.version>
</properties>
</project>


@ -1,6 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<project
xmlns="http://maven.apache.org/POM/4.0.0"
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>


@ -1,6 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<project
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns="http://maven.apache.org/POM/4.0.0"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
@ -11,7 +10,6 @@
<groupId>com.baeldung</groupId>
<version>1.0.0-SNAPSHOT</version>
<artifactId>annotations</artifactId>
<relativePath>../</relativePath>
</parent>
<dependencies>


@ -1,6 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<project
xmlns="http://maven.apache.org/POM/4.0.0"
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
@ -11,7 +10,6 @@
<groupId>com.baeldung</groupId>
<artifactId>annotations</artifactId>
<version>1.0.0-SNAPSHOT</version>
<relativePath>../</relativePath>
</parent>
<dependencies>


@ -1,6 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<project
xmlns="http://maven.apache.org/POM/4.0.0"
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>


@ -1,6 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<project
xmlns="http://maven.apache.org/POM/4.0.0"
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>


@ -1,6 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<project
xmlns="http://maven.apache.org/POM/4.0.0"
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
@ -21,4 +20,4 @@
</dependency>
</dependencies>
</project>


@ -1,6 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<project
xmlns="http://maven.apache.org/POM/4.0.0"
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
@ -38,4 +37,4 @@
</plugins>
</build>
</project>


@ -1,6 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<project
xmlns="http://maven.apache.org/POM/4.0.0"
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
@ -53,4 +52,4 @@
<httpclient.version>4.5.2</httpclient.version>
</properties>
</project>


@ -1,6 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<project
xmlns="http://maven.apache.org/POM/4.0.0"
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
@ -108,4 +107,4 @@
<cargo-maven2-plugin.version>1.6.1</cargo-maven2-plugin.version>
</properties>
</project>


@ -1,6 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<project
xmlns="http://maven.apache.org/POM/4.0.0"
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
@ -30,7 +29,6 @@
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>exec-maven-plugin</artifactId>
<version>${exec-maven-plugin.version}</version>
</plugin>
</plugins>
</pluginManagement>
@ -40,4 +38,4 @@
<cxf.version>3.1.8</cxf.version>
</properties>
</project>


@ -1,6 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<project
xmlns="http://maven.apache.org/POM/4.0.0"
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
@ -19,4 +18,4 @@
<module>sse-jaxrs-client</module>
</modules>
</project>


@ -1,6 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<project
xmlns="http://maven.apache.org/POM/4.0.0"
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
@ -59,4 +58,4 @@
<cxf-version>3.2.0</cxf-version>
</properties>
</project>


@ -1,6 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<project
xmlns="http://maven.apache.org/POM/4.0.0"
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
@ -15,7 +14,6 @@
</parent>
<dependencies>
<dependency>
<groupId>javax.ws.rs</groupId>
<artifactId>javax.ws.rs-api</artifactId>
@ -34,7 +32,6 @@
<version>${bind-api.version}</version>
<scope>provided</scope>
</dependency>
</dependencies>
<build>
@ -86,4 +83,4 @@
<bind-api.version>1.0</bind-api.version>
</properties>
</project>

apache-kafka/README.md

@ -0,0 +1,19 @@
## Apache Kafka
This module contains articles about Apache Kafka.
### Relevant articles
- [Kafka Streams vs. Kafka Consumer](https://www.baeldung.com/java-kafka-streams-vs-kafka-consumer)
- [Kafka Topic Creation Using Java](https://www.baeldung.com/kafka-topic-creation)
- [Using Kafka MockConsumer](https://www.baeldung.com/kafka-mockconsumer)
- [Using Kafka MockProducer](https://www.baeldung.com/kafka-mockproducer)
- [Introduction to KafkaStreams in Java](https://www.baeldung.com/java-kafka-streams)
- [Introduction to Kafka Connectors](https://www.baeldung.com/kafka-connectors-guide)
- [Kafka Connect Example with MQTT and MongoDB](https://www.baeldung.com/kafka-connect-mqtt-mongodb)
- [Building a Data Pipeline with Flink and Kafka](https://www.baeldung.com/kafka-flink-data-pipeline)
- [Exactly Once Processing in Kafka with Java](https://www.baeldung.com/kafka-exactly-once)
- [Custom Serializers in Apache Kafka](https://www.baeldung.com/kafka-custom-serializer)
##### Building the project
You can build the project from the command line with *mvn clean install*, or from within an IDE.


@ -0,0 +1 @@
log4j.rootLogger=INFO, stdout

apache-kafka/pom.xml

@ -0,0 +1,187 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<artifactId>apache-kafka</artifactId>
<name>apache-kafka</name>
<parent>
<groupId>com.baeldung</groupId>
<artifactId>parent-modules</artifactId>
<version>1.0.0-SNAPSHOT</version>
</parent>
<dependencies>
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka-clients</artifactId>
<version>${kafka.version}</version>
</dependency>
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka-streams</artifactId>
<version>${kafka.version}</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>${org.slf4j.version}</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<version>${org.slf4j.version}</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-connector-kafka-0.11_2.11</artifactId>
<version>${flink.version}</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-streaming-java_2.11</artifactId>
<version>${flink.version}</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-core</artifactId>
<version>${flink.version}</version>
<exclusions>
<exclusion>
<artifactId>commons-logging</artifactId>
<groupId>commons-logging</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-java</artifactId>
<version>${flink.version}</version>
<exclusions>
<exclusion>
<artifactId>commons-logging</artifactId>
<groupId>commons-logging</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-test-utils_2.11</artifactId>
<version>${flink.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
<version>${guava.version}</version>
</dependency>
<dependency>
<groupId>org.awaitility</groupId>
<artifactId>awaitility</artifactId>
<version>${awaitility.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.awaitility</groupId>
<artifactId>awaitility-proxy</artifactId>
<version>${awaitility.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.datatype</groupId>
<artifactId>jackson-datatype-jsr310</artifactId>
<version>${jackson.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
<version>${jackson.version}</version>
</dependency>
<dependency>
<groupId>org.assertj</groupId>
<artifactId>assertj-core</artifactId>
<version>${assertj.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.testcontainers</groupId>
<artifactId>kafka</artifactId>
<version>${testcontainers-kafka.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.testcontainers</groupId>
<artifactId>junit-jupiter</artifactId>
<version>${testcontainers-jupiter.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-core_2.11</artifactId>
<version>${org.apache.spark.spark-core.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-sql_2.11</artifactId>
<version>${org.apache.spark.spark-core.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-graphx_2.11</artifactId>
<version>${org.apache.spark.spark-core.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-streaming_2.11</artifactId>
<version>${org.apache.spark.spark-core.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-mllib_2.11</artifactId>
<version>${org.apache.spark.spark-core.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-streaming-kafka-0-10_2.11</artifactId>
<version>${org.apache.spark.spark-core.version}</version>
</dependency>
<dependency>
<groupId>com.datastax.spark</groupId>
<artifactId>spark-cassandra-connector_2.11</artifactId>
<version>${com.datastax.spark.spark-cassandra-connector.version}</version>
</dependency>
<dependency>
<groupId>com.datastax.spark</groupId>
<artifactId>spark-cassandra-connector-java_2.11</artifactId>
<version>${com.datastax.spark.spark-cassandra-connector-java.version}</version>
</dependency>
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<version>${lombok.version}</version>
<scope>provided</scope>
</dependency>
</dependencies>
<properties>
<assertj.version>3.6.2</assertj.version>
<kafka.version>2.8.0</kafka.version>
<testcontainers-kafka.version>1.15.3</testcontainers-kafka.version>
<testcontainers-jupiter.version>1.15.3</testcontainers-jupiter.version>
<flink.version>1.5.0</flink.version>
<awaitility.version>3.0.0</awaitility.version>
<guava.version>29.0-jre</guava.version>
<org.apache.spark.spark-core.version>2.4.8</org.apache.spark.spark-core.version>
<graphframes.version>0.8.1-spark3.0-s_2.12</graphframes.version>
<com.datastax.spark.spark-cassandra-connector.version>2.5.2</com.datastax.spark.spark-cassandra-connector.version>
<com.datastax.spark.spark-cassandra-connector-java.version>1.6.0-M1</com.datastax.spark.spark-cassandra-connector-java.version>
<lombok.version>1.18.20</lombok.version>
</properties>
</project>


@ -0,0 +1,70 @@
package com.baeldung.flink;
import com.baeldung.flink.model.Backup;
import com.baeldung.flink.model.InputMessage;
import com.baeldung.flink.operator.BackupAggregator;
import com.baeldung.flink.operator.InputMessageTimestampAssigner;
import com.baeldung.flink.operator.WordsCapitalizer;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer011;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer011;
import static com.baeldung.flink.connector.Consumers.*;
import static com.baeldung.flink.connector.Producers.*;
public class FlinkDataPipeline {
public static void capitalize() throws Exception {
String inputTopic = "flink_input";
String outputTopic = "flink_output";
String consumerGroup = "baeldung";
String address = "localhost:9092";
StreamExecutionEnvironment environment = StreamExecutionEnvironment.getExecutionEnvironment();
FlinkKafkaConsumer011<String> flinkKafkaConsumer = createStringConsumerForTopic(inputTopic, address, consumerGroup);
flinkKafkaConsumer.setStartFromEarliest();
DataStream<String> stringInputStream = environment.addSource(flinkKafkaConsumer);
FlinkKafkaProducer011<String> flinkKafkaProducer = createStringProducer(outputTopic, address);
stringInputStream.map(new WordsCapitalizer())
.addSink(flinkKafkaProducer);
environment.execute();
}
public static void createBackup() throws Exception {
String inputTopic = "flink_input";
String outputTopic = "flink_output";
String consumerGroup = "baeldung";
String kafkaAddress = "localhost:9092";
StreamExecutionEnvironment environment = StreamExecutionEnvironment.getExecutionEnvironment();
environment.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
FlinkKafkaConsumer011<InputMessage> flinkKafkaConsumer = createInputMessageConsumer(inputTopic, kafkaAddress, consumerGroup);
flinkKafkaConsumer.setStartFromEarliest();
flinkKafkaConsumer.assignTimestampsAndWatermarks(new InputMessageTimestampAssigner());
FlinkKafkaProducer011<Backup> flinkKafkaProducer = createBackupProducer(outputTopic, kafkaAddress);
DataStream<InputMessage> inputMessagesStream = environment.addSource(flinkKafkaConsumer);
inputMessagesStream.timeWindowAll(Time.hours(24))
.aggregate(new BackupAggregator())
.addSink(flinkKafkaProducer);
environment.execute();
}
public static void main(String[] args) throws Exception {
createBackup();
}
}


@ -9,23 +9,20 @@ import java.util.Properties;
public class Consumers {
public static FlinkKafkaConsumer011<String> createStringConsumerForTopic(
String topic, String kafkaAddress, String kafkaGroup ) {
Properties props = new Properties();
props.setProperty("bootstrap.servers", kafkaAddress);
props.setProperty("group.id",kafkaGroup);
FlinkKafkaConsumer011<String> consumer =
new FlinkKafkaConsumer011<>(topic, new SimpleStringSchema(),props);
public static FlinkKafkaConsumer011<String> createStringConsumerForTopic(String topic, String kafkaAddress, String kafkaGroup) {
Properties props = new Properties();
props.setProperty("bootstrap.servers", kafkaAddress);
props.setProperty("group.id", kafkaGroup);
FlinkKafkaConsumer011<String> consumer = new FlinkKafkaConsumer011<>(topic, new SimpleStringSchema(), props);
return consumer;
}
public static FlinkKafkaConsumer011<InputMessage> createInputMessageConsumer(String topic, String kafkaAddress, String kafkaGroup ) {
public static FlinkKafkaConsumer011<InputMessage> createInputMessageConsumer(String topic, String kafkaAddress, String kafkaGroup) {
Properties properties = new Properties();
properties.setProperty("bootstrap.servers", kafkaAddress);
properties.setProperty("group.id",kafkaGroup);
FlinkKafkaConsumer011<InputMessage> consumer = new FlinkKafkaConsumer011<InputMessage>(
topic, new InputMessageDeserializationSchema(),properties);
properties.setProperty("group.id", kafkaGroup);
FlinkKafkaConsumer011<InputMessage> consumer = new FlinkKafkaConsumer011<InputMessage>(topic, new InputMessageDeserializationSchema(), properties);
return consumer;
}


@ -18,6 +18,7 @@ public class InputMessage {
public String getSender() {
return sender;
}
public void setSender(String sender) {
this.sender = sender;
}
@ -55,12 +56,14 @@ public class InputMessage {
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
if (this == o)
return true;
if (o == null || getClass() != o.getClass())
return false;
InputMessage message1 = (InputMessage) o;
return Objects.equal(sender, message1.sender) &&
Objects.equal(recipient, message1.recipient) &&
Objects.equal(sentAt, message1.sentAt) &&
Objects.equal(message, message1.message);
}


@ -0,0 +1,34 @@
package com.baeldung.flink.operator;
import com.baeldung.flink.model.Backup;
import com.baeldung.flink.model.InputMessage;
import org.apache.flink.api.common.functions.AggregateFunction;
import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.List;
public class BackupAggregator implements AggregateFunction<InputMessage, List<InputMessage>, Backup> {
@Override
public List<InputMessage> createAccumulator() {
return new ArrayList<>();
}
@Override
public List<InputMessage> add(InputMessage inputMessage, List<InputMessage> inputMessages) {
inputMessages.add(inputMessage);
return inputMessages;
}
@Override
public Backup getResult(List<InputMessage> inputMessages) {
Backup backup = new Backup(inputMessages, LocalDateTime.now());
return backup;
}
@Override
public List<InputMessage> merge(List<InputMessage> inputMessages, List<InputMessage> acc1) {
inputMessages.addAll(acc1);
return inputMessages;
}
}


@ -12,7 +12,9 @@ public class InputMessageTimestampAssigner implements AssignerWithPunctuatedWate
@Override
public long extractTimestamp(InputMessage element, long previousElementTimestamp) {
ZoneId zoneId = ZoneId.systemDefault();
return element.getSentAt().atZone(zoneId).toEpochSecond() * 1000;
return element.getSentAt()
.atZone(zoneId)
.toEpochSecond() * 1000;
}
@Nullable
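Flink event-time timestamps are expressed in epoch milliseconds, while toEpochSecond() returns whole seconds, hence the multiplication by 1000 (any sub-second part of sentAt is dropped). An equivalent conversion that keeps milliseconds, sketched with a hypothetical value:

ZoneId zoneId = ZoneId.systemDefault();
LocalDateTime sentAt = LocalDateTime.of(2021, 10, 30, 0, 27, 31); // hypothetical message time
long seconds = sentAt.atZone(zoneId).toEpochSecond() * 1000;      // as in extractTimestamp() above
long millis = sentAt.atZone(zoneId).toInstant().toEpochMilli();   // preserves the millisecond part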


@ -9,8 +9,7 @@ import org.apache.flink.api.common.serialization.SerializationSchema;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class BackupSerializationSchema
implements SerializationSchema<Backup> {
public class BackupSerializationSchema implements SerializationSchema<Backup> {
static ObjectMapper objectMapper = new ObjectMapper().registerModule(new JavaTimeModule());
@ -18,7 +17,7 @@ public class BackupSerializationSchema
@Override
public byte[] serialize(Backup backupMessage) {
if(objectMapper == null) {
if (objectMapper == null) {
objectMapper.setVisibility(PropertyAccessor.FIELD, JsonAutoDetect.Visibility.ANY);
objectMapper = new ObjectMapper().registerModule(new JavaTimeModule());
}


@ -8,12 +8,10 @@ import org.apache.flink.api.common.typeinfo.TypeInformation;
import java.io.IOException;
public class InputMessageDeserializationSchema implements
DeserializationSchema<InputMessage> {
public class InputMessageDeserializationSchema implements DeserializationSchema<InputMessage> {
static ObjectMapper objectMapper = new ObjectMapper().registerModule(new JavaTimeModule());
@Override
public InputMessage deserialize(byte[] bytes) throws IOException {


@ -0,0 +1,83 @@
package com.baeldung.kafka.admin;
import org.apache.kafka.clients.admin.Admin;
import org.apache.kafka.clients.admin.AdminClientConfig;
import org.apache.kafka.clients.admin.CreateTopicsOptions;
import org.apache.kafka.clients.admin.CreateTopicsResult;
import org.apache.kafka.clients.admin.NewTopic;
import org.apache.kafka.common.KafkaFuture;
import org.apache.kafka.common.config.TopicConfig;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
public class KafkaTopicApplication {
private final Properties properties;
public KafkaTopicApplication(Properties properties) {
this.properties = properties;
}
public void createTopic(String topicName) throws Exception {
try (Admin admin = Admin.create(properties)) {
int partitions = 1;
short replicationFactor = 1;
NewTopic newTopic = new NewTopic(topicName, partitions, replicationFactor);
CreateTopicsResult result = admin.createTopics(Collections.singleton(newTopic));
// get the async result for the new topic creation
KafkaFuture<Void> future = result.values()
.get(topicName);
// call get() to block until topic creation has completed or failed
future.get();
}
}
public void createTopicWithOptions(String topicName) throws Exception {
Properties props = new Properties();
props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
try (Admin admin = Admin.create(props)) {
int partitions = 1;
short replicationFactor = 1;
NewTopic newTopic = new NewTopic(topicName, partitions, replicationFactor);
CreateTopicsOptions topicOptions = new CreateTopicsOptions().validateOnly(true)
.retryOnQuotaViolation(true);
CreateTopicsResult result = admin.createTopics(Collections.singleton(newTopic), topicOptions);
KafkaFuture<Void> future = result.values()
.get(topicName);
future.get();
}
}
public void createCompactedTopicWithCompression(String topicName) throws Exception {
Properties props = new Properties();
props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
try (Admin admin = Admin.create(props)) {
int partitions = 1;
short replicationFactor = 1;
// Create a compacted topic with 'lz4' compression codec
Map<String, String> newTopicConfig = new HashMap<>();
newTopicConfig.put(TopicConfig.CLEANUP_POLICY_CONFIG, TopicConfig.CLEANUP_POLICY_COMPACT);
newTopicConfig.put(TopicConfig.COMPRESSION_TYPE_CONFIG, "lz4");
NewTopic newTopic = new NewTopic(topicName, partitions, replicationFactor).configs(newTopicConfig);
CreateTopicsResult result = admin.createTopics(Collections.singleton(newTopic));
KafkaFuture<Void> future = result.values()
.get(topicName);
future.get();
}
}
}
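A minimal usage sketch for the class above, assuming a broker reachable on localhost:9092 and a hypothetical topic name:

Properties properties = new Properties();
properties.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
KafkaTopicApplication app = new KafkaTopicApplication(properties);
app.createTopic("my-new-topic"); // blocks on future.get() until the broker confirms creation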


@ -19,9 +19,7 @@ public class CountryPopulationConsumer {
private java.util.function.Consumer<Throwable> exceptionConsumer;
private java.util.function.Consumer<CountryPopulation> countryPopulationConsumer;
public CountryPopulationConsumer(
Consumer<String, Integer> consumer, java.util.function.Consumer<Throwable> exceptionConsumer,
java.util.function.Consumer<CountryPopulation> countryPopulationConsumer) {
public CountryPopulationConsumer(Consumer<String, Integer> consumer, java.util.function.Consumer<Throwable> exceptionConsumer, java.util.function.Consumer<CountryPopulation> countryPopulationConsumer) {
this.consumer = consumer;
this.exceptionConsumer = exceptionConsumer;
this.countryPopulationConsumer = countryPopulationConsumer;


@ -0,0 +1,15 @@
package com.baeldung.kafka.dto;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
@Data
@AllArgsConstructor
@NoArgsConstructor
@Builder
public class MessageDto {
private String message;
private String version;
}


@ -1,4 +1,4 @@
package com.baeldung.kafka;
package com.baeldung.kafka.exactlyonce;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
@ -24,16 +24,16 @@ public class TransactionalMessageProducer {
producer.initTransactions();
try{
try {
producer.beginTransaction();
Stream.of(DATA_MESSAGE_1, DATA_MESSAGE_2).forEach(s -> producer.send(
new ProducerRecord<String, String>("input", null, s)));
Stream.of(DATA_MESSAGE_1, DATA_MESSAGE_2)
.forEach(s -> producer.send(new ProducerRecord<String, String>("input", null, s)));
producer.commitTransaction();
}catch (KafkaException e){
} catch (KafkaException e) {
producer.abortTransaction();


@ -1,4 +1,4 @@
package com.baeldung.kafka;
package com.baeldung.kafka.exactlyonce;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
@ -43,10 +43,11 @@ public class TransactionalWordCount {
ConsumerRecords<String, String> records = consumer.poll(ofSeconds(60));
Map<String, Integer> wordCountMap = records.records(new TopicPartition(INPUT_TOPIC, 0))
.stream()
.flatMap(record -> Stream.of(record.value().split(" ")))
.map(word -> Tuple.of(word, 1))
.collect(Collectors.toMap(tuple -> tuple.getKey(), t1 -> t1.getValue(), (v1, v2) -> v1 + v2));
.stream()
.flatMap(record -> Stream.of(record.value()
.split(" ")))
.map(word -> Tuple.of(word, 1))
.collect(Collectors.toMap(tuple -> tuple.getKey(), t1 -> t1.getValue(), (v1, v2) -> v1 + v2));
producer.beginTransaction();
@ -56,7 +57,8 @@ public class TransactionalWordCount {
for (TopicPartition partition : records.partitions()) {
List<ConsumerRecord<String, String>> partitionedRecords = records.records(partition);
long offset = partitionedRecords.get(partitionedRecords.size() - 1).offset();
long offset = partitionedRecords.get(partitionedRecords.size() - 1)
.offset();
offsetsToCommit.put(partition, new OffsetAndMetadata(offset + 1));
}
@ -72,7 +74,6 @@ public class TransactionalWordCount {
}
}
private static KafkaConsumer<String, String> createKafkaConsumer() {


@ -1,4 +1,4 @@
package com.baeldung.kafka;
package com.baeldung.kafka.exactlyonce;
public class Tuple {
@ -10,8 +10,8 @@ public class Tuple {
this.value = value;
}
public static Tuple of(String key, Integer value){
return new Tuple(key,value);
public static Tuple of(String key, Integer value) {
return new Tuple(key, value);
}
public String getKey() {


@ -15,8 +15,7 @@ public class KafkaProducer {
}
public Future<RecordMetadata> send(String key, String value) {
ProducerRecord record = new ProducerRecord("topic_sports_news",
key, value);
ProducerRecord record = new ProducerRecord("topic_sports_news", key, value);
return producer.send(record);
}
@ -36,5 +35,4 @@ public class KafkaProducer {
producer.commitTransaction();
}
}


@ -0,0 +1,35 @@
package com.baeldung.kafka.serdes;
import com.baeldung.kafka.dto.MessageDto;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.kafka.common.errors.SerializationException;
import org.apache.kafka.common.serialization.Deserializer;
import java.util.Map;
public class CustomDeserializer implements Deserializer<MessageDto> {
private ObjectMapper objectMapper = new ObjectMapper();
@Override
public void configure(Map<String, ?> configs, boolean isKey) {
}
@Override
public MessageDto deserialize(String topic, byte[] data) {
try {
if (data == null){
System.out.println("Null received at deserializing");
return null;
}
System.out.println("Deserializing...");
return objectMapper.readValue(new String(data, "UTF-8"), MessageDto.class);
} catch (Exception e) {
throw new SerializationException("Error when deserializing byte[] to MessageDto");
}
}
@Override
public void close() {
}
}


@ -0,0 +1,34 @@
package com.baeldung.kafka.serdes;
import com.baeldung.kafka.dto.MessageDto;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.kafka.common.errors.SerializationException;
import org.apache.kafka.common.serialization.Serializer;
import java.util.Map;
public class CustomSerializer implements Serializer<MessageDto> {
private final ObjectMapper objectMapper = new ObjectMapper();
@Override
public void configure(Map<String, ?> configs, boolean isKey) {
}
@Override
public byte[] serialize(String topic, MessageDto data) {
try {
if (data == null){
System.out.println("Null received at serializing");
return null;
}
System.out.println("Serializing...");
return objectMapper.writeValueAsBytes(data);
} catch (Exception e) {
throw new SerializationException("Error when serializing MessageDto to byte[]");
}
}
@Override
public void close() {
}
}


@ -0,0 +1,42 @@
package com.baeldung.kafka.admin;
import org.apache.kafka.clients.admin.AdminClientConfig;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.testcontainers.containers.KafkaContainer;
import org.testcontainers.junit.jupiter.Container;
import org.testcontainers.junit.jupiter.Testcontainers;
import org.testcontainers.utility.DockerImageName;
import java.util.Properties;
import static org.assertj.core.api.Assertions.assertThat;
// This live test needs a running Docker instance so that a Kafka container can be created
@Testcontainers
class KafkaTopicApplicationLiveTest {
@Container
private static final KafkaContainer KAFKA_CONTAINER = new KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka:5.4.3"));
private KafkaTopicApplication kafkaTopicApplication;
@BeforeEach
void setup() {
Properties properties = new Properties();
properties.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, KAFKA_CONTAINER.getBootstrapServers());
kafkaTopicApplication = new KafkaTopicApplication(properties);
}
@Test
void givenTopicName_whenCreateNewTopic_thenTopicIsCreated() throws Exception {
kafkaTopicApplication.createTopic("test-topic");
String topicCommand = "/usr/bin/kafka-topics --bootstrap-server=localhost:9092 --list";
String stdout = KAFKA_CONTAINER.execInContainer("/bin/sh", "-c", topicCommand)
.getStdout();
assertThat(stdout).contains("test-topic");
}
}


@ -0,0 +1,94 @@
package com.baeldung.kafka.serdes;
import com.baeldung.kafka.dto.MessageDto;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Test;
import org.testcontainers.containers.KafkaContainer;
import org.testcontainers.utility.DockerImageName;
import java.time.Duration;
import java.util.Arrays;
import java.util.Properties;
import java.util.concurrent.atomic.AtomicReference;
import static org.junit.Assert.assertEquals;
// This live test needs a running Docker instance so that a Kafka container can be created
public class KafkaSerDesLiveTest {
private static final String CONSUMER_APP_ID = "consumer_id";
private static final String CONSUMER_GROUP_ID = "group_id";
@ClassRule
public static KafkaContainer kafka = new KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka:5.4.3"));
private final String TOPIC = "mytopic";
private static KafkaConsumer<String, MessageDto> createKafkaConsumer() {
Properties props = new Properties();
props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, kafka.getBootstrapServers());
props.put(ConsumerConfig.CLIENT_ID_CONFIG, CONSUMER_APP_ID);
props.put(ConsumerConfig.GROUP_ID_CONFIG, CONSUMER_GROUP_ID);
props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer");
props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, "com.baeldung.kafka.serdes.CustomDeserializer");
return new KafkaConsumer<>(props);
}
private static KafkaProducer<String, MessageDto> createKafkaProducer() {
Properties props = new Properties();
props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, kafka.getBootstrapServers());
props.put(ProducerConfig.CLIENT_ID_CONFIG, CONSUMER_APP_ID);
props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer");
props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "com.baeldung.kafka.serdes.CustomSerializer");
return new KafkaProducer(props);
}
@Before
public void setUp() {
}
@Test
public void givenKafkaClientShouldSerializeAndDeserialize() throws InterruptedException {
MessageDto msgProd = MessageDto.builder().message("test").version("1.0").build();
KafkaProducer<String, MessageDto> producer = createKafkaProducer();
producer.send(new ProducerRecord<String, MessageDto>(TOPIC, "1", msgProd));
System.out.println("Message sent " + msgProd);
producer.close();
Thread.sleep(2000);
AtomicReference<MessageDto> msgCons = new AtomicReference<>();
KafkaConsumer<String, MessageDto> consumer = createKafkaConsumer();
consumer.subscribe(Arrays.asList(TOPIC));
ConsumerRecords<String, MessageDto> records = consumer.poll(Duration.ofSeconds(1));
records.forEach(record -> {
msgCons.set(record.value());
System.out.println("Message received " + record.value());
});
consumer.close();
assertEquals(msgProd, msgCons.get());
}
}


@ -0,0 +1,281 @@
package com.baeldung.kafka.streamsvsconsumer;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.Serde;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.utils.Bytes;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.StoreQueryParameters;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.Topology;
import org.apache.kafka.streams.kstream.Consumed;
import org.apache.kafka.streams.kstream.Grouped;
import org.apache.kafka.streams.kstream.JoinWindows;
import org.apache.kafka.streams.kstream.KGroupedStream;
import org.apache.kafka.streams.kstream.KGroupedTable;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.KTable;
import org.apache.kafka.streams.kstream.Materialized;
import org.apache.kafka.streams.kstream.Produced;
import org.apache.kafka.streams.kstream.TimeWindows;
import org.apache.kafka.streams.state.KeyValueIterator;
import org.apache.kafka.streams.state.KeyValueStore;
import org.apache.kafka.streams.state.QueryableStoreTypes;
import org.apache.kafka.streams.state.ReadOnlyKeyValueStore;
import org.apache.kafka.streams.state.StoreBuilder;
import org.apache.kafka.streams.state.Stores;
import org.apache.kafka.streams.state.WindowStore;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Test;
import org.testcontainers.containers.KafkaContainer;
import org.testcontainers.utility.DockerImageName;
import java.time.Duration;
import java.util.Arrays;
import java.util.Locale;
import java.util.Properties;
import static org.apache.kafka.clients.consumer.ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG;
import static org.apache.kafka.clients.producer.ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG;
import static org.apache.kafka.clients.producer.ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG;
// This live test needs a running Docker instance so that a Kafka container can be created
public class KafkaStreamsLiveTest {
private final String LEFT_TOPIC = "left-stream-topic";
private final String RIGHT_TOPIC = "right-stream-topic";
private final String LEFT_RIGHT_TOPIC = "left-right-stream-topic";
private KafkaProducer<String, String> producer = createKafkaProducer();
private Properties streamsConfiguration = new Properties();
static final String TEXT_LINES_TOPIC = "TextLinesTopic";
private final String TEXT_EXAMPLE_1 = "test test and test";
private final String TEXT_EXAMPLE_2 = "test filter filter this sentence";
@ClassRule
public static KafkaContainer kafka = new KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka:5.4.3"));
@Before
public void setUp() {
streamsConfiguration.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, kafka.getBootstrapServers());
streamsConfiguration.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
streamsConfiguration.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
streamsConfiguration.put(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG, 1000);
streamsConfiguration.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
}
@Test
public void shouldTestKafkaTableLatestWord() throws InterruptedException {
String inputTopic = "topicTable";
final StreamsBuilder builder = new StreamsBuilder();
KTable<String, String> textLinesTable = builder.table(inputTopic,
Consumed.with(Serdes.String(), Serdes.String()));
textLinesTable.toStream().foreach((word, count) -> System.out.println("Latest word: " + word + " -> " + count));
final Topology topology = builder.build();
streamsConfiguration.put(StreamsConfig.APPLICATION_ID_CONFIG, "latest-word-id");
KafkaStreams streams = new KafkaStreams(topology, streamsConfiguration);
streams.cleanUp();
streams.start();
producer.send(new ProducerRecord<String, String>(inputTopic, "1", TEXT_EXAMPLE_1));
producer.send(new ProducerRecord<String, String>(inputTopic, "2", TEXT_EXAMPLE_2));
Thread.sleep(2000);
streams.close();
}
@Test
public void shouldTestWordCountKafkaStreams() throws InterruptedException {
String wordCountTopic = "wordCountTopic";
final StreamsBuilder builder = new StreamsBuilder();
KStream<String, String> textLines = builder.stream(wordCountTopic,
Consumed.with(Serdes.String(), Serdes.String()));
KTable<String, Long> wordCounts = textLines
.flatMapValues(value -> Arrays.asList(value.toLowerCase(Locale.ROOT)
.split("\\W+")))
.groupBy((key, word) -> word)
.count(Materialized.<String, Long, KeyValueStore<Bytes, byte[]>> as("counts-store"));
wordCounts.toStream().foreach((word, count) -> System.out.println("Word: " + word + " -> " + count));
wordCounts.toStream().to("outputTopic",
Produced.with(Serdes.String(), Serdes.Long()));
streamsConfiguration.put(StreamsConfig.APPLICATION_ID_CONFIG, "wordcount-stream-table-id");
final Topology topology = builder.build();
KafkaStreams streams = new KafkaStreams(topology, streamsConfiguration);
streams.cleanUp();
streams.start();
producer.send(new ProducerRecord<String, String>(wordCountTopic, "1", TEXT_EXAMPLE_1));
producer.send(new ProducerRecord<String, String>(wordCountTopic, "2", TEXT_EXAMPLE_2));
Thread.sleep(2000);
streams.close();
}
// Stateless transformations: re-key each record to its value, upper-case the value, then keep only records containing "FILTER"
@Test
public void shouldTestStatelessTransformations() throws InterruptedException {
String wordCountTopic = "wordCountTopic";
//when
final StreamsBuilder builder = new StreamsBuilder();
KStream<String, String> textLines = builder.stream(wordCountTopic,
Consumed.with(Serdes.String(), Serdes.String()));
final KStream<String, String> textLinesUpperCase =
textLines
.map((key, value) -> KeyValue.pair(value, value.toUpperCase()))
.filter((key, value) -> value.contains("FILTER"));
KTable<String, Long> wordCounts = textLinesUpperCase
.flatMapValues(value -> Arrays.asList(value.split("\\W+")))
.groupBy((key, word) -> word)
.count(Materialized.<String, Long, KeyValueStore<Bytes, byte[]>> as("counts-store"));
wordCounts.toStream().foreach((word, count) -> System.out.println("Word: " + word + " -> " + count));
streamsConfiguration.put(StreamsConfig.APPLICATION_ID_CONFIG, "wordcount-filter-map-id");
final Topology topology = builder.build();
KafkaStreams streams = new KafkaStreams(topology, streamsConfiguration);
streams.cleanUp();
streams.start();
producer.send(new ProducerRecord<String, String>(wordCountTopic, "1", TEXT_EXAMPLE_1));
producer.send(new ProducerRecord<String, String>(wordCountTopic, "2", TEXT_EXAMPLE_2));
Thread.sleep(2000);
streams.close();
}
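// Stateful aggregation: group values by their first two characters and sum the value lengths within each group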
@Test
public void shouldTestAggregationStatefulTransformations() throws InterruptedException {
String aggregationTopic = "aggregationTopic";
final StreamsBuilder builder = new StreamsBuilder();
final KStream<byte[], String> input = builder.stream(aggregationTopic,
Consumed.with(Serdes.ByteArray(), Serdes.String()));
final KTable<String, Long> aggregated = input
.groupBy((key, value) -> (value != null && value.length() > 1) ? value.substring(0, 2).toLowerCase() : "", // guard against values shorter than the two-character prefix
Grouped.with(Serdes.String(), Serdes.String()))
.aggregate(() -> 0L, (aggKey, newValue, aggValue) -> aggValue + newValue.length(),
Materialized.with(Serdes.String(), Serdes.Long()));
aggregated.toStream().foreach((word, count) -> System.out.println("Word: " + word + " -> " + count));
streamsConfiguration.put(StreamsConfig.APPLICATION_ID_CONFIG, "aggregation-id");
final Topology topology = builder.build();
KafkaStreams streams = new KafkaStreams(topology, streamsConfiguration);
streams.cleanUp();
streams.start();
producer.send(new ProducerRecord<String, String>(aggregationTopic, "1", "one"));
producer.send(new ProducerRecord<String, String>(aggregationTopic, "2", "two"));
producer.send(new ProducerRecord<String, String>(aggregationTopic, "3", "three"));
producer.send(new ProducerRecord<String, String>(aggregationTopic, "4", "four"));
producer.send(new ProducerRecord<String, String>(aggregationTopic, "5", "five"));
Thread.sleep(5000);
streams.close();
}
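// Windowed outer join: left and right records are joined when they arrive within 5 seconds of each other; the reduce keeps the latest joined value per key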
@Test
public void shouldTestWindowingJoinStatefulTransformations() throws InterruptedException {
final StreamsBuilder builder = new StreamsBuilder();
KStream<String, String> leftSource = builder.stream(LEFT_TOPIC);
KStream<String, String> rightSource = builder.stream(RIGHT_TOPIC);
KStream<String, String> leftRightSource = leftSource.outerJoin(rightSource,
(leftValue, rightValue) -> "left=" + leftValue + ", right=" + rightValue,
JoinWindows.of(Duration.ofSeconds(5)))
.groupByKey()
.reduce((firstValue, secondValue) -> secondValue)
.toStream();
leftRightSource.foreach((key, value) -> System.out.println("(key= " + key + ") -> (" + value + ")"));
final Topology topology = builder.build();
streamsConfiguration.put(StreamsConfig.APPLICATION_ID_CONFIG, "windowing-join-id");
KafkaStreams streams = new KafkaStreams(topology, streamsConfiguration);
streams.cleanUp();
streams.start();
producer.send(new ProducerRecord<String, String>(LEFT_TOPIC, "1", "left"));
producer.send(new ProducerRecord<String, String>(RIGHT_TOPIC, "2", "right"));
Thread.sleep(2000);
streams.close();
}
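// Interactive queries: the counts are materialized into the named "WordCountsStore" and read back through streams.store() after processing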
@Test
public void shouldTestWordCountWithInteractiveQueries() throws InterruptedException {
final Serde<String> stringSerde = Serdes.String();
final StreamsBuilder builder = new StreamsBuilder();
final KStream<String, String>
textLines = builder.stream(TEXT_LINES_TOPIC, Consumed.with(Serdes.String(), Serdes.String()));
final KGroupedStream<String, String> groupedByWord = textLines
.flatMapValues(value -> Arrays.asList(value.toLowerCase().split("\\W+")))
.groupBy((key, word) -> word, Grouped.with(stringSerde, stringSerde));
groupedByWord.count(Materialized.<String, Long, KeyValueStore<Bytes, byte[]>>as("WordCountsStore")
.withValueSerde(Serdes.Long()));
streamsConfiguration.put(StreamsConfig.APPLICATION_ID_CONFIG, "wordcount-interactive-queries");
final KafkaStreams streams = new KafkaStreams(builder.build(), streamsConfiguration);
streams.cleanUp();
streams.start();
producer.send(new ProducerRecord<String, String>(TEXT_LINES_TOPIC, "1", TEXT_EXAMPLE_1));
producer.send(new ProducerRecord<String, String>(TEXT_LINES_TOPIC, "2", TEXT_EXAMPLE_2));
Thread.sleep(2000);
ReadOnlyKeyValueStore<String, Long> keyValueStore =
streams.store(StoreQueryParameters.fromNameAndType(
"WordCountsStore", QueryableStoreTypes.keyValueStore()));
KeyValueIterator<String, Long> range = keyValueStore.all();
while (range.hasNext()) {
KeyValue<String, Long> next = range.next();
System.out.println("Count for " + next.key + ": " + next.value);
}
streams.close();
}
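// Creates a String/String producer pointed at the broker started by the Testcontainers class rule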
private static KafkaProducer<String, String> createKafkaProducer() {
Properties props = new Properties();
props.put(BOOTSTRAP_SERVERS_CONFIG, kafka.getBootstrapServers());
props.put(KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer");
props.put(VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer");
return new KafkaProducer<>(props);
}
}

View File

@ -1,7 +1,14 @@
package com.baeldung.kafkastreams;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.Properties;
import java.util.regex.Pattern;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.Serde;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.StreamsBuilder;
@ -10,16 +17,12 @@ import org.apache.kafka.streams.Topology;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.KTable;
import org.apache.kafka.streams.kstream.Produced;
import org.apache.kafka.test.TestUtils;
import org.junit.Ignore;
import org.junit.Test;
import java.util.Arrays;
import java.util.Properties;
import java.util.regex.Pattern;
public class KafkaStreamsLiveTest {
private String bootstrapServers = "localhost:9092";
private Path stateDirectory;
@Test
@Ignore("it needs to have kafka broker running on local")
@ -30,28 +33,43 @@ public class KafkaStreamsLiveTest {
Properties streamsConfiguration = new Properties();
streamsConfiguration.put(StreamsConfig.APPLICATION_ID_CONFIG, "wordcount-live-test");
streamsConfiguration.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
streamsConfiguration.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
streamsConfiguration.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
streamsConfiguration.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String()
.getClass()
.getName());
streamsConfiguration.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String()
.getClass()
.getName());
streamsConfiguration.put(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG, 1000);
streamsConfiguration.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
// Use a temporary directory for storing state, which will be automatically removed after the test.
streamsConfiguration.put(StreamsConfig.STATE_DIR_CONFIG, TestUtils.tempDirectory().getAbsolutePath());
try {
this.stateDirectory = Files.createTempDirectory("kafka-streams");
streamsConfiguration.put(StreamsConfig.STATE_DIR_CONFIG, this.stateDirectory.toAbsolutePath()
.toString());
} catch (final IOException e) {
throw new UncheckedIOException("Cannot create temporary directory", e);
}
// when
StreamsBuilder builder = new StreamsBuilder();
final StreamsBuilder builder = new StreamsBuilder();
KStream<String, String> textLines = builder.stream(inputTopic);
Pattern pattern = Pattern.compile("\\W+", Pattern.UNICODE_CHARACTER_CLASS);
KTable<String, Long> wordCounts = textLines.flatMapValues(value -> Arrays.asList(pattern.split(value.toLowerCase()))).groupBy((key, word) -> word).count();
KTable<String, Long> wordCounts = textLines.flatMapValues(value -> Arrays.asList(pattern.split(value.toLowerCase())))
.groupBy((key, word) -> word)
.count();
textLines.foreach((word, count) -> System.out.println("word: " + word + " -> " + count));
wordCounts.toStream()
.foreach((word, count) -> System.out.println("word: " + word + " -> " + count));
String outputTopic = "outputTopic";
final Serde<String> stringSerde = Serdes.String();
final Serde<String> longSerde = Serdes.String();
textLines.to(outputTopic, Produced.with(stringSerde,longSerde));
KafkaStreams streams = new KafkaStreams(new Topology(), streamsConfiguration);
wordCounts.toStream()
.to(outputTopic, Produced.with(Serdes.String(), Serdes.Long()));
final Topology topology = builder.build();
KafkaStreams streams = new KafkaStreams(topology, streamsConfiguration);
streams.start();
// then

View File

@ -1,5 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<artifactId>apache-libraries</artifactId>
@ -29,7 +30,6 @@
<artifactId>avro-maven-plugin</artifactId>
<version>${avro.version}</version>
</dependency>
<!-- beam -->
<dependency>
<groupId>org.apache.beam</groupId>
@ -43,7 +43,6 @@
<version>${beam.version}</version>
<scope>runtime</scope>
</dependency>
<!-- bval -->
<dependency>
<groupId>org.apache.bval</groupId>
@ -60,7 +59,6 @@
<artifactId>bval-extras</artifactId>
<version>${bval.version}</version>
</dependency>
<!-- meecrowave -->
<dependency>
<groupId>org.apache.meecrowave</groupId>
@ -84,14 +82,12 @@
<version>${meecrowave-junit.version}</version>
<scope>test</scope>
</dependency>
<!-- opennlp -->
<dependency>
<groupId>org.apache.opennlp</groupId>
<artifactId>opennlp-tools</artifactId>
<version>${opennlp.opennlp-tools.version}</version>
</dependency>
<!-- pulsar -->
<dependency>
<groupId>org.apache.pulsar</groupId>
@ -99,14 +95,12 @@
<version>${pulsar-client.version}</version>
<scope>compile</scope>
</dependency>
<!-- zookeeper -->
<dependency>
<groupId>org.apache.zookeeper</groupId>
<artifactId>zookeeper</artifactId>
<version>${zookeeper.version}</version>
</dependency>
<!-- curator -->
<dependency>
<groupId>org.apache.curator</groupId>
@ -139,27 +133,29 @@
<artifactId>jackson-databind</artifactId>
<version>${jackson.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-annotations</artifactId>
<version>${jackson.version}</version>
</dependency>
<dependency>
<groupId>com.jayway.awaitility</groupId>
<artifactId>awaitility</artifactId>
<version>${avaitility.version}</version>
<scope>test</scope>
</dependency>
<!-- geode -->
<dependency>
<groupId>org.apache.geode</groupId>
<artifactId>geode-core</artifactId>
<version>${geode.core}</version>
</dependency>
<!-- solr -->
<dependency>
<groupId>org.apache.solr</groupId>
<artifactId>solr-solrj</artifactId>
<version>${solr.solr-solrj.version}</version>
</dependency>
<!-- common -->
<dependency>
<groupId>org.assertj</groupId>
@ -171,7 +167,7 @@
<build>
<plugins>
<!-- avro -->
<!-- avro -->
<plugin>
<groupId>org.apache.avro</groupId>
<artifactId>avro-maven-plugin</artifactId>
@ -192,7 +188,6 @@
</execution>
</executions>
</plugin>
<!-- meecrowave -->
<plugin>
<groupId>org.apache.meecrowave</groupId>
@ -206,7 +201,6 @@
<maven.compiler.source>1.8</maven.compiler.source>
<maven.compiler.target>1.8</maven.compiler.target>
<avro.version>1.8.2</avro.version>
<slf4j.version>1.7.25</slf4j.version>
<beam.version>2.19.0</beam.version>
<assertj.version>3.9.0</assertj.version>
<bval.version>1.1.2</bval.version>
@ -225,4 +219,4 @@
<solr.solr-solrj.version>6.4.0</solr.solr-solrj.version>
</properties>
</project>
</project>

View File

@ -0,0 +1,18 @@
<?xml version="1.0" encoding="UTF-8"?>
<Configuration status="WARN">
<Appenders>
<Console name="Console" target="SYSTEM_OUT">
<PatternLayout pattern="%d{HH:mm:ss.SSS} [%t] %-5level %logger{36} - %msg%n"/>
</Console>
</Appenders>
<Loggers>
<Logger name="org.apache.meecrowave" level="warn">
<AppenderRef ref="Console"/>
</Logger>
<Root level="info">
<AppenderRef ref="Console"/>
</Root>
</Loggers>
</Configuration>

File diff suppressed because it is too large

View File

@ -10,7 +10,6 @@ import com.baeldung.apache.beam.intro.WordCount;
public class WordCountUnitTest {
@Test
// @Ignore
public void givenInputFile_whenWordCountRuns_thenJobFinishWithoutError() {
boolean jobDone = WordCount.wordCount("src/test/resources/wordcount.txt", "target/output");
assertTrue(jobDone);

View File

@ -1,6 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<project
xmlns="http://maven.apache.org/POM/4.0.0"
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
@ -40,7 +39,6 @@
<artifactId>spring-boot-starter-test</artifactId>
<scope>test</scope>
</dependency>
<!-- Olingo 2 Dependencies -->
<dependency>
<groupId>org.apache.olingo</groupId>
@ -85,4 +83,4 @@
<olingo2.version>2.0.11</olingo2.version>
</properties>
</project>
</project>

View File

@ -6,7 +6,12 @@ spring:
application-path: /odata
jpa:
defer-datasource-initialization: true
show-sql: true
open-in-view: false
hibernate:
ddl-auto: update
ddl-auto: update
sql:
init:
mode: always

View File

@ -1,6 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<project
xmlns="http://maven.apache.org/POM/4.0.0"
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
@ -38,4 +37,4 @@
<jexcel.version>1.0.6</jexcel.version>
</properties>
</project>
</project>

View File

@ -0,0 +1,18 @@
package com.baeldung.poi.excel.multilinetext;
import org.apache.poi.ss.usermodel.Cell;
import org.apache.poi.ss.usermodel.CellStyle;
public class MultilineText {
public void formatMultilineText(Cell cell) {
// Double the row height so both lines of the wrapped text stay visible
cell.getRow()
.setHeightInPoints(cell.getSheet()
.getDefaultRowHeightInPoints() * 2);
// Enable wrapping so the embedded line break is rendered instead of being ignored
CellStyle cellStyle = cell.getSheet()
.getWorkbook()
.createCellStyle();
cellStyle.setWrapText(true);
cell.setCellStyle(cellStyle);
}
}

View File

@ -0,0 +1,69 @@
package com.baeldung.poi.excel.multilinetext;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.URISyntaxException;
import java.nio.file.Paths;
import org.apache.poi.ss.usermodel.Cell;
import org.apache.poi.ss.usermodel.DataFormatter;
import org.apache.poi.ss.usermodel.Row;
import org.apache.poi.ss.usermodel.Sheet;
import org.apache.poi.ss.usermodel.Workbook;
import org.apache.poi.xssf.usermodel.XSSFWorkbook;
import org.junit.Before;
import org.junit.Test;
public class MultilineTextUnitTest {
private static final String FILE_NAME = "com/baeldung/poi/excel/multilinetext/MultilineTextTest.xlsx";
private static final String NEW_FILE_NAME = "MultilineTextTest_output.xlsx";
private static final int STRING_ROW_INDEX = 1;
private static final int STRING_CELL_INDEX = 0;
private String fileLocation;
@Before
public void setup() throws IOException, URISyntaxException {
fileLocation = Paths.get(ClassLoader.getSystemResource(FILE_NAME)
.toURI())
.toString();
}
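// Writes a two-line value into a cell, applies the multiline formatting, then reloads the file to verify row height and cell content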
@Test
public void givenMultilineTextCell_whenFormated_thenMultilineTextVisible() throws IOException {
Workbook workbook = new XSSFWorkbook(fileLocation);
Sheet sheet = workbook.getSheetAt(0);
sheet.createRow(STRING_ROW_INDEX);
Row row = sheet.getRow(STRING_ROW_INDEX);
Cell cell = row.createCell(STRING_CELL_INDEX);
cell.setCellValue("Hello \n world!");
MultilineText multilineText = new MultilineText();
multilineText.formatMultilineText(cell);
FileOutputStream outputStream = new FileOutputStream(NEW_FILE_NAME);
workbook.write(outputStream);
outputStream.close();
File file = new File(NEW_FILE_NAME);
FileInputStream fileInputStream = new FileInputStream(file);
Workbook testWorkbook = new XSSFWorkbook(fileInputStream);
assertEquals(row.getHeightInPoints(), testWorkbook.getSheetAt(0)
.getRow(STRING_ROW_INDEX)
.getHeightInPoints(), 0.0f);
DataFormatter formatter = new DataFormatter();
assertEquals("Hello \n world!", formatter.formatCellValue(testWorkbook.getSheetAt(0)
.getRow(STRING_ROW_INDEX)
.getCell(STRING_CELL_INDEX)));
testWorkbook.close();
fileInputStream.close();
file.delete();
workbook.close();
}
}

View File

@ -1,6 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<project
xmlns="http://maven.apache.org/POM/4.0.0"
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
@ -26,4 +25,5 @@
<geode.core>1.6.0</geode.core>
<rocketmq.version>2.0.4</rocketmq.version>
</properties>
</project>
</project>

View File

@ -1,6 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<project
xmlns="http://maven.apache.org/POM/4.0.0"
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
@ -40,10 +39,6 @@
<scope>runtime</scope>
</dependency>
<!-- spring-sec -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-web</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-security</artifactId>
@ -55,4 +50,4 @@
<log4j-version>1.2.17</log4j-version>
</properties>
</project>
</project>

View File

@ -1,6 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<project
xmlns="http://maven.apache.org/POM/4.0.0"
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
@ -95,20 +94,20 @@
<repositories>
<repository>
<id>SparkPackagesRepo</id>
<url>http://dl.bintray.com/spark-packages/maven</url>
<url>https://repos.spark-packages.org</url>
</repository>
</repositories>
<properties>
<org.apache.spark.spark-core.version>2.3.0</org.apache.spark.spark-core.version>
<org.apache.spark.spark-sql.version>2.3.0</org.apache.spark.spark-sql.version>
<org.apache.spark.spark-streaming.version>2.3.0</org.apache.spark.spark-streaming.version>
<org.apache.spark.spark-mllib.version>2.3.0</org.apache.spark.spark-mllib.version>
<org.apache.spark.spark-graphx.version>2.3.0</org.apache.spark.spark-graphx.version>
<graphframes.version>0.7.0-spark2.4-s_2.11</graphframes.version>
<org.apache.spark.spark-streaming-kafka.version>2.3.0</org.apache.spark.spark-streaming-kafka.version>
<com.datastax.spark.spark-cassandra-connector.version>2.3.0</com.datastax.spark.spark-cassandra-connector.version>
<com.datastax.spark.spark-cassandra-connector-java.version>1.5.2</com.datastax.spark.spark-cassandra-connector-java.version>
<org.apache.spark.spark-core.version>2.4.8</org.apache.spark.spark-core.version>
<org.apache.spark.spark-sql.version>2.4.8</org.apache.spark.spark-sql.version>
<org.apache.spark.spark-streaming.version>2.4.8</org.apache.spark.spark-streaming.version>
<org.apache.spark.spark-mllib.version>2.4.8</org.apache.spark.spark-mllib.version>
<org.apache.spark.spark-graphx.version>2.4.8</org.apache.spark.spark-graphx.version>
<graphframes.version>0.8.1-spark3.0-s_2.12</graphframes.version>
<org.apache.spark.spark-streaming-kafka.version>2.4.8</org.apache.spark.spark-streaming-kafka.version>
<com.datastax.spark.spark-cassandra-connector.version>2.5.2</com.datastax.spark.spark-cassandra-connector.version>
<com.datastax.spark.spark-cassandra-connector-java.version>1.6.0-M1</com.datastax.spark.spark-cassandra-connector-java.version>
</properties>
</project>
</project>

View File

@ -0,0 +1,42 @@
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Set everything to be logged to the console
log4j.rootCategory=WARN, console
log4j.appender.console=org.apache.log4j.ConsoleAppender
log4j.appender.console.target=System.err
log4j.appender.console.layout=org.apache.log4j.PatternLayout
log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n
# Set the default spark-shell log level to WARN. When running the spark-shell, the
# log level for this class is used to overwrite the root logger's log level, so that
# the user can have different defaults for the shell and regular Spark apps.
log4j.logger.org.apache.spark.repl.Main=WARN
# Settings to quiet third party logs that are too verbose
log4j.logger.org.spark_project.jetty=WARN
log4j.logger.org.spark_project.jetty.util.component.AbstractLifeCycle=ERROR
log4j.logger.org.apache.spark.repl.SparkIMain$exprTyper=INFO
log4j.logger.org.apache.spark.repl.SparkILoop$SparkILoopInterpreter=INFO
# SPARK-9183: Settings to avoid annoying messages when looking up nonexistent UDFs in SparkSQL with Hive support
log4j.logger.org.apache.hadoop.hive.metastore.RetryingHMSHandler=FATAL
log4j.logger.org.apache.hadoop.hive.ql.exec.FunctionRegistry=ERROR
# Parquet related logging
log4j.logger.org.apache.parquet.CorruptStatistics=ERROR
log4j.logger.parquet.CorruptStatistics=ERROR

View File

@ -1,8 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<project
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns="http://maven.apache.org/POM/4.0.0">
<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns="http://maven.apache.org/POM/4.0.0"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>com.baeldung</groupId>
<artifactId>apache-tapestry</artifactId>
@ -12,45 +11,42 @@
<dependencies>
<!-- To set up an application with a database, change the artifactId below to tapestry-hibernate,
and add a dependency on your JDBC driver. You'll also need to add Hibernate configuration files, such as hibernate.cfg.xml. -->
and add a dependency on your JDBC driver. You'll also need to add Hibernate configuration files, such
as hibernate.cfg.xml. -->
<dependency>
<groupId>org.apache.tapestry</groupId>
<artifactId>tapestry-core</artifactId>
<version>${tapestry-release-version}</version>
</dependency>
<!-- Include the Log4j implementation for the SLF4J logging framework -->
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<version>${slf4j-release-version}</version>
</dependency>
<dependency>
<groupId>org.apache.tapestry</groupId>
<artifactId>tapestry-webresources</artifactId>
<version>${tapestry-release-version}</version>
</dependency>
<!-- Uncomment this to add support for file uploads: -->
<!-- <dependency> <groupId>org.apache.tapestry</groupId> <artifactId>tapestry-upload</artifactId> <version>${tapestry-release-version}</version> </dependency> -->
<!-- A dependency on either JUnit or TestNG is required, or the surefire plugin (which runs the tests) will fail, preventing Maven from packaging the WAR. Tapestry includes
a large number of testing facilities designed for use with TestNG (http://testng.org/), so it's recommended. -->
<!-- <dependency> <groupId>org.apache.tapestry</groupId> <artifactId>tapestry-upload</artifactId>
<version>${tapestry-release-version}</version> </dependency> -->
<!-- A dependency on either JUnit or TestNG is required, or the surefire plugin (which runs the
tests) will fail, preventing Maven from packaging the WAR. Tapestry includes a large number of testing
facilities designed for use with TestNG (http://testng.org/), so it's recommended. -->
<dependency>
<groupId>org.testng</groupId>
<artifactId>testng</artifactId>
<version>${testng-release-version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.tapestry</groupId>
<artifactId>tapestry-test</artifactId>
<version>${tapestry-release-version}</version>
<scope>test</scope>
</dependency>
<!-- Provided by the servlet container, but sometimes referenced in the application code. -->
<dependency>
<groupId>javax.servlet</groupId>
@ -58,7 +54,6 @@
<version>${servlet-api-release-version}</version>
<scope>provided</scope>
</dependency>
<!-- Provide dependency to the Tapestry javadoc taglet which replaces the Maven component report -->
<dependency>
<groupId>org.apache.tapestry</groupId>
@ -81,7 +76,6 @@
<optimize>true</optimize>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
@ -92,7 +86,6 @@
</systemPropertyVariables>
</configuration>
</plugin>
<!-- Run the application using "mvn jetty:run" -->
<plugin>
<groupId>org.mortbay.jetty</groupId>
@ -101,7 +94,8 @@
<configuration>
<!-- Log to the console. -->
<requestLog implementation="org.mortbay.jetty.NCSARequestLog">
<!-- This doesn't do anything for Jetty, but is a workaround for a Maven bug that prevents the requestLog from being set. -->
<!-- This doesn't do anything for Jetty, but is a workaround for a Maven bug
that prevents the requestLog from being set. -->
<append>true</append>
</requestLog>
<systemProperties>
@ -115,15 +109,13 @@
</plugins>
</build>
<reporting />
<repositories>
<repository>
<id>jboss</id>
<url>http://repository.jboss.org/nexus/content/groups/public/</url>
</repository>
<!-- This repository is only needed when the Tapestry version is a preview release, rather than a final release. -->
<!-- This repository is only needed when the Tapestry version is a preview release, rather than
a final release. -->
<repository>
<id>apache-staging</id>
<url>https://repository.apache.org/content/groups/staging/</url>
@ -142,4 +134,4 @@
<slf4j-release-version>1.7.19</slf4j-release-version>
</properties>
</project>
</project>

View File

@ -1,6 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<project
xmlns="http://maven.apache.org/POM/4.0.0"
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
@ -27,11 +26,10 @@
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-simple</artifactId>
<version>${org.slf4j.slf4j-simple.version}</version>
<version>${org.slf4j.version}</version>
<scope>test</scope>
</dependency>
</dependencies>
@ -63,8 +61,7 @@
<properties>
<thrift.version>0.10.0</thrift.version>
<maven-thrift.version>0.1.11</maven-thrift.version>
<org.slf4j.slf4j-simple.version>1.7.12</org.slf4j.slf4j-simple.version>
<build-helper-maven-plugin.version>3.0.0</build-helper-maven-plugin.version>
</properties>
</project>
</project>

View File

@ -1,6 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<project
xmlns="http://maven.apache.org/POM/4.0.0"
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>

View File

@ -1,6 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<project
xmlns="http://maven.apache.org/POM/4.0.0"
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>

View File

@ -1,6 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<project
xmlns="http://maven.apache.org/POM/4.0.0"
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
@ -69,4 +68,4 @@
<asciidoctorj-pdf.plugin.version>1.5.0-alpha.15</asciidoctorj-pdf.plugin.version>
</properties>
</project>
</project>

View File

@ -1,6 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<project
xmlns="http://maven.apache.org/POM/4.0.0"
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>

View File

@ -1,128 +1,128 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<artifactId>atomikos</artifactId>
<name>atomikos</name>
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<artifactId>atomikos</artifactId>
<name>atomikos</name>
<parent>
<groupId>com.baeldung</groupId>
<artifactId>parent-modules</artifactId>
<version>1.0.0-SNAPSHOT</version>
</parent>
<parent>
<groupId>com.baeldung</groupId>
<artifactId>parent-modules</artifactId>
<version>1.0.0-SNAPSHOT</version>
</parent>
<dependencies>
<dependency>
<groupId>com.atomikos</groupId>
<artifactId>transactions-jdbc</artifactId>
<version>${atomikos-version}</version>
</dependency>
<dependency>
<groupId>com.atomikos</groupId>
<artifactId>transactions-jms</artifactId>
<version>${atomikos-version}</version>
</dependency>
<dependency>
<groupId>com.atomikos</groupId>
<artifactId>transactions-hibernate4</artifactId>
<version>${atomikos-version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-context</artifactId>
<version>${spring-version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-tx</artifactId>
<version>${spring-version}</version>
</dependency>
<dependency>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-jpa</artifactId>
<version>${spring-data-jpa.version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-test</artifactId>
<version>${spring-version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-core</artifactId>
<version>${hibernate.version}</version>
<scope>provided</scope>
<exclusions>
<exclusion>
<groupId>javax.transaction</groupId>
<artifactId>jta</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.activemq</groupId>
<artifactId>activemq-core</artifactId>
<version>${activemq-core.version}</version>
</dependency>
<dependency>
<groupId>org.apache.derby</groupId>
<artifactId>derby</artifactId>
<version>${derby.version}</version>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>${junit.version}</version>
<scope>test</scope>
</dependency>
<!-- the JTA API -->
<dependency>
<groupId>javax.transaction</groupId>
<artifactId>jta</artifactId>
<version>${jta.version}</version>
</dependency>
<dependency>
<groupId>org.apache.geronimo.specs</groupId>
<artifactId>geronimo-jta_1.0.1B_spec</artifactId>
<version>${geronimo.version}</version>
</dependency>
<dependency>
<groupId>javax.validation</groupId>
<artifactId>validation-api</artifactId>
<version>${validation-api.version}</version>
</dependency>
<dependency>
<groupId>org.hibernate.validator</groupId>
<artifactId>hibernate-validator</artifactId>
<version>${hibernate-validator.version}</version>
</dependency>
<dependency>
<groupId>javax.el</groupId>
<artifactId>javax.el-api</artifactId>
<version>${javax.el-api.version}</version>
</dependency>
<dependency>
<groupId>org.glassfish.web</groupId>
<artifactId>javax.el</artifactId>
<version>${javax.el.version}</version>
</dependency>
</dependencies>
<dependencies>
<dependency>
<groupId>com.atomikos</groupId>
<artifactId>transactions-jdbc</artifactId>
<version>${atomikos-version}</version>
</dependency>
<dependency>
<groupId>com.atomikos</groupId>
<artifactId>transactions-jms</artifactId>
<version>${atomikos-version}</version>
</dependency>
<dependency>
<groupId>com.atomikos</groupId>
<artifactId>transactions-hibernate4</artifactId>
<version>${atomikos-version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-context</artifactId>
<version>${spring-version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-tx</artifactId>
<version>${spring-version}</version>
</dependency>
<dependency>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-jpa</artifactId>
<version>${spring-data-jpa.version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-test</artifactId>
<version>${spring-version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-core</artifactId>
<version>${hibernate.version}</version>
<scope>provided</scope>
<exclusions>
<exclusion>
<groupId>javax.transaction</groupId>
<artifactId>jta</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.activemq</groupId>
<artifactId>activemq-core</artifactId>
<version>${activemq-core.version}</version>
</dependency>
<dependency>
<groupId>org.apache.derby</groupId>
<artifactId>derby</artifactId>
<version>${derby.version}</version>
</dependency>
<dependency>
<groupId>org.junit.vintage</groupId>
<artifactId>junit-vintage-engine</artifactId>
<version>${junit-jupiter.version}</version>
<scope>test</scope>
</dependency>
<!-- the JTA API -->
<dependency>
<groupId>javax.transaction</groupId>
<artifactId>jta</artifactId>
<version>${jta.version}</version>
</dependency>
<dependency>
<groupId>org.apache.geronimo.specs</groupId>
<artifactId>geronimo-jta_1.0.1B_spec</artifactId>
<version>${geronimo.version}</version>
</dependency>
<dependency>
<groupId>javax.validation</groupId>
<artifactId>validation-api</artifactId>
<version>${validation-api.version}</version>
</dependency>
<dependency>
<groupId>org.hibernate.validator</groupId>
<artifactId>hibernate-validator</artifactId>
<version>${hibernate-validator.version}</version>
</dependency>
<dependency>
<groupId>javax.el</groupId>
<artifactId>javax.el-api</artifactId>
<version>${javax.el-api.version}</version>
</dependency>
<dependency>
<groupId>org.glassfish.web</groupId>
<artifactId>javax.el</artifactId>
<version>${javax.el.version}</version>
</dependency>
</dependencies>
<properties>
<atomikos-version>5.0.6</atomikos-version>
<spring-version>5.1.6.RELEASE</spring-version>
<hibernate.version>5.4.3.Final</hibernate.version>
<spring-data-jpa.version>1.11.23.RELEASE</spring-data-jpa.version>
<activemq-core.version>5.7.0</activemq-core.version>
<derby.version>10.8.1.2</derby.version>
<jta.version>1.1</jta.version>
<geronimo.version>1.0</geronimo.version>
<validation-api.version>2.0.1.Final</validation-api.version>
<hibernate-validator.version>6.1.2.Final</hibernate-validator.version>
<javax.el-api.version>3.0.0</javax.el-api.version>
<javax.el.version>2.2.4</javax.el.version>
</properties>
<properties>
<atomikos-version>5.0.6</atomikos-version>
<spring-version>5.1.6.RELEASE</spring-version>
<hibernate.version>5.4.3.Final</hibernate.version>
<spring-data-jpa.version>1.11.23.RELEASE</spring-data-jpa.version>
<activemq-core.version>5.7.0</activemq-core.version>
<derby.version>10.8.1.2</derby.version>
<jta.version>1.1</jta.version>
<geronimo.version>1.0</geronimo.version>
<validation-api.version>2.0.1.Final</validation-api.version>
<hibernate-validator.version>6.1.2.Final</hibernate-validator.version>
<javax.el-api.version>3.0.0</javax.el-api.version>
<javax.el.version>2.2.4</javax.el.version>
</properties>
</project>

View File

@ -1,6 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<project
xmlns="http://maven.apache.org/POM/4.0.0"
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
@ -32,4 +31,4 @@
<atomix-all.version>1.0.0-rc9</atomix-all.version>
</properties>
</project>
</project>

View File

@ -1,11 +1,12 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<artifactId>aws-app-sync</artifactId>
<name>aws-app-sync</name>
<description>Spring Boot using AWS App Sync</description>
<parent>
<groupId>com.baeldung</groupId>
<artifactId>parent-boot-2</artifactId>
@ -14,12 +15,10 @@
</parent>
<dependencies>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-web</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-test</artifactId>
@ -46,4 +45,4 @@
</plugins>
</build>
</project>
</project>

Some files were not shown because too many files have changed in this diff