Merge remote-tracking branch 'upstream/master'

This commit is contained in:
Kevin Gilmore 2018-11-29 20:51:40 -06:00
commit 136a66a7b5
100 changed files with 3412 additions and 41 deletions

View File

@ -0,0 +1,67 @@
package com.baeldung.java.list;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.ListIterator;
/**
* Demonstrates the different ways to loop over
* the elements of a list.
*/
/**
 * Shows several equivalent techniques for traversing the elements of a
 * {@link List}, printing every entry to standard output.
 */
public class WaysToIterate {

    List<String> countries = Arrays.asList("Germany", "Panama", "Australia");

    /**
     * Iterate over a list using a basic for loop
     */
    public void iterateWithForLoop() {
        for (int index = 0; index < countries.size(); index++) {
            System.out.println(countries.get(index));
        }
    }

    /**
     * Iterate over a list using the enhanced for loop
     */
    public void iterateWithEnhancedForLoop() {
        for (String name : countries) {
            System.out.println(name);
        }
    }

    /**
     * Iterate over a list using an Iterator
     */
    public void iterateWithIterator() {
        // for-loop form keeps the iterator's scope limited to the loop
        for (Iterator<String> it = countries.iterator(); it.hasNext(); ) {
            System.out.println(it.next());
        }
    }

    /**
     * Iterate over a list using a ListIterator
     */
    public void iterateWithListIterator() {
        for (ListIterator<String> it = countries.listIterator(); it.hasNext(); ) {
            System.out.println(it.next());
        }
    }

    /**
     * Iterate over a list using the Iterable.forEach() method
     */
    public void iterateWithForEach() {
        countries.forEach(name -> System.out.println(name));
    }

    /**
     * Iterate over a list using the Stream.forEach() method
     */
    public void iterateWithStreamForEach() {
        countries.stream().forEach(System.out::println);
    }
}

View File

@ -0,0 +1,71 @@
package com.baeldung.java.list;
import static org.junit.Assert.assertEquals;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.ListIterator;
import org.junit.Test;
public class WaysToIterateUnitTest {

    List<String> globalCountries = new ArrayList<String>();
    // NOTE(review): despite the name, this list contains non-European
    // countries; kept as-is because the field is part of the test fixture.
    List<String> europeanCountries = Arrays.asList("Germany", "Panama", "Australia");

    // BUG FIX (all tests below): JUnit's assertEquals takes (expected, actual);
    // the original passed them reversed, which produces misleading failure
    // messages such as "expected <actual> but was <3>".

    @Test
    public void whenIteratingUsingForLoop_thenReturnThreeAsSizeOfList() {
        for (int i = 0; i < europeanCountries.size(); i++) {
            globalCountries.add(europeanCountries.get(i));
        }
        assertEquals(3, globalCountries.size());
        globalCountries.clear();
    }

    @Test
    public void whenIteratingUsingEnhancedForLoop_thenReturnThreeAsSizeOfList() {
        for (String country : europeanCountries) {
            globalCountries.add(country);
        }
        assertEquals(3, globalCountries.size());
        globalCountries.clear();
    }

    @Test
    public void whenIteratingUsingIterator_thenReturnThreeAsSizeOfList() {
        Iterator<String> countriesIterator = europeanCountries.iterator();
        while (countriesIterator.hasNext()) {
            globalCountries.add(countriesIterator.next());
        }
        assertEquals(3, globalCountries.size());
        globalCountries.clear();
    }

    @Test
    public void whenIteratingUsingListIterator_thenReturnThreeAsSizeOfList() {
        ListIterator<String> countriesIterator = europeanCountries.listIterator();
        while (countriesIterator.hasNext()) {
            globalCountries.add(countriesIterator.next());
        }
        assertEquals(3, globalCountries.size());
        globalCountries.clear();
    }

    @Test
    public void whenIteratingUsingForEach_thenReturnThreeAsSizeOfList() {
        europeanCountries.forEach(country -> globalCountries.add(country));
        assertEquals(3, globalCountries.size());
        globalCountries.clear();
    }

    @Test
    public void whenIteratingUsingStreamForEach_thenReturnThreeAsSizeOfList() {
        europeanCountries.stream().forEach((country) -> globalCountries.add(country));
        assertEquals(3, globalCountries.size());
        globalCountries.clear();
    }
}

View File

@ -0,0 +1,33 @@
package com.baeldung.concurrent.countdownlatch;
import java.util.concurrent.CountDownLatch;
public class CountdownLatchCountExample {

    private int count;

    public CountdownLatchCountExample(int count) {
        this.count = count;
    }

    /**
     * Counts the latch down twice from a single worker thread, waits for it,
     * and reports whether the count reached zero.
     *
     * @return true when the latch count is zero after the worker finished
     */
    public boolean callTwiceInSameThread() {
        CountDownLatch latch = new CountDownLatch(count);
        Thread worker = new Thread(() -> {
            latch.countDown();
            latch.countDown();
        });
        worker.start();
        try {
            // blocks until the latch reaches zero
            latch.await();
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
        return latch.getCount() == 0;
    }

    public static void main(String[] args) {
        CountdownLatchCountExample ex = new CountdownLatchCountExample(2);
        System.out.println("Is CountDown Completed : " + ex.callTwiceInSameThread());
    }
}

View File

@ -0,0 +1,41 @@
package com.baeldung.concurrent.countdownlatch;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.atomic.AtomicInteger;
public class CountdownLatchResetExample {

    private int count;
    private int threadCount;
    private final AtomicInteger updateCount;

    CountdownLatchResetExample(int count, int threadCount) {
        updateCount = new AtomicInteger(0);
        this.count = count;
        this.threadCount = threadCount;
    }

    /**
     * Demonstrates that a CountDownLatch cannot be reset: once the count hits
     * zero, further countDown() calls leave it at zero.
     *
     * @return the number of workers that observed the latch count change as a
     *         result of their countDown() call
     */
    public int countWaits() {
        CountDownLatch countDownLatch = new CountDownLatch(count);
        ExecutorService es = Executors.newFixedThreadPool(threadCount);
        for (int i = 0; i < threadCount; i++) {
            es.execute(() -> {
                long prevValue = countDownLatch.getCount();
                countDownLatch.countDown();
                if (countDownLatch.getCount() != prevValue) {
                    updateCount.incrementAndGet();
                }
            });
        }
        es.shutdown();
        // BUG FIX: the original read updateCount immediately after shutdown(),
        // racing with the still-running workers; wait for them to finish so
        // the reported count is complete.
        try {
            es.awaitTermination(5, java.util.concurrent.TimeUnit.SECONDS);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
        }
        return updateCount.get();
    }

    public static void main(String[] args) {
        CountdownLatchResetExample ex = new CountdownLatchResetExample(5, 20);
        System.out.println("Count : " + ex.countWaits());
    }
}

View File

@ -0,0 +1,45 @@
package com.baeldung.concurrent.cyclicbarrier;
import java.util.concurrent.BrokenBarrierException;
import java.util.concurrent.CyclicBarrier;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.atomic.AtomicInteger;
public class CyclicBarrierCompletionMethodExample {

    private int count;
    private int threadCount;
    private final AtomicInteger updateCount;

    CyclicBarrierCompletionMethodExample(int count, int threadCount) {
        updateCount = new AtomicInteger(0);
        this.count = count;
        this.threadCount = threadCount;
    }

    /**
     * Runs {@code threadCount} workers against a barrier of {@code count}
     * parties; the barrier action increments a counter on every trip.
     *
     * @return how many times the barrier tripped
     */
    public int countTrips() {
        CyclicBarrier cyclicBarrier = new CyclicBarrier(count, () -> {
            updateCount.incrementAndGet();
        });
        ExecutorService es = Executors.newFixedThreadPool(threadCount);
        for (int i = 0; i < threadCount; i++) {
            es.execute(() -> {
                try {
                    cyclicBarrier.await();
                } catch (InterruptedException | BrokenBarrierException e) {
                    e.printStackTrace();
                }
            });
        }
        es.shutdown();
        // BUG FIX: the original read updateCount right after shutdown(), racing
        // with the workers. Wait with a timeout — when threadCount is not a
        // multiple of count, the leftover workers stay parked on await().
        try {
            es.awaitTermination(2, java.util.concurrent.TimeUnit.SECONDS);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
        }
        return updateCount.get();
    }

    public static void main(String[] args) {
        CyclicBarrierCompletionMethodExample ex = new CyclicBarrierCompletionMethodExample(5, 20);
        System.out.println("Count : " + ex.countTrips());
    }
}

View File

@ -0,0 +1,32 @@
package com.baeldung.concurrent.cyclicbarrier;
import java.util.concurrent.BrokenBarrierException;
import java.util.concurrent.CyclicBarrier;
public class CyclicBarrierCountExample {

    private int count;

    public CyclicBarrierCountExample(int count) {
        this.count = count;
    }

    /**
     * Awaits the barrier twice from a single worker thread and reports whether
     * the barrier is broken immediately after the worker starts.
     *
     * Note: with {@code count > 1} a single worker can never trip the barrier,
     * so it blocks indefinitely on the first await().
     *
     * @return whether the barrier reports itself broken
     */
    public boolean callTwiceInSameThread() {
        CyclicBarrier cyclicBarrier = new CyclicBarrier(count);
        Thread t = new Thread(() -> {
            try {
                cyclicBarrier.await();
                cyclicBarrier.await();
            } catch (InterruptedException | BrokenBarrierException e) {
                e.printStackTrace();
            }
        });
        // BUG FIX: mark the worker as a daemon — with count > 1 it blocks on
        // await() forever, and a non-daemon thread would prevent JVM exit.
        t.setDaemon(true);
        t.start();
        return cyclicBarrier.isBroken();
    }

    public static void main(String[] args) {
        CyclicBarrierCountExample ex = new CyclicBarrierCountExample(7);
        System.out.println("Count : " + ex.callTwiceInSameThread());
    }
}

View File

@ -0,0 +1,46 @@
package com.baeldung.concurrent.cyclicbarrier;
import java.util.concurrent.BrokenBarrierException;
import java.util.concurrent.CyclicBarrier;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.atomic.AtomicInteger;
public class CyclicBarrierResetExample {

    private int count;
    private int threadCount;
    private final AtomicInteger updateCount;

    CyclicBarrierResetExample(int count, int threadCount) {
        updateCount = new AtomicInteger(0);
        this.count = count;
        this.threadCount = threadCount;
    }

    /**
     * Demonstrates that a CyclicBarrier automatically resets after each trip:
     * workers that find other parties already waiting increment a counter.
     *
     * @return how many workers observed at least one other party waiting
     *         (inherently racy — a demonstration value, not an exact count)
     */
    public int countWaits() {
        CyclicBarrier cyclicBarrier = new CyclicBarrier(count);
        // BUG FIX: use daemon threads — when threadCount is not a multiple of
        // count, the leftover workers block on await() forever and non-daemon
        // pool threads would keep the JVM alive.
        ExecutorService es = Executors.newFixedThreadPool(threadCount, r -> {
            Thread t = new Thread(r);
            t.setDaemon(true);
            return t;
        });
        for (int i = 0; i < threadCount; i++) {
            es.execute(() -> {
                try {
                    if (cyclicBarrier.getNumberWaiting() > 0) {
                        updateCount.incrementAndGet();
                    }
                    cyclicBarrier.await();
                } catch (InterruptedException | BrokenBarrierException e) {
                    e.printStackTrace();
                }
            });
        }
        es.shutdown();
        // BUG FIX: the original returned right after shutdown(), racing with
        // the workers; wait (bounded, since some workers may never finish).
        try {
            es.awaitTermination(2, java.util.concurrent.TimeUnit.SECONDS);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
        }
        return updateCount.get();
    }

    public static void main(String[] args) {
        CyclicBarrierResetExample ex = new CyclicBarrierResetExample(7, 20);
        System.out.println("Count : " + ex.countWaits());
    }
}

View File

@ -0,0 +1,15 @@
package com.baeldung.concurrent.countdownlatch;
import static org.junit.Assert.assertTrue;
import org.junit.Test;
public class CountdownLatchCountExampleUnitTest {

    /** Two countDown() calls against a latch of two must bring it to zero. */
    @Test
    public void whenCountDownLatch_completed() {
        CountdownLatchCountExample example = new CountdownLatchCountExample(2);
        boolean reachedZero = example.callTwiceInSameThread();
        assertTrue(reachedZero);
    }
}

View File

@ -0,0 +1,15 @@
package com.baeldung.concurrent.countdownlatch;
import static org.junit.Assert.assertTrue;
import org.junit.Test;
public class CountdownLatchResetExampleUnitTest {

    /**
     * A CountDownLatch never resets: against a latch of 7, only the first 7
     * of the 20 countDown() calls can actually lower the count.
     */
    @Test
    public void whenCountDownLatch_noReset() {
        CountdownLatchResetExample example = new CountdownLatchResetExample(7, 20);
        int observedChanges = example.countWaits();
        assertTrue(observedChanges <= 7);
    }
}

View File

@ -0,0 +1,15 @@
package com.baeldung.concurrent.cyclicbarrier;
import static org.junit.Assert.assertEquals;
import org.junit.Test;
public class CyclicBarrierCompletionMethodExampleUnitTest {

    /**
     * 20 workers against a barrier of 7 parties trip it twice (at 7 and 14);
     * the remaining 6 workers stay blocked on await().
     *
     * NOTE(review): countTrips() returns without waiting for the worker pool,
     * so this assertion looks timing-dependent — confirm it is stable in CI.
     */
    @Test
    public void whenCyclicBarrier_countTrips() {
        CyclicBarrierCompletionMethodExample ex = new CyclicBarrierCompletionMethodExample(7,20);
        int lineCount = ex.countTrips();
        assertEquals(2, lineCount);
    }
}

View File

@ -0,0 +1,15 @@
package com.baeldung.concurrent.cyclicbarrier;
import static org.junit.Assert.assertFalse;
import org.junit.Test;
public class CyclicBarrierCountExampleUnitTest {

    /**
     * A single worker cannot trip a two-party barrier; right after starting it
     * the barrier is not yet broken, so the example reports false.
     */
    @Test
    public void whenCyclicBarrier_notCompleted() {
        CyclicBarrierCountExample example = new CyclicBarrierCountExample(2);
        boolean completed = example.callTwiceInSameThread();
        assertFalse(completed);
    }
}

View File

@ -0,0 +1,15 @@
package com.baeldung.concurrent.cyclicbarrier;
import static org.junit.Assert.assertTrue;
import org.junit.Test;
public class CyclicBarrierResetExampleUnitTest {

    /**
     * 20 workers against a 7-party barrier: the barrier resets after each
     * trip, so workers across several cycles can find others already waiting.
     *
     * NOTE(review): countWaits() returns without awaiting the pool and the
     * getNumberWaiting() observation is inherently racy — this "> 7"
     * assertion may be flaky; verify before relying on it.
     */
    @Test
    public void whenCyclicBarrier_reset() {
        CyclicBarrierResetExample ex = new CyclicBarrierResetExample(7,20);
        int lineCount = ex.countWaits();
        assertTrue(lineCount > 7);
    }
}

View File

@ -66,6 +66,12 @@
<artifactId>mail</artifactId> <artifactId>mail</artifactId>
<version>${javax.mail.version}</version> <version>${javax.mail.version}</version>
</dependency> </dependency>
<dependency>
<groupId>nl.jqno.equalsverifier</groupId>
<artifactId>equalsverifier</artifactId>
<version>${equalsverifier.version}</version>
<scope>test</scope>
</dependency>
</dependencies> </dependencies>
<build> <build>
@ -424,6 +430,7 @@
<maven-shade-plugin.version>3.1.1</maven-shade-plugin.version> <maven-shade-plugin.version>3.1.1</maven-shade-plugin.version>
<spring-boot-maven-plugin.version>2.0.3.RELEASE</spring-boot-maven-plugin.version> <spring-boot-maven-plugin.version>2.0.3.RELEASE</spring-boot-maven-plugin.version>
<exec-maven-plugin.version>1.6.0</exec-maven-plugin.version> <exec-maven-plugin.version>1.6.0</exec-maven-plugin.version>
<equalsverifier.version>3.0.3</equalsverifier.version>
</properties> </properties>
</project> </project>

View File

@ -0,0 +1,36 @@
package com.baeldung.equalshashcode;
class Money {
int amount;
String currencyCode;
Money(int amount, String currencyCode) {
this.amount = amount;
this.currencyCode = currencyCode;
}
@Override
public boolean equals(Object o) {
if (o == this)
return true;
if (!(o instanceof Money))
return false;
Money other = (Money)o;
boolean currencyCodeEquals = (this.currencyCode == null && other.currencyCode == null)
|| (this.currencyCode != null && this.currencyCode.equals(other.currencyCode));
return this.amount == other.amount
&& currencyCodeEquals;
}
@Override
public int hashCode() {
int result = 17;
result = 31 * result + amount;
if (currencyCode != null) {
result = 31 * result + currencyCode.hashCode();
}
return result;
}
}

View File

@ -0,0 +1,39 @@
package com.baeldung.equalshashcode;
class Team {
final String city;
final String department;
Team(String city, String department) {
this.city = city;
this.department = department;
}
@Override
public final boolean equals(Object o) {
if (o == this)
return true;
if (!(o instanceof Team))
return false;
Team otherTeam = (Team)o;
boolean cityEquals = (this.city == null && otherTeam.city == null)
|| this.city != null && this.city.equals(otherTeam.city);
boolean departmentEquals = (this.department == null && otherTeam.department == null)
|| this.department != null && this.department.equals(otherTeam.department);
return cityEquals && departmentEquals;
}
@Override
public final int hashCode() {
int result = 17;
if (city != null) {
result = 31 * result + city.hashCode();
}
if (department != null) {
result = 31 * result + department.hashCode();
}
return result;
}
}

View File

@ -0,0 +1,38 @@
package com.baeldung.equalshashcode;
/**
 * A store voucher composed of a monetary value and a store name. Implements
 * equals/hashCode via composition (delegating to Money) instead of
 * inheritance, which keeps the equals contract intact.
 */
class Voucher {

    private Money value;
    private String store;

    Voucher(int amount, String currencyCode, String store) {
        this.value = new Money(amount, currencyCode);
        this.store = store;
    }

    @Override
    public boolean equals(Object o) {
        if (o == this) {
            return true;
        }
        if (!(o instanceof Voucher)) {
            return false;
        }
        Voucher that = (Voucher) o;
        // null-safe comparisons of both components
        boolean sameValue = this.value == null
                ? that.value == null
                : this.value.equals(that.value);
        boolean sameStore = this.store == null
                ? that.store == null
                : this.store.equals(that.store);
        return sameValue && sameStore;
    }

    @Override
    public int hashCode() {
        int hash = 17;
        if (this.value != null) {
            hash = 31 * hash + value.hashCode();
        }
        if (this.store != null) {
            hash = 31 * hash + store.hashCode();
        }
        return hash;
    }
}

View File

@ -0,0 +1,30 @@
package com.baeldung.equalshashcode;
/* (non-Javadoc)
* This class overrides equals, but it doesn't override hashCode.
*
* To see which problems this leads to:
* TeamUnitTest.givenMapKeyWithoutHashCode_whenSearched_thenReturnsWrongValue
*/
class WrongTeam {

    String city;
    String department;

    WrongTeam(String city, String department) {
        this.city = city;
        this.department = department;
    }

    /**
     * Field-by-field equality. hashCode() is deliberately NOT overridden —
     * that missing override is the defect this class demonstrates.
     */
    @Override
    public boolean equals(Object o) {
        if (o == this)
            return true;
        if (!(o instanceof WrongTeam))
            return false;
        WrongTeam otherTeam = (WrongTeam) o;
        // BUG FIX: the original compared the String fields with ==, which
        // tests reference identity and only "worked" for interned literals;
        // use null-safe equals() instead.
        boolean cityEquals = (this.city == null && otherTeam.city == null)
            || (this.city != null && this.city.equals(otherTeam.city));
        boolean departmentEquals = (this.department == null && otherTeam.department == null)
            || (this.department != null && this.department.equals(otherTeam.department));
        return cityEquals && departmentEquals;
    }
}

View File

@ -0,0 +1,47 @@
package com.baeldung.equalshashcode;
/* (non-Javadoc)
* This class extends the Money class that has overridden the equals method and once again overrides the equals method.
*
* To see which problems this leads to:
* MoneyUnitTest.givenMoneyAndVoucherInstances_whenEquals_thenReturnValuesArentSymmetric
*/
/**
 * Extends Money and re-overrides equals(), which deliberately breaks the
 * symmetry requirement of the equals contract (a Money can equal a
 * WrongVoucher, but never the other way around).
 */
class WrongVoucher extends Money {

    private String store;

    WrongVoucher(int amount, String currencyCode, String store) {
        super(amount, currencyCode);
        this.store = store;
    }

    @Override
    public boolean equals(Object o) {
        if (o == this) {
            return true;
        }
        if (!(o instanceof WrongVoucher)) {
            return false;
        }
        WrongVoucher that = (WrongVoucher) o;
        if (this.amount != that.amount) {
            return false;
        }
        // null-safe comparisons of the inherited and the added field
        boolean sameCurrency = this.currencyCode == null
                ? that.currencyCode == null
                : this.currencyCode.equals(that.currencyCode);
        boolean sameStore = this.store == null
                ? that.store == null
                : this.store.equals(that.store);
        return sameCurrency && sameStore;
    }

    @Override
    public int hashCode() {
        int hash = 17;
        hash = 31 * hash + amount;
        if (this.currencyCode != null) {
            hash = 31 * hash + currencyCode.hashCode();
        }
        if (this.store != null) {
            hash = 31 * hash + store.hashCode();
        }
        return hash;
    }
}

View File

@ -0,0 +1,27 @@
package com.baeldung.equalshashcode;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.assertFalse;
import org.junit.Test;
public class MoneyUnitTest {

    /** Same amount and same currency: equals() must report true. */
    @Test
    public void givenMoneyInstancesWithSameAmountAndCurrency_whenEquals_thenReturnsTrue() {
        Money first = new Money(55, "USD");
        Money second = new Money(55, "USD");
        assertTrue(first.equals(second));
    }

    /**
     * WrongVoucher re-overrides equals() in a subclass of Money, which breaks
     * the symmetry requirement of the equals() contract.
     */
    @Test
    public void givenMoneyAndVoucherInstances_whenEquals_thenReturnValuesArentSymmetric() {
        Money money = new Money(42, "USD");
        WrongVoucher wrongVoucher = new WrongVoucher(42, "USD", "Amazon");
        assertFalse(wrongVoucher.equals(money));
        assertTrue(money.equals(wrongVoucher));
    }
}

View File

@ -0,0 +1,46 @@
package com.baeldung.equalshashcode;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import java.util.HashMap;
import java.util.Map;
import org.junit.Test;
import nl.jqno.equalsverifier.EqualsVerifier;
public class TeamUnitTest {

    /** With equals AND hashCode overridden, an equal key finds its entry. */
    @Test
    public void givenMapKeyWithHashCode_whenSearched_thenReturnsCorrectValue() {
        Map<Team, String> leaderByTeam = new HashMap<>();
        leaderByTeam.put(new Team("New York", "development"), "Anne");
        leaderByTeam.put(new Team("Boston", "development"), "Brian");
        leaderByTeam.put(new Team("Boston", "marketing"), "Charlie");

        Team lookupKey = new Team("New York", "development");
        assertEquals("Anne", leaderByTeam.get(lookupKey));
    }

    /**
     * Without hashCode, an equal-but-distinct key hashes to a different
     * bucket, so the lookup misses.
     */
    @Test
    public void givenMapKeyWithoutHashCode_whenSearched_thenReturnsWrongValue() {
        Map<WrongTeam, String> leaderByTeam = new HashMap<>();
        leaderByTeam.put(new WrongTeam("New York", "development"), "Anne");
        leaderByTeam.put(new WrongTeam("Boston", "development"), "Brian");
        leaderByTeam.put(new WrongTeam("Boston", "marketing"), "Charlie");

        WrongTeam lookupKey = new WrongTeam("New York", "development");
        assertFalse("Anne".equals(leaderByTeam.get(lookupKey)));
    }

    /** EqualsVerifier checks the full equals/hashCode contract for Team. */
    @Test
    public void equalsContract() {
        EqualsVerifier.forClass(Team.class).verify();
    }
}

View File

@ -0,0 +1,76 @@
package com.baeldung.graph;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
/**
 * Simple undirected graph backed by an adjacency map from vertex to its
 * neighbour list. Vertices are identified by their String label.
 */
public class Graph {

    private Map<Vertex, List<Vertex>> adjVertices;

    Graph() {
        this.adjVertices = new HashMap<Vertex, List<Vertex>>();
    }

    /** Adds an isolated vertex; a no-op if the label is already present. */
    void addVertex(String label) {
        adjVertices.putIfAbsent(new Vertex(label), new ArrayList<>());
    }

    /** Removes the vertex and every edge referencing it. */
    void removeVertex(String label) {
        Vertex v = new Vertex(label);
        // BUG FIX: the original abused Stream.map()/collect() purely for its
        // side effect; plain iteration states the intent directly.
        adjVertices.values().forEach(edges -> edges.remove(v));
        adjVertices.remove(v);
    }

    /** Adds an undirected edge; both endpoints must already exist. */
    void addEdge(String label1, String label2) {
        Vertex v1 = new Vertex(label1);
        Vertex v2 = new Vertex(label2);
        adjVertices.get(v1).add(v2);
        adjVertices.get(v2).add(v1);
    }

    /** Removes the undirected edge, tolerating missing endpoints. */
    void removeEdge(String label1, String label2) {
        Vertex v1 = new Vertex(label1);
        Vertex v2 = new Vertex(label2);
        List<Vertex> eV1 = adjVertices.get(v1);
        List<Vertex> eV2 = adjVertices.get(v2);
        if (eV1 != null)
            eV1.remove(v2);
        if (eV2 != null)
            eV2.remove(v1);
    }

    /** Returns the neighbour list, or null if the vertex does not exist. */
    List<Vertex> getAdjVertices(String label) {
        return adjVertices.get(new Vertex(label));
    }

    /** Concatenates every vertex with its neighbour list, for debugging. */
    String printGraph() {
        // StringBuilder: single-threaded use, no need for StringBuffer locking
        StringBuilder sb = new StringBuilder();
        for (Vertex v : adjVertices.keySet()) {
            sb.append(v);
            sb.append(adjVertices.get(v));
        }
        return sb.toString();
    }

    // static: Vertex does not use the enclosing Graph instance, so it should
    // not carry the hidden outer-instance reference an inner class would.
    static class Vertex {
        String label;

        Vertex(String label) {
            this.label = label;
        }

        @Override
        public boolean equals(Object obj) {
            // BUG FIX: the original cast unconditionally (ClassCastException
            // for foreign types / NPE for null) and compared labels with ==,
            // which is reference identity and only "worked" because the demo
            // uses interned string literals.
            if (this == obj)
                return true;
            if (!(obj instanceof Vertex))
                return false;
            Vertex vertex = (Vertex) obj;
            // assumes label is non-null, as everywhere in this class
            return label.equals(vertex.label);
        }

        @Override
        public int hashCode() {
            return label.hashCode();
        }

        @Override
        public String toString() {
            return label;
        }
    }
}

View File

@ -0,0 +1,44 @@
package com.baeldung.graph;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.Queue;
import java.util.Set;
import java.util.Stack;
import com.baeldung.graph.Graph.Vertex;
/**
 * Depth-first and breadth-first traversals over {@link Graph}, returning
 * vertex labels in visitation order.
 */
public class GraphTraversal {

    /** Iterative DFS from {@code root}; labels in visitation order. */
    static Set<String> depthFirstTraversal(Graph graph, String root) {
        Set<String> seen = new LinkedHashSet<String>();
        Stack<String> pending = new Stack<String>();
        pending.push(root);
        while (!pending.isEmpty()) {
            String current = pending.pop();
            // Set.add is false for duplicates — doubles as the visited check
            if (seen.add(current)) {
                for (Vertex neighbour : graph.getAdjVertices(current)) {
                    pending.push(neighbour.label);
                }
            }
        }
        return seen;
    }

    /** BFS from {@code root}; labels in visitation order. */
    static Set<String> breadthFirstTraversal(Graph graph, String root) {
        Set<String> seen = new LinkedHashSet<String>();
        Queue<String> pending = new LinkedList<String>();
        pending.add(root);
        seen.add(root);
        while (!pending.isEmpty()) {
            String current = pending.poll();
            for (Vertex neighbour : graph.getAdjVertices(current)) {
                if (seen.add(neighbour.label)) {
                    pending.add(neighbour.label);
                }
            }
        }
        return seen;
    }
}

View File

@ -0,0 +1,36 @@
package com.baeldung.graph;
import org.junit.Assert;
import org.junit.Test;
public class GraphTraversalUnitTest {

    /** DFS visits the most recently discovered neighbour first. */
    @Test
    public void givenAGraph_whenTraversingDepthFirst_thenExpectedResult() {
        Graph graph = createGraph();
        Assert.assertEquals("[Bob, Rob, Maria, Alice, Mark]",
            GraphTraversal.depthFirstTraversal(graph, "Bob").toString());
    }

    /** BFS visits vertices level by level from the root. */
    @Test
    public void givenAGraph_whenTraversingBreadthFirst_thenExpectedResult() {
        Graph graph = createGraph();
        Assert.assertEquals("[Bob, Alice, Rob, Mark, Maria]",
            GraphTraversal.breadthFirstTraversal(graph, "Bob").toString());
    }

    /** Builds the shared five-vertex fixture; insertion order matters. */
    Graph createGraph() {
        Graph graph = new Graph();
        for (String label : new String[] { "Bob", "Alice", "Mark", "Rob", "Maria" }) {
            graph.addVertex(label);
        }
        String[][] edges = {
            { "Bob", "Alice" }, { "Bob", "Rob" }, { "Alice", "Mark" },
            { "Rob", "Mark" }, { "Alice", "Maria" }, { "Rob", "Maria" }
        };
        for (String[] pair : edges) {
            graph.addEdge(pair[0], pair[1]);
        }
        return graph;
    }
}

View File

@ -11,7 +11,7 @@ import java.util.regex.Pattern;
import static org.junit.Assert.assertTrue; import static org.junit.Assert.assertTrue;
public class OptimizedMatcherUnitTest { public class OptimizedMatcherManualTest {
private String action; private String action;

View File

@ -0,0 +1,136 @@
package org.baeldung.convertarraytostring;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
import org.apache.commons.lang3.StringUtils;
import org.junit.Test;
import com.google.common.base.Joiner;
import com.google.common.base.Splitter;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.assertEquals;
/**
 * Demonstrates array-to-String (and back) conversions using plain Java, the
 * Stream API, Apache Commons Lang, and Guava.
 */
public class ArrayToStringUnitTest {

    // convert with Java
    @Test
    public void givenAStringArray_whenConvertBeforeJava8_thenReturnString() {
        String[] strArray = { "Convert", "Array", "With", "Java" };
        StringBuilder stringBuilder = new StringBuilder();
        for (int i = 0; i < strArray.length; i++) {
            stringBuilder.append(strArray[i]);
        }
        String joinedString = stringBuilder.toString();
        assertThat(joinedString, instanceOf(String.class));
        assertEquals("ConvertArrayWithJava", joinedString);
    }

    @Test
    public void givenAString_whenConvertBeforeJava8_thenReturnStringArray() {
        String input = "lorem ipsum dolor sit amet";
        String[] strArray = input.split(" ");
        assertThat(strArray, instanceOf(String[].class));
        assertEquals(5, strArray.length);

        // splitting on the empty string yields one element per character
        input = "loremipsum";
        strArray = input.split("");
        assertThat(strArray, instanceOf(String[].class));
        assertEquals(10, strArray.length);
    }

    @Test
    public void givenAnIntArray_whenConvertBeforeJava8_thenReturnString() {
        // FIX: local renamed from the misleading "strArray" (it is an int[])
        int[] numbers = { 1, 2, 3, 4, 5 };
        StringBuilder stringBuilder = new StringBuilder();
        for (int i = 0; i < numbers.length; i++) {
            // FIX: append(int) directly — the Integer.valueOf() boxing in the
            // original was pointless
            stringBuilder.append(numbers[i]);
        }
        String joinedString = stringBuilder.toString();
        assertThat(joinedString, instanceOf(String.class));
        assertEquals("12345", joinedString);
    }

    // convert with Java Stream API
    @Test
    public void givenAStringArray_whenConvertWithJavaStream_thenReturnString() {
        String[] strArray = { "Convert", "With", "Java", "Streams" };
        String joinedString = Arrays.stream(strArray)
            .collect(Collectors.joining());
        assertThat(joinedString, instanceOf(String.class));
        assertEquals("ConvertWithJavaStreams", joinedString);

        joinedString = Arrays.stream(strArray)
            .collect(Collectors.joining(","));
        assertThat(joinedString, instanceOf(String.class));
        assertEquals("Convert,With,Java,Streams", joinedString);
    }

    // convert with Apache Commons
    @Test
    public void givenAStringArray_whenConvertWithApacheCommons_thenReturnString() {
        String[] strArray = { "Convert", "With", "Apache", "Commons" };
        String joinedString = StringUtils.join(strArray);
        assertThat(joinedString, instanceOf(String.class));
        assertEquals("ConvertWithApacheCommons", joinedString);
    }

    @Test
    public void givenAString_whenConvertWithApacheCommons_thenReturnStringArray() {
        String input = "lorem ipsum dolor sit amet";
        String[] strArray = StringUtils.split(input, " ");
        assertThat(strArray, instanceOf(String[].class));
        assertEquals(5, strArray.length);
    }

    // convert with Guava
    @Test
    public void givenAStringArray_whenConvertWithGuava_thenReturnString() {
        // the trailing null is silently dropped by skipNulls()
        String[] strArray = { "Convert", "With", "Guava", null };
        String joinedString = Joiner.on("")
            .skipNulls()
            .join(strArray);
        assertThat(joinedString, instanceOf(String.class));
        assertEquals("ConvertWithGuava", joinedString);
    }

    @Test
    public void givenAString_whenConvertWithGuava_thenReturnStringArray() {
        String input = "lorem ipsum dolor sit amet";
        List<String> resultList = Splitter.on(' ')
            .trimResults()
            .omitEmptyStrings()
            .splitToList(input);
        String[] strArray = resultList.toArray(new String[0]);
        assertThat(strArray, instanceOf(String[].class));
        assertEquals(5, strArray.length);
    }
}

View File

@ -0,0 +1,20 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
name=local-file-sink
connector.class=FileStreamSink
tasks.max=1
file=test.sink.txt
topics=connect-test

View File

@ -0,0 +1,20 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
name=local-file-source
connector.class=FileStreamSource
tasks.max=1
file=test.txt
topic=connect-test

View File

@ -0,0 +1,44 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# These are defaults. This file just demonstrates how to override some settings.
bootstrap.servers=localhost:9092
# The converters specify the format of data in Kafka and how to translate it into Connect data. Every Connect user will
# need to configure these based on the format they want their data in when loaded from or stored into Kafka
key.converter=org.apache.kafka.connect.json.JsonConverter
value.converter=org.apache.kafka.connect.json.JsonConverter
# Converter-specific settings can be passed in by prefixing the Converter's setting with the converter we want to apply
# it to
key.converter.schemas.enable=false
value.converter.schemas.enable=false
offset.storage.file.filename=/tmp/connect.offsets
# Flush much faster than normal, which is useful for testing/debugging
offset.flush.interval.ms=10000
# Set to a list of filesystem paths separated by commas (,) to enable class loading isolation for plugins
# (connectors, converters, transformations). The list should consist of top level directories that include
# any combination of:
# a) directories immediately containing jars with plugins and their dependencies
# b) uber-jars with plugins and their dependencies
# c) directories immediately containing the package directory structure of classes of plugins and their dependencies
# Note: symlinks will be followed to discover dependencies or plugins.
# Examples:
# plugin.path=/usr/local/share/java,/usr/local/share/kafka/plugins,/opt/connectors,
# Replace the relative path below with an absolute path if you are planning to start Kafka Connect from within a
# directory other than the home directory of Confluent Platform.
# NOTE: use forward slashes (or doubled backslashes) on Windows — in a Java
# .properties file a single backslash is an escape character and is dropped
# when the file is parsed, mangling the path.
plugin.path=C:/Software/confluent-5.0.0/share/java
#plugin.path=./share/java

View File

@ -0,0 +1,88 @@
##
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
# This file contains some of the configurations for the Kafka Connect distributed worker. This file is intended
# to be used with the examples, and some settings may differ from those used in a production system, especially
# the `bootstrap.servers` and those specifying replication factors.
# A list of host/port pairs to use for establishing the initial connection to the Kafka cluster.
bootstrap.servers=localhost:9092
# unique name for the cluster, used in forming the Connect cluster group. Note that this must not conflict with consumer group IDs
group.id=connect-cluster
# The converters specify the format of data in Kafka and how to translate it into Connect data. Every Connect user will
# need to configure these based on the format they want their data in when loaded from or stored into Kafka
key.converter=org.apache.kafka.connect.json.JsonConverter
value.converter=org.apache.kafka.connect.json.JsonConverter
# Converter-specific settings can be passed in by prefixing the Converter's setting with the converter we want to apply
# it to
key.converter.schemas.enable=true
value.converter.schemas.enable=true
# Topic to use for storing offsets. This topic should have many partitions and be replicated and compacted.
# Kafka Connect will attempt to create the topic automatically when needed, but you can always manually create
# the topic before starting Kafka Connect if a specific topic configuration is needed.
# Most users will want to use the built-in default replication factor of 3 or in some cases even specify a larger value.
# Since this means there must be at least as many brokers as the maximum replication factor used, we'd like to be able
# to run this example on a single-broker cluster and so here we instead set the replication factor to 1.
offset.storage.topic=connect-offsets
offset.storage.replication.factor=1
#offset.storage.partitions=25
# Topic to use for storing connector and task configurations; note that this should be a single partition, highly replicated,
# and compacted topic. Kafka Connect will attempt to create the topic automatically when needed, but you can always manually create
# the topic before starting Kafka Connect if a specific topic configuration is needed.
# Most users will want to use the built-in default replication factor of 3 or in some cases even specify a larger value.
# Since this means there must be at least as many brokers as the maximum replication factor used, we'd like to be able
# to run this example on a single-broker cluster and so here we instead set the replication factor to 1.
config.storage.topic=connect-configs
config.storage.replication.factor=1
# Topic to use for storing statuses. This topic can have multiple partitions and should be replicated and compacted.
# Kafka Connect will attempt to create the topic automatically when needed, but you can always manually create
# the topic before starting Kafka Connect if a specific topic configuration is needed.
# Most users will want to use the built-in default replication factor of 3 or in some cases even specify a larger value.
# Since this means there must be at least as many brokers as the maximum replication factor used, we'd like to be able
# to run this example on a single-broker cluster and so here we instead set the replication factor to 1.
status.storage.topic=connect-status
status.storage.replication.factor=1
#status.storage.partitions=5
# Flush much faster than normal, which is useful for testing/debugging
offset.flush.interval.ms=10000
# These are provided to inform the user about the presence of the REST host and port configs
# Hostname & Port for the REST API to listen on. If this is set, it will bind to the interface used to listen to requests.
#rest.host.name=
#rest.port=8083
# The Hostname & Port that will be given out to other workers to connect to i.e. URLs that are routable from other servers.
#rest.advertised.host.name=
#rest.advertised.port=
# Set to a list of filesystem paths separated by commas (,) to enable class loading isolation for plugins
# (connectors, converters, transformations). The list should consist of top level directories that include
# any combination of:
# a) directories immediately containing jars with plugins and their dependencies
# b) uber-jars with plugins and their dependencies
# c) directories immediately containing the package directory structure of classes of plugins and their dependencies
# Examples:
# plugin.path=/usr/local/share/java,/usr/local/share/kafka/plugins,/opt/connectors,
# Replace the relative path below with an absolute path if you are planning to start Kafka Connect from within a
# directory other than the home directory of Confluent Platform.
plugin.path=./share/java

View File

@ -0,0 +1,9 @@
{
"name": "local-file-sink",
"config": {
"connector.class": "FileStreamSink",
"tasks.max": 1,
"file": "test-distributed.sink.txt",
"topics": "connect-distributed"
}
}

View File

@ -0,0 +1,9 @@
{
"name": "local-file-source",
"config": {
"connector.class": "FileStreamSource",
"tasks.max": 1,
"file": "test-distributed.txt",
"topic": "connect-distributed"
}
}

View File

@ -0,0 +1,88 @@
##
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
# This file contains some of the configurations for the Kafka Connect distributed worker. This file is intended
# to be used with the examples, and some settings may differ from those used in a production system, especially
# the `bootstrap.servers` and those specifying replication factors.
# A list of host/port pairs to use for establishing the initial connection to the Kafka cluster.
bootstrap.servers=localhost:9092
# unique name for the cluster, used in forming the Connect cluster group. Note that this must not conflict with consumer group IDs
group.id=connect-cluster
# The converters specify the format of data in Kafka and how to translate it into Connect data. Every Connect user will
# need to configure these based on the format they want their data in when loaded from or stored into Kafka
key.converter=org.apache.kafka.connect.json.JsonConverter
value.converter=org.apache.kafka.connect.json.JsonConverter
# Converter-specific settings can be passed in by prefixing the Converter's setting with the converter we want to apply
# it to
key.converter.schemas.enable=false
value.converter.schemas.enable=false
# Topic to use for storing offsets. This topic should have many partitions and be replicated and compacted.
# Kafka Connect will attempt to create the topic automatically when needed, but you can always manually create
# the topic before starting Kafka Connect if a specific topic configuration is needed.
# Most users will want to use the built-in default replication factor of 3 or in some cases even specify a larger value.
# Since this means there must be at least as many brokers as the maximum replication factor used, we'd like to be able
# to run this example on a single-broker cluster and so here we instead set the replication factor to 1.
offset.storage.topic=connect-offsets
offset.storage.replication.factor=1
#offset.storage.partitions=25
# Topic to use for storing connector and task configurations; note that this should be a single partition, highly replicated,
# and compacted topic. Kafka Connect will attempt to create the topic automatically when needed, but you can always manually create
# the topic before starting Kafka Connect if a specific topic configuration is needed.
# Most users will want to use the built-in default replication factor of 3 or in some cases even specify a larger value.
# Since this means there must be at least as many brokers as the maximum replication factor used, we'd like to be able
# to run this example on a single-broker cluster and so here we instead set the replication factor to 1.
config.storage.topic=connect-configs
config.storage.replication.factor=1
# Topic to use for storing statuses. This topic can have multiple partitions and should be replicated and compacted.
# Kafka Connect will attempt to create the topic automatically when needed, but you can always manually create
# the topic before starting Kafka Connect if a specific topic configuration is needed.
# Most users will want to use the built-in default replication factor of 3 or in some cases even specify a larger value.
# Since this means there must be at least as many brokers as the maximum replication factor used, we'd like to be able
# to run this example on a single-broker cluster and so here we instead set the replication factor to 1.
status.storage.topic=connect-status
status.storage.replication.factor=1
#status.storage.partitions=5
# Flush much faster than normal, which is useful for testing/debugging
offset.flush.interval.ms=10000
# These are provided to inform the user about the presence of the REST host and port configs
# Hostname & Port for the REST API to listen on. If this is set, it will bind to the interface used to listen to requests.
#rest.host.name=
#rest.port=8083
# The Hostname & Port that will be given out to other workers to connect to i.e. URLs that are routable from other servers.
#rest.advertised.host.name=
#rest.advertised.port=
# Set to a list of filesystem paths separated by commas (,) to enable class loading isolation for plugins
# (connectors, converters, transformations). The list should consist of top level directories that include
# any combination of:
# a) directories immediately containing jars with plugins and their dependencies
# b) uber-jars with plugins and their dependencies
# c) directories immediately containing the package directory structure of classes of plugins and their dependencies
# Examples:
# plugin.path=/usr/local/share/java,/usr/local/share/kafka/plugins,/opt/connectors,
# Replace the relative path below with an absolute path if you are planning to start Kafka Connect from within a
# directory other than the home directory of Confluent Platform.
plugin.path=./share/java

View File

@ -0,0 +1,15 @@
{
"name": "local-file-source",
"config": {
"connector.class": "FileStreamSource",
"tasks.max": 1,
"file": "transformation.txt",
"topic": "connect-transformation",
"transforms": "MakeMap,InsertSource",
"transforms.MakeMap.type": "org.apache.kafka.connect.transforms.HoistField$Value",
"transforms.MakeMap.field": "line",
"transforms.InsertSource.type": "org.apache.kafka.connect.transforms.InsertField$Value",
"transforms.InsertSource.static.field": "data_source",
"transforms.InsertSource.static.value": "test-file-source"
}
}

View File

@ -0,0 +1,88 @@
##
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
# This file contains some of the configurations for the Kafka Connect distributed worker. This file is intended
# to be used with the examples, and some settings may differ from those used in a production system, especially
# the `bootstrap.servers` and those specifying replication factors.
# A list of host/port pairs to use for establishing the initial connection to the Kafka cluster.
bootstrap.servers=localhost:9092
# unique name for the cluster, used in forming the Connect cluster group. Note that this must not conflict with consumer group IDs
group.id=connect-cluster
# The converters specify the format of data in Kafka and how to translate it into Connect data. Every Connect user will
# need to configure these based on the format they want their data in when loaded from or stored into Kafka
key.converter=org.apache.kafka.connect.json.JsonConverter
value.converter=org.apache.kafka.connect.json.JsonConverter
# Converter-specific settings can be passed in by prefixing the Converter's setting with the converter we want to apply
# it to
key.converter.schemas.enable=true
value.converter.schemas.enable=true
# Topic to use for storing offsets. This topic should have many partitions and be replicated and compacted.
# Kafka Connect will attempt to create the topic automatically when needed, but you can always manually create
# the topic before starting Kafka Connect if a specific topic configuration is needed.
# Most users will want to use the built-in default replication factor of 3 or in some cases even specify a larger value.
# Since this means there must be at least as many brokers as the maximum replication factor used, we'd like to be able
# to run this example on a single-broker cluster and so here we instead set the replication factor to 1.
offset.storage.topic=connect-offsets
offset.storage.replication.factor=1
#offset.storage.partitions=25
# Topic to use for storing connector and task configurations; note that this should be a single partition, highly replicated,
# and compacted topic. Kafka Connect will attempt to create the topic automatically when needed, but you can always manually create
# the topic before starting Kafka Connect if a specific topic configuration is needed.
# Most users will want to use the built-in default replication factor of 3 or in some cases even specify a larger value.
# Since this means there must be at least as many brokers as the maximum replication factor used, we'd like to be able
# to run this example on a single-broker cluster and so here we instead set the replication factor to 1.
config.storage.topic=connect-configs
config.storage.replication.factor=1
# Topic to use for storing statuses. This topic can have multiple partitions and should be replicated and compacted.
# Kafka Connect will attempt to create the topic automatically when needed, but you can always manually create
# the topic before starting Kafka Connect if a specific topic configuration is needed.
# Most users will want to use the built-in default replication factor of 3 or in some cases even specify a larger value.
# Since this means there must be at least as many brokers as the maximum replication factor used, we'd like to be able
# to run this example on a single-broker cluster and so here we instead set the replication factor to 1.
status.storage.topic=connect-status
status.storage.replication.factor=1
#status.storage.partitions=5
# Flush much faster than normal, which is useful for testing/debugging
offset.flush.interval.ms=10000
# These are provided to inform the user about the presence of the REST host and port configs
# Hostname & Port for the REST API to listen on. If this is set, it will bind to the interface used to listen to requests.
#rest.host.name=
#rest.port=8083
# The Hostname & Port that will be given out to other workers to connect to i.e. URLs that are routable from other servers.
#rest.advertised.host.name=
#rest.advertised.port=
# Set to a list of filesystem paths separated by commas (,) to enable class loading isolation for plugins
# (connectors, converters, transformations). The list should consist of top level directories that include
# any combination of:
# a) directories immediately containing jars with plugins and their dependencies
# b) uber-jars with plugins and their dependencies
# c) directories immediately containing the package directory structure of classes of plugins and their dependencies
# Examples:
# plugin.path=/usr/local/share/java,/usr/local/share/kafka/plugins,/opt/connectors,
# Replace the relative path below with an absolute path if you are planning to start Kafka Connect from within a
# directory other than the home directory of Confluent Platform.
plugin.path=./share/java

View File

@ -0,0 +1,22 @@
{
"firstName": "John",
"lastName": "Smith",
"age": 25,
"address": {
"streetAddress": "21 2nd Street",
"city": "New York",
"state": "NY",
"postalCode": "10021"
},
"phoneNumber": [{
"type": "home",
"number": "212 555-1234"
}, {
"type": "fax",
"number": "646 555-4567"
}
],
"gender": {
"type": "male"
}
}

View File

@ -0,0 +1,11 @@
{
"name": "mqtt-source",
"config": {
"connector.class": "io.confluent.connect.mqtt.MqttSourceConnector",
"tasks.max": 1,
"mqtt.server.uri": "ws://broker.hivemq.com:8000/mqtt",
"mqtt.topics": "baeldung",
"kafka.topic": "connect-custom",
"value.converter": "org.apache.kafka.connect.converters.ByteArrayConverter"
}
}

View File

@ -44,12 +44,6 @@
<artifactId>disruptor</artifactId> <artifactId>disruptor</artifactId>
<version>${disruptor.version}</version> <version>${disruptor.version}</version>
</dependency> </dependency>
<!-- Webflux for reactive logging example -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-webflux</artifactId>
<version>${spring-boot.version}</version>
</dependency>
</dependencies> </dependencies>
<properties> <properties>
@ -57,7 +51,6 @@
<log4j-api.version>2.7</log4j-api.version> <log4j-api.version>2.7</log4j-api.version>
<log4j-core.version>2.7</log4j-core.version> <log4j-core.version>2.7</log4j-core.version>
<disruptor.version>3.3.6</disruptor.version> <disruptor.version>3.3.6</disruptor.version>
<spring-boot.version>2.1.0.RELEASE</spring-boot.version>
</properties> </properties>
</project> </project>

View File

@ -0,0 +1,35 @@
package com.baeldung.hibernate.findall;
import java.util.List;
import javax.persistence.TypedQuery;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Root;
import org.hibernate.Session;
import com.baeldung.hibernate.pojo.Student;
public class FindAll {

    /** Hibernate session all queries run against; supplied by the caller, never reassigned. */
    private final Session session;

    /**
     * Creates a finder bound to the given Hibernate session.
     *
     * @param session the session used to execute the queries
     */
    public FindAll(Session session) {
        // Redundant super() removed; field is now final so the binding is immutable.
        this.session = session;
    }

    /**
     * Loads every {@link Student} using a JPQL query.
     *
     * @return all students, or an empty list if the table is empty
     */
    public List<Student> findAllWithJpql() {
        return session.createQuery("SELECT a FROM Student a", Student.class).getResultList();
    }

    /**
     * Loads every {@link Student} using the JPA Criteria API.
     *
     * @return all students, or an empty list if the table is empty
     */
    public List<Student> findAllWithCriteriaQuery() {
        CriteriaBuilder cb = session.getCriteriaBuilder();
        CriteriaQuery<Student> cq = cb.createQuery(Student.class);
        Root<Student> rootEntry = cq.from(Student.class);
        CriteriaQuery<Student> all = cq.select(rootEntry);
        TypedQuery<Student> allQuery = session.createQuery(all);
        return allQuery.getResultList();
    }
}

View File

@ -0,0 +1,63 @@
package com.baeldung.hibernate.findall;
import static org.junit.Assert.assertEquals;
import java.io.IOException;
import java.util.List;
import org.hibernate.Session;
import org.hibernate.Transaction;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import com.baeldung.hibernate.HibernateUtil;
import com.baeldung.hibernate.pojo.Student;
public class FindAllUnitTest {

    private Session session;
    private Transaction transaction;
    private FindAll findAll;

    @Before
    public void setUp() throws IOException {
        session = HibernateUtil.getSessionFactory().openSession();
        transaction = session.beginTransaction();
        findAll = new FindAll(session);

        // Start from a clean table, then seed exactly three students.
        session.createNativeQuery("delete from Student").executeUpdate();
        for (int i = 0; i < 3; i++) {
            session.persist(new Student());
        }
        transaction.commit();

        // Open a fresh transaction for the test body; tearDown() rolls it back
        // so each test observes only the three seeded rows.
        transaction = session.beginTransaction();
    }

    @After
    public void tearDown() {
        transaction.rollback();
        session.close();
    }

    @Test
    public void givenCriteriaQuery_WhenFindAll_ThenGetAllPersons() {
        List<Student> students = findAll.findAllWithCriteriaQuery();
        assertEquals(3, students.size());
    }

    @Test
    public void givenJpql_WhenFindAll_ThenGetAllPersons() {
        List<Student> students = findAll.findAllWithJpql();
        assertEquals(3, students.size());
    }
}

View File

@ -0,0 +1,70 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>com.baeldung</groupId>
<artifactId>spring-data-cassandra-reactive</artifactId>
<version>0.0.1-SNAPSHOT</version>
<packaging>jar</packaging>
<name>spring-data-cassandra-reactive</name>
<description>Spring Data Cassandra reactive</description>
<parent>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-parent</artifactId>
<version>2.1.0.RELEASE</version>
<relativePath/>
</parent>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
<java.version>1.8</java.version>
</properties>
<dependencies>
<dependency>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-cassandra</artifactId>
<version>2.1.2.RELEASE</version>
</dependency>
<dependency>
<groupId>io.projectreactor</groupId>
<artifactId>reactor-core</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-web</artifactId>
</dependency>
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-test</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>io.projectreactor</groupId>
<artifactId>reactor-test</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
</plugin>
</plugins>
</build>
</project>

View File

@ -0,0 +1,12 @@
package com.baeldung.cassandra.reactive;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
/**
 * Spring Boot entry point for the reactive Spring Data Cassandra example.
 */
@SpringBootApplication
public class SpringDataCassandraReactiveApplication {
/**
 * Boots the Spring application context.
 *
 * @param args command-line arguments, passed through to Spring Boot
 */
public static void main(String[] args) {
SpringApplication.run(SpringDataCassandraReactiveApplication.class, args);
}
}

View File

@ -0,0 +1,49 @@
package com.baeldung.cassandra.reactive.controller;
import com.baeldung.cassandra.reactive.model.Employee;
import com.baeldung.cassandra.reactive.service.EmployeeService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import javax.annotation.PostConstruct;
import java.util.ArrayList;
import java.util.List;
@RestController
@RequestMapping("employee")
public class EmployeeController {

    @Autowired
    EmployeeService employeeService;

    /**
     * Seeds a fixed set of four employees once the controller has been constructed.
     */
    @PostConstruct
    public void saveEmployees() {
        List<Employee> seed = new ArrayList<>();
        seed.add(new Employee(123, "John Doe", "Delaware", "jdoe@xyz.com", 31));
        seed.add(new Employee(324, "Adam Smith", "North Carolina", "asmith@xyz.com", 43));
        seed.add(new Employee(355, "Kevin Dunner", "Virginia", "kdunner@xyz.com", 24));
        seed.add(new Employee(643, "Mike Lauren", "New York", "mlauren@xyz.com", 41));
        employeeService.initializeEmployees(seed);
    }

    /** Streams every stored employee. */
    @GetMapping("/list")
    public Flux<Employee> getAllEmployees() {
        return employeeService.getAllEmployees();
    }

    /** Emits the employee with the given id, or completes empty when absent. */
    @GetMapping("/{id}")
    public Mono<Employee> getEmployeeById(@PathVariable int id) {
        return employeeService.getEmployeeById(id);
    }

    /** Streams employees whose age is strictly greater than the given value. */
    @GetMapping("/filterByAge/{age}")
    public Flux<Employee> getEmployeesFilterByAge(@PathVariable int age) {
        return employeeService.getEmployeesFilterByAge(age);
    }
}

View File

@ -0,0 +1,20 @@
package com.baeldung.cassandra.reactive.model;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import org.springframework.data.cassandra.core.mapping.PrimaryKey;
import org.springframework.data.cassandra.core.mapping.Table;
/**
 * Cassandra entity representing an employee.
 * Lombok generates the accessors, equals/hashCode/toString ({@code @Data})
 * and both the all-args and no-args constructors.
 */
@Table
@Data
@AllArgsConstructor
@NoArgsConstructor
public class Employee {
// Partition key for the employee table.
@PrimaryKey
private int id;
private String name;
private String address;
private String email;
private int age;
}

View File

@ -0,0 +1,11 @@
package com.baeldung.cassandra.reactive.repository;
import com.baeldung.cassandra.reactive.model.Employee;
import org.springframework.data.cassandra.repository.AllowFiltering;
import org.springframework.data.cassandra.repository.ReactiveCassandraRepository;
import reactor.core.publisher.Flux;
/**
 * Reactive repository for {@link Employee} rows keyed by their integer id.
 */
public interface EmployeeRepository extends ReactiveCassandraRepository<Employee, Integer> {
/**
 * Streams employees whose age is strictly greater than the given value.
 * {@code @AllowFiltering} permits Cassandra to run this non-key query server-side.
 *
 * @param age exclusive lower bound on age
 * @return matching employees
 */
@AllowFiltering
Flux<Employee> findByAgeGreaterThan(int age);
}

View File

@ -0,0 +1,35 @@
package com.baeldung.cassandra.reactive.service;
import com.baeldung.cassandra.reactive.model.Employee;
import com.baeldung.cassandra.reactive.repository.EmployeeRepository;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import java.util.List;
@Service
public class EmployeeService {

    @Autowired
    EmployeeRepository employeeRepository;

    /**
     * Persists the given employees.
     * NOTE(review): subscribe() makes this fire-and-forget — the saves may still
     * be in flight when this method returns; confirm callers do not depend on
     * completion before reading.
     *
     * @param employees employees to save
     */
    public void initializeEmployees(List<Employee> employees) {
        employeeRepository.saveAll(employees).subscribe();
    }

    /** Streams every employee in the table. */
    public Flux<Employee> getAllEmployees() {
        return employeeRepository.findAll();
    }

    /** Streams employees whose age is strictly greater than {@code age}. */
    public Flux<Employee> getEmployeesFilterByAge(int age) {
        return employeeRepository.findByAgeGreaterThan(age);
    }

    /** Emits the employee with the given id, or completes empty when absent. */
    public Mono<Employee> getEmployeeById(int id) {
        return employeeRepository.findById(id);
    }
}

View File

@ -0,0 +1,2 @@
spring.data.cassandra.keyspace-name=practice
spring.data.cassandra.port=9042

View File

@ -0,0 +1,55 @@
package com.baeldung.cassandra.reactive;
import com.baeldung.cassandra.reactive.model.Employee;
import com.baeldung.cassandra.reactive.repository.EmployeeRepository;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringRunner;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.test.StepVerifier;
/**
 * Integration tests for {@link EmployeeRepository} against a running Cassandra
 * instance, exercising the reactive save/count/filter operations.
 */
@RunWith(SpringRunner.class)
@SpringBootTest
public class ReactiveEmployeeRepositoryIntegrationTest {
@Autowired
EmployeeRepository repository;
// Resets the table before each test: delete everything, then insert four
// known employees. StepVerifier blocks until the pipeline completes, so the
// seed data is fully written before the test body runs.
@Before
public void setUp() {
Flux<Employee> deleteAndInsert = repository.deleteAll() //
.thenMany(repository.saveAll(Flux.just(
new Employee(111, "John Doe", "Delaware", "jdoe@xyz.com", 31),
new Employee(222, "Adam Smith", "North Carolina", "asmith@xyz.com", 43),
new Employee(333, "Kevin Dunner", "Virginia", "kdunner@xyz.com", 24),
new Employee(444, "Mike Lauren", "New York", "mlauren@xyz.com", 41))));
StepVerifier.create(deleteAndInsert).expectNextCount(4).verifyComplete();
}
@Test
public void givenRecordsAreInserted_whenDbIsQueried_thenShouldIncludeNewRecords() {
// Count (4 from setUp), save two more, then count again; the final count
// must be 6. doOnNext prints are debug output only.
Mono<Long> saveAndCount = repository.count()
.doOnNext(System.out::println)
.thenMany(repository.saveAll(Flux.just(new Employee(325, "Kim Jones", "Florida", "kjones@xyz.com", 42),
new Employee(654, "Tom Moody", "New Hampshire", "tmoody@xyz.com", 44))))
.last()
.flatMap(v -> repository.count())
.doOnNext(System.out::println);
StepVerifier.create(saveAndCount).expectNext(6L).verifyComplete();
}
@Test
public void givenAgeForFilter_whenDbIsQueried_thenShouldReturnFilteredRecords() {
// Of the four seeded employees, exactly two have age > 35 (43 and 41).
StepVerifier.create(repository.findByAgeGreaterThan(35)).expectNextCount(2).verifyComplete();
}
}

35
pom.xml
View File

@ -377,6 +377,7 @@
<module>feign</module> <module>feign</module>
<module>flips</module> <module>flips</module>
<module>testing-modules/groovy-spock</module> <module>testing-modules/groovy-spock</module>
<module>testing-modules/load-testing-comparison</module>
<module>google-cloud</module> <module>google-cloud</module>
<module>google-web-toolkit</module> <module>google-web-toolkit</module>
<module>gson</module> <module>gson</module>
@ -636,7 +637,7 @@
<module>dubbo</module> <module>dubbo</module>
<module>persistence-modules/flyway</module> <module>persistence-modules/flyway</module>
<!-- <module>grpc</module> --><!-- protobuf-maven-plugin filecopy failure --> <!-- <module>grpc</module> --><!-- protobuf-maven-plugin filecopy failure -->
<!-- <module>JGit</module> --><!-- Unit test failure --> <module>JGit</module>
<module>jni</module> <module>jni</module>
<module>jooby</module> <module>jooby</module>
<!-- <module>micronaut</module> --><!-- Not upgraded to java 9 --> <!-- <module>micronaut</module> --><!-- Not upgraded to java 9 -->
@ -661,24 +662,13 @@
<module>sse-jaxrs</module> <module>sse-jaxrs</module>
<module>static-analysis</module> <module>static-analysis</module>
<module>stripe</module> <module>stripe</module>
<!-- <module>structurizr</module> --><!-- Artiifact not found --> <module>structurizr</module>
<module>Twitter4J</module> <module>Twitter4J</module>
<module>wicket</module> <module>wicket</module>
<module>xstream</module> <module>xstream</module>
<module>cas/cas-secured-app</module> <module>cas/cas-secured-app</module>
<module>cas/cas-server</module> <module>cas/cas-server</module>
<!-- <module>graphql/graphql-java</module> --><!-- Wrong parent --> <module>graphql/graphql-java</module>
<!-- <module>guest/deep-jsf</module> --><!-- guest post on different site -->
<!-- <module>guest/junit5-example</module> --><!-- guest post on different site - Compilation failure -->
<!-- <module>guest/log4j2-example</module> --><!-- guest post on different site -->
<!-- <module>guest/logback-example</module> --><!-- guest post on different site -->
<!-- <module>guest/memory-leaks</module> --><!-- guest post on different site -->
<!-- <module>guest/remote-debugging</module> --><!-- guest post on different site -->
<!-- <module>guest/spring-boot-app</module> --><!-- guest post on different site -->
<!-- <module>guest/spring-mvc</module> --><!-- guest post on different site -->
<!-- <module>guest/spring-security</module> --><!-- guest post on different site -->
<!-- <module>guest/thread-pools</module> --><!-- guest post on different site -->
<!-- <module>guest/tomcat-app</module> --><!-- guest post on different site -->
<!-- <module>jenkins/hello-world</module> --> <!-- disabled in the default profile --> <!-- <module>jenkins/hello-world</module> --> <!-- disabled in the default profile -->
<!-- <module>rule-engines/easy-rules</module> --><!-- Wrong Parent --> <!-- <module>rule-engines/easy-rules</module> --><!-- Wrong Parent -->
<!-- <module>rule-engines/openl-tablets</module> --><!-- Wrong Parent --> <!-- <module>rule-engines/openl-tablets</module> --><!-- Wrong Parent -->
@ -686,9 +676,6 @@
<module>spring-boot-custom-starter/greeter</module> <module>spring-boot-custom-starter/greeter</module>
<module>persistence-modules/spring-boot-h2/spring-boot-h2-database</module> <module>persistence-modules/spring-boot-h2/spring-boot-h2-database</module>
<!--module>persistence-modules/spring-boot-h2/spring-boot-h2-remote-app</module--> <!--module>persistence-modules/spring-boot-h2/spring-boot-h2-remote-app</module-->
<!-- <module>guest\webservices\rest-client</module> --><!-- guest post on different site -->
<!-- <module>guest\webservices\rest-server</module> --><!-- guest post on different site -->
<!-- <module>guest\webservices\spring-rest-service</module> --><!-- guest post on different site -->
<module>flyway-cdi-extension</module> <module>flyway-cdi-extension</module>
<module>spring-security-acl</module> <module>spring-security-acl</module>
@ -710,7 +697,7 @@
<module>spring-security-mvc-session</module> <module>spring-security-mvc-session</module>
<module>spring-security-mvc-socket</module> <module>spring-security-mvc-socket</module>
<module>spring-security-openid</module> <module>spring-security-openid</module>
<!--<module>spring-security-react</module> --> <module>spring-security-react</module>
<module>spring-security-rest-basic-auth</module> <module>spring-security-rest-basic-auth</module>
<module>spring-security-rest-custom</module> <module>spring-security-rest-custom</module>
<module>spring-security-rest</module> <module>spring-security-rest</module>
@ -733,8 +720,8 @@
<module>spring-rest-angular</module> <module>spring-rest-angular</module>
<module>spring-rest-full</module> <module>spring-rest-full</module>
<module>spring-rest-query-language</module> <module>spring-rest-query-language</module>
<!-- <module>spring-rest</module> --> <!-- temporarily disabled --> <module>spring-rest</module>
<!-- <module>spring-rest-simple</module> --> <!-- temporarily disabled --> <module>spring-rest-simple</module>
<module>spring-resttemplate</module> <module>spring-resttemplate</module>
<module>helidon</module> <module>helidon</module>
</modules> </modules>
@ -1029,7 +1016,7 @@
<module>jackson</module> <module>jackson</module>
<module>java-strings</module> <module>java-strings</module>
<!--<module>core-java-9</module> --> <!-- Commented because we have still not upgraded to java 9 --> <!--<module>core-java-9</module> --> <!-- we haven't upgraded to java 9 -->
<module>java-collections-conversions</module> <module>java-collections-conversions</module>
<module>java-collections-maps</module> <module>java-collections-maps</module>
<module>java-streams</module> <module>java-streams</module>
@ -1113,9 +1100,10 @@
<module>rabbitmq</module> <module>rabbitmq</module>
<!-- very long running - temporarily disabled --> <!-- very long running - temporarily disabled -->
<!-- <!--
-->
<module>persistence-modules/spring-data-mongodb</module> <module>persistence-modules/spring-data-mongodb</module>
-->
</modules> </modules>
@ -1713,6 +1701,7 @@
<module>persistence-modules/spring-data-elasticsearch</module> <module>persistence-modules/spring-data-elasticsearch</module>
<module>core-java-concurrency</module> <module>core-java-concurrency</module>
<module>core-java-concurrency-collections</module> <module>core-java-concurrency-collections</module>
<module>restx</module>
</modules> </modules>
</profile> </profile>

View File

@ -0,0 +1,12 @@
{
"//": "lines with // keys are just comments (we don't have real comments in json)",
"//": "this file stores password passed through md5+bcrypt hash",
"//": "you can use `restx hash md5+bcrypt {password}` shell command to get hashed passwords to put here",
"//": "to help startup with restx, there are comments with clear text passwords,",
"//": "which should obviously not be stored here.",
"user1": "$2a$10$iZluFUJShbjb1ue68bLrDuGCeJL9EMLHelVIf8u0SUbCseDOvKnoe",
"//": "user 1 password is 'user1-pwd'",
"user2": "$2a$10$oym3SYMFXf/9gGfDKKHO4eM1vWNqAZMsRZCL.BORCaP4yp5cdiCXu",
"//": "user 2 password is 'user2-pwd'"
}

4
restx/data/users.json Normal file
View File

@ -0,0 +1,4 @@
[
{"name":"user1", "roles": ["hello"]},
{"name":"user2", "roles": []}
]

38
restx/md.restx.json Normal file
View File

@ -0,0 +1,38 @@
{
"module": "restx-demo:restx-demo:0.1-SNAPSHOT",
"packaging": "war",
"properties": {
"java.version": "1.8",
"restx.version": "0.35-rc4"
},
"fragments": {
"maven": [
"classpath:///restx/build/fragments/maven/javadoc-apidoclet.xml" ]
},
"dependencies": {
"compile": [
"io.restx:restx-core:${restx.version}",
"io.restx:restx-security-basic:${restx.version}",
"io.restx:restx-core-annotation-processor:${restx.version}",
"io.restx:restx-factory:${restx.version}",
"io.restx:restx-factory-admin:${restx.version}",
"io.restx:restx-validation:${restx.version}",
"io.restx:restx-monitor-codahale:${restx.version}",
"io.restx:restx-monitor-admin:${restx.version}",
"io.restx:restx-log-admin:${restx.version}",
"io.restx:restx-i18n-admin:${restx.version}",
"io.restx:restx-stats-admin:${restx.version}",
"io.restx:restx-servlet:${restx.version}",
"io.restx:restx-server-jetty8:${restx.version}!optional",
"io.restx:restx-apidocs:${restx.version}",
"io.restx:restx-specs-admin:${restx.version}",
"io.restx:restx-admin:${restx.version}",
"ch.qos.logback:logback-classic:1.0.13"
],
"test": [
"io.restx:restx-specs-tests:${restx.version}",
"junit:junit:4.11"
]
}
}

155
restx/pom.xml Normal file
View File

@ -0,0 +1,155 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<artifactId>restx</artifactId>
<version>0.1-SNAPSHOT</version>
<packaging>war</packaging>
<name>restx-demo</name>
<properties>
<maven.compiler.target>1.8</maven.compiler.target>
<maven.compiler.source>1.8</maven.compiler.source>
<restx.version>0.35-rc4</restx.version>
</properties>
<parent>
<groupId>com.baeldung</groupId>
<artifactId>parent-modules</artifactId>
<version>1.0.0-SNAPSHOT</version>
</parent>
<dependencies>
<dependency>
<groupId>io.restx</groupId>
<artifactId>restx-core</artifactId>
<version>${restx.version}</version>
</dependency>
<dependency>
<groupId>io.restx</groupId>
<artifactId>restx-security-basic</artifactId>
<version>${restx.version}</version>
</dependency>
<dependency>
<groupId>io.restx</groupId>
<artifactId>restx-core-annotation-processor</artifactId>
<version>${restx.version}</version>
</dependency>
<dependency>
<groupId>io.restx</groupId>
<artifactId>restx-factory</artifactId>
<version>${restx.version}</version>
</dependency>
<dependency>
<groupId>io.restx</groupId>
<artifactId>restx-factory-admin</artifactId>
<version>${restx.version}</version>
</dependency>
<dependency>
<groupId>io.restx</groupId>
<artifactId>restx-validation</artifactId>
<version>${restx.version}</version>
</dependency>
<dependency>
<groupId>io.restx</groupId>
<artifactId>restx-monitor-codahale</artifactId>
<version>${restx.version}</version>
</dependency>
<dependency>
<groupId>io.restx</groupId>
<artifactId>restx-monitor-admin</artifactId>
<version>${restx.version}</version>
</dependency>
<dependency>
<groupId>io.restx</groupId>
<artifactId>restx-log-admin</artifactId>
<version>${restx.version}</version>
</dependency>
<dependency>
<groupId>io.restx</groupId>
<artifactId>restx-i18n-admin</artifactId>
<version>${restx.version}</version>
</dependency>
<dependency>
<groupId>io.restx</groupId>
<artifactId>restx-stats-admin</artifactId>
<version>${restx.version}</version>
</dependency>
<dependency>
<groupId>io.restx</groupId>
<artifactId>restx-servlet</artifactId>
<version>${restx.version}</version>
</dependency>
<dependency>
<groupId>io.restx</groupId>
<artifactId>restx-server-jetty8</artifactId>
<version>${restx.version}</version>
<optional>true</optional>
</dependency>
<dependency>
<groupId>io.restx</groupId>
<artifactId>restx-apidocs</artifactId>
<version>${restx.version}</version>
</dependency>
<dependency>
<groupId>io.restx</groupId>
<artifactId>restx-specs-admin</artifactId>
<version>${restx.version}</version>
</dependency>
<dependency>
<groupId>io.restx</groupId>
<artifactId>restx-admin</artifactId>
<version>${restx.version}</version>
</dependency>
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
<version>1.0.13</version>
</dependency>
<dependency>
<groupId>io.restx</groupId>
<artifactId>restx-specs-tests</artifactId>
<version>${restx.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.11</version>
<scope>test</scope>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-javadoc-plugin</artifactId>
<executions>
<execution>
<id>attach-docs</id>
<!--
we generate javadoc before packaging the jar to let a chance to apidocs doclet
to generate comments dictionary to be packaged inside the jar as a resource
-->
<phase>prepare-package</phase>
<goals>
<goal>jar</goal>
</goals>
</execution>
</executions>
<configuration>
<source>${maven.compiler.source}</source>
<doclet>restx.apidocs.doclet.ApidocsDoclet</doclet>
<docletArtifact>
<groupId>io.restx</groupId>
<artifactId>restx-apidocs-doclet</artifactId>
<version>${restx.version}</version>
</docletArtifact>
<additionalparam>-restx-target-dir ${project.basedir}/target/classes</additionalparam>
</configuration>
</plugin>
</plugins>
</build>
</project>

View File

@ -0,0 +1,74 @@
package restx.demo;
import restx.config.ConfigLoader;
import restx.config.ConfigSupplier;
import restx.factory.Provides;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Charsets;
import com.google.common.collect.ImmutableSet;
import restx.security.*;
import restx.factory.Module;
import restx.factory.Provides;
import javax.inject.Named;
import java.nio.file.Paths;
@Module
public class AppModule {

    /**
     * Provides the application's signature key.
     * NOTE(review): presumably used by restx to sign session data — confirm against
     * restx-security docs. Hard-coded key material is fine for a demo, not for prod.
     */
    @Provides
    public SignatureKey signatureKey() {
        return new SignatureKey("restx-demo -447494532235718370 restx-demo 801c9eaf-4116-48f2-906b-e979fba72757".getBytes(Charsets.UTF_8));
    }

    /**
     * Clear-text password for the default restx admin user.
     * NOTE(review): hard-coded credential — acceptable only in this demo setup.
     */
    @Provides
    @Named("restx.admin.password")
    public String restxAdminPassword() {
        return "4780";
    }

    /** Exposes the app's settings resource to the restx config system. */
    @Provides
    public ConfigSupplier appConfigSupplier(ConfigLoader configLoader) {
        // Load settings.properties in restx.demo package as a set of config entries
        return configLoader.fromResource("restx/demo/settings");
    }

    /** Credentials are hashed/checked with BCrypt. */
    @Provides
    public CredentialsStrategy credentialsStrategy() {
        return new BCryptCredentialsStrategy();
    }

    /**
     * Wires HTTP basic authentication against the JSON user and credential files
     * under data/ (see data/users.json and data/credentials.json in this module).
     */
    @Provides
    public BasicPrincipalAuthenticator basicPrincipalAuthenticator(
            SecuritySettings securitySettings, CredentialsStrategy credentialsStrategy,
            @Named("restx.admin.passwordHash") String defaultAdminPasswordHash, ObjectMapper mapper) {
        return new StdBasicPrincipalAuthenticator(new StdUserService<>(
                // use file based users repository.
                // Developer's note: prefer another storage mechanism for your users if you need real user management
                // and better perf
                new FileBasedUserRepository<>(
                        StdUser.class, // this is the class for the User objects, that you can get in your app code
                                       // with RestxSession.current().getPrincipal().get()
                                       // it can be a custom user class, it just need to be json deserializable
                        mapper,
                        // this is the default restx admin, useful to access the restx admin console.
                        // if one user with restx-admin role is defined in the repository, this default user won't be
                        // available anymore
                        new StdUser("admin", ImmutableSet.<String>of("*")),
                        // the path where users are stored
                        Paths.get("data/users.json"),
                        // the path where credentials are stored. isolating both is a good practice in terms of security
                        // it is strongly recommended to follow this approach even if you use your own repository
                        Paths.get("data/credentials.json"),
                        // tells that we want to reload the files dynamically if they are touched.
                        // this has a performance impact, if you know your users / credentials never change without a
                        // restart you can disable this to get better perfs
                        true),
                credentialsStrategy, defaultAdminPasswordHash),
                securitySettings);
    }
}

View File

@ -0,0 +1,32 @@
package restx.demo;
import com.google.common.base.Optional;
import restx.server.WebServer;
import restx.server.Jetty8WebServer;
/**
 * This class can be used to run the app.
 *
 * Alternatively, you can deploy the app as a war in a regular container like tomcat or jetty.
 *
 * Reading the port from system env PORT makes it compatible with heroku.
 */
public class AppServer {

    public static final String WEB_INF_LOCATION = "src/main/webapp/WEB-INF/web.xml";
    public static final String WEB_APP_LOCATION = "src/main/webapp";

    public static void main(String[] args) throws Exception {
        // the PORT env var wins over the default 8080 (heroku compatibility)
        String envPort = System.getenv("PORT");
        int port = Integer.parseInt(envPort == null ? "8080" : envPort);

        WebServer server = new Jetty8WebServer(WEB_INF_LOCATION, WEB_APP_LOCATION, port, "0.0.0.0");

        /*
         * load mode from system property if defined, or default to dev
         * be careful with that setting: if you use this class to launch your server in production,
         * make sure to launch it with -Drestx.mode=prod or change the default here
         */
        System.setProperty("restx.mode", System.getProperty("restx.mode", "dev"));
        System.setProperty("restx.app.package", "restx.demo");

        server.startAndAwait();
    }
}

View File

@ -0,0 +1,10 @@
package restx.demo;
/**
 * A list of roles for the application.
 *
 * We don't use an enum here because it must be used inside an annotation
 * (annotation attributes require compile-time String constants).
 */
public final class Roles {

    /** Role required to call the protected hello endpoint. */
    public static final String HELLO_ROLE = "hello";

    // Constant holder: prevent instantiation.
    private Roles() {
    }
}

View File

@ -0,0 +1,21 @@
package restx.demo.domain;
/**
 * Simple DTO wrapping a single text message, with a fluent setter.
 */
public class Message {

    private String message;

    /**
     * @return the wrapped message text (null if never set)
     */
    public String getMessage() {
        return message;
    }

    /**
     * Sets the message text.
     *
     * @param message the text to wrap
     * @return this instance, for fluent chaining
     */
    public Message setMessage(final String message) {
        this.message = message;
        return this;
    }

    @Override
    public String toString() {
        final StringBuilder sb = new StringBuilder("Message{");
        sb.append("message='").append(message).append('\'').append('}');
        return sb.toString();
    }
}

View File

@ -0,0 +1,62 @@
package restx.demo.rest;
import restx.demo.domain.Message;
import restx.demo.Roles;
import org.joda.time.DateTime;
import restx.annotations.GET;
import restx.annotations.POST;
import restx.annotations.RestxResource;
import restx.factory.Component;
import restx.security.PermitAll;
import restx.security.RolesAllowed;
import restx.security.RestxSession;
import javax.validation.constraints.NotNull;
@Component @RestxResource
public class HelloResource {

    /**
     * Say hello to currently logged in user.
     *
     * Authorized only for principals with Roles.HELLO_ROLE role.
     *
     * @return a Message to say hello
     */
    @GET("/message")
    @RolesAllowed(Roles.HELLO_ROLE)
    public Message sayHello() {
        return new Message().setMessage(String.format(
                "hello %s, it's %s",
                RestxSession.current().getPrincipal().get().getName(),
                DateTime.now().toString("HH:mm:ss")));
    }

    /**
     * Say hello to anybody.
     *
     * Does not require authentication.
     *
     * @param who the name to greet
     * @return a Message to say hello
     */
    @GET("/hello")
    @PermitAll
    public Message helloPublic(String who) {
        return new Message().setMessage(String.format(
                "hello %s, it's %s",
                who, DateTime.now().toString("HH:mm:ss")));
    }

    /** Minimal request payload for the POST endpoint; value is required (bean validation). */
    public static class MyPOJO {
        @NotNull
        String value;
        public String getValue(){ return value; }
        public void setValue(String value){ this.value = value; }
    }

    /**
     * Prefixes the posted value with "hello " and echoes the POJO back.
     * A missing value fails @NotNull validation (the specs expect a 400).
     *
     * @param pojo the posted payload
     * @return the same POJO with its value prefixed
     */
    @POST("/mypojo")
    @PermitAll
    public MyPOJO helloPojo(MyPOJO pojo){
        pojo.setValue("hello "+pojo.getValue());
        return pojo;
    }
}

View File

@ -0,0 +1,94 @@
<configuration>
<contextListener class="ch.qos.logback.classic.jul.LevelChangePropagator">
<resetJUL>true</resetJUL>
</contextListener>
<property name="LOGS_FOLDER" value="${logs.base:-logs}" />
<appender name="errorFile" class="ch.qos.logback.core.rolling.RollingFileAppender">
<File>${LOGS_FOLDER}/errors.log</File>
<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
<level>ERROR</level>
</filter>
<encoder>
<pattern>%d [%-16thread] [%-10X{principal}] %-5level %logger{36} - %msg%n</pattern>
</encoder>
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${LOGS_FOLDER}/errors.%d.log</fileNamePattern>
<maxHistory>30</maxHistory>
</rollingPolicy>
</appender>
<if condition='p("restx.mode").equals("prod")'>
<then>
<!-- production mode -->
<appender name="appLog" class="ch.qos.logback.core.rolling.RollingFileAppender">
<File>${LOGS_FOLDER}/app.log</File>
<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
<level>INFO</level>
</filter>
<encoder>
<pattern>%d [%-16thread] [%-10X{principal}] %-5level %logger{36} - %msg%n</pattern>
</encoder>
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${LOGS_FOLDER}/app.%d.log</fileNamePattern>
<maxHistory>10</maxHistory>
</rollingPolicy>
</appender>
<appender name="debugFile" class="ch.qos.logback.core.rolling.RollingFileAppender">
<File>${LOGS_FOLDER}/debug.log</File>
<encoder>
<pattern>%d [%-16thread] [%-10X{principal}] %-5level %logger{36} - %msg%n</pattern>
</encoder>
<rollingPolicy class="ch.qos.logback.core.rolling.FixedWindowRollingPolicy">
<fileNamePattern>${LOGS_FOLDER}/debug.%i.log.zip</fileNamePattern>
<minIndex>1</minIndex>
<maxIndex>3</maxIndex>
</rollingPolicy>
<triggeringPolicy class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy">
<maxFileSize>50MB</maxFileSize>
</triggeringPolicy>
</appender>
<root level="INFO">
<appender-ref ref="debugFile" />
<appender-ref ref="appLog" />
</root>
</then>
<else>
<!-- not production mode -->
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>%d [%-16thread] [%-10X{principal}] %-5level %logger{36} - %msg%n</pattern>
</encoder>
</appender>
<appender name="appLog" class="ch.qos.logback.core.FileAppender">
<File>${LOGS_FOLDER}/app.log</File>
<encoder>
<pattern>%d [%-16thread] [%-10X{principal}] %-5level %logger{36} - %msg%n</pattern>
</encoder>
</appender>
<root level="INFO">
<appender-ref ref="STDOUT" />
<appender-ref ref="appLog" />
</root>
</else>
</if>
<!-- clean up container logs -->
<logger name="org.eclipse.jetty.server.AbstractConnector" level="WARN" />
<logger name="org.eclipse.jetty.server.handler.ContextHandler" level="WARN" />
<logger name="org.eclipse.jetty.webapp.StandardDescriptorProcessor" level="WARN" />
<logger name="org.hibernate.validator.internal.engine.ConfigurationImpl" level="WARN" />
<logger name="org.reflections.Reflections" level="WARN" />
<logger name="restx.factory.Factory" level="WARN" />
<!-- app logs - set DEBUG level, in prod it will go to a dedicated file -->
<logger name="restx.demo" level="DEBUG" />
<root level="INFO">
<appender-ref ref="errorFile" />
</root>
</configuration>

View File

@ -0,0 +1 @@
app.name=restx-demo

View File

@ -0,0 +1,15 @@
<web-app xmlns="http://java.sun.com/xml/ns/javaee"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://java.sun.com/xml/ns/javaee http://java.sun.com/xml/ns/javaee/web-app_3_0.xsd"
version="3.0" metadata-complete="true">
<servlet>
<servlet-name>restx</servlet-name>
<servlet-class>restx.servlet.RestxMainRouterServlet</servlet-class>
<load-on-startup>1</load-on-startup>
</servlet>
<servlet-mapping>
<servlet-name>restx</servlet-name>
<url-pattern>/api/*</url-pattern>
</servlet-mapping>
</web-app>

View File

@ -0,0 +1,23 @@
package restx.demo.rest;
import org.junit.runner.RunWith;
import restx.tests.FindSpecsIn;
import restx.tests.RestxSpecTestsRunner;
/**
 * Runs every restx spec file found under specs/hello as a test case.
 * The runner + @FindSpecsIn do all the work; no test methods are needed here.
 */
@RunWith(RestxSpecTestsRunner.class)
@FindSpecsIn("specs/hello")
public class HelloResourceSpecUnitTest {

    /**
     * Useless, thanks to both @RunWith(RestxSpecTestsRunner.class) & @FindSpecsIn()
     *
     * @Rule
     * public RestxSpecRule rule = new RestxSpecRule();
     *
     * @Test
     * public void test_spec() throws Exception {
     * rule.runTest(specTestPath);
     * }
     */
}

View File

@ -0,0 +1,10 @@
title: should admin say hello
given:
- time: 2013-08-28T01:18:00.822+02:00
- uuids: [ "e2b4430f-9541-4602-9a3a-413d17c56a6b" ]
wts:
- when: |
GET message
$RestxSession: {"_expires":"2013-09-27T01:18:00.822+02:00","principal":"admin","sessionKey":"e2b4430f-9541-4602-9a3a-413d17c56a6b"}
then: |
{"message":"hello admin, it's 01:18:00"}

View File

@ -0,0 +1,8 @@
title: should admin say hello
given:
- time: 2013-08-28T01:18:00.822+02:00
wts:
- when: |
GET hello?who=xavier
then: |
{"message":"hello xavier, it's 01:18:00"}

View File

@ -0,0 +1,17 @@
title: should missing post value triggers a validation error
given:
- time: 2013-08-28T01:18:00.822+02:00
- uuids: [ "e2b4430f-9541-4602-9a3a-413d17c56a6b" ]
wts:
- when: |
POST mypojo
$RestxSession: {"_expires":"2013-09-27T01:18:00.822+02:00","principal":"user1","sessionKey":"e2b4430f-9541-4602-9a3a-413d17c56a6b"}
{}
then: |
400
- when: |
POST mypojo
$RestxSession: {"_expires":"2013-09-27T01:18:00.822+02:00","principal":"user1","sessionKey":"e2b4430f-9541-4602-9a3a-413d17c56a6b"}
{"value":"world"}
then: |
{"value":"hello world"}

View File

@ -0,0 +1,10 @@
title: should user1 say hello
given:
- time: 2013-08-28T01:18:00.822+02:00
- uuids: [ "e2b4430f-9541-4602-9a3a-413d17c56a6b" ]
wts:
- when: |
GET message
$RestxSession: {"_expires":"2013-09-27T01:18:00.822+02:00","principal":"user1","sessionKey":"e2b4430f-9541-4602-9a3a-413d17c56a6b"}
then: |
{"message":"hello user1, it's 01:18:00"}

View File

@ -0,0 +1,10 @@
title: should user2 not say hello
given:
- time: 2013-08-28T01:19:44.770+02:00
- uuids: [ "56f71fcc-42d3-422f-9458-8ad37fc4a0b5" ]
wts:
- when: |
GET message
$RestxSession: {"_expires":"2013-09-27T01:19:44.770+02:00","principal":"user2","sessionKey":"56f71fcc-42d3-422f-9458-8ad37fc4a0b5"}
then: |
403

View File

@ -0,0 +1,33 @@
package com.baeldung.debugging.consumer;
import java.util.Collections;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.autoconfigure.mongo.MongoReactiveAutoConfiguration;
import org.springframework.context.annotation.Bean;
import org.springframework.scheduling.annotation.EnableScheduling;
import org.springframework.security.config.web.server.ServerHttpSecurity;
import org.springframework.security.web.server.SecurityWebFilterChain;
import reactor.core.publisher.Hooks;
@SpringBootApplication(exclude = MongoReactiveAutoConfiguration.class)
@EnableScheduling
public class ConsumerSSEApplication {

    /**
     * Boots the consumer app on port 8082 with Reactor's global operator-debug
     * hook enabled (Hooks.onOperatorDebug), so operator assembly information is
     * attached to errors for the debugging demo.
     */
    public static void main(String[] args) {
        Hooks.onOperatorDebug();
        SpringApplication app = new SpringApplication(ConsumerSSEApplication.class);
        app.setDefaultProperties(Collections.singletonMap("server.port", "8082"));
        app.run(args);
    }

    /**
     * Permits all exchanges — no authentication; this app is a local demo.
     */
    @Bean
    public SecurityWebFilterChain springSecurityFilterChain(ServerHttpSecurity http) {
        http.authorizeExchange()
            .anyExchange()
            .permitAll();
        return http.build();
    }
}

View File

@ -0,0 +1,122 @@
package com.baeldung.debugging.consumer.chronjobs;
import java.time.Duration;
import java.util.concurrent.ThreadLocalRandom;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.MediaType;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;
import org.springframework.web.reactive.function.client.WebClient;
import com.baeldung.debugging.consumer.model.Foo;
import com.baeldung.debugging.consumer.model.FooDto;
import com.baeldung.debugging.consumer.service.FooService;
import reactor.core.publisher.Flux;
@Component
public class ChronJobs {

    private static Logger logger = LoggerFactory.getLogger(ChronJobs.class);
    private WebClient client = WebClient.create("http://localhost:8081");

    @Autowired
    private FooService service;

    /**
     * Every 10s: consumes the infinite foo SSE stream and randomly dispatches it
     * to approach 1, approach 1 with error handling, or approach 2.
     */
    @Scheduled(fixedRate = 10000)
    public void consumeInfiniteFlux() {
        Flux<Foo> fluxFoo = retrieveFluxOfFoos("/functional-reactive/periodic-foo", 1);
        processRandomly(fluxFoo, 1);
    }

    /**
     * Every 20s: consumes the finite foo SSE stream with the same random dispatch.
     */
    @Scheduled(fixedRate = 20000)
    public void consumeFiniteFlux2() {
        Flux<Foo> fluxFoo = retrieveFluxOfFoos("/functional-reactive/periodic-foo-2", 2);
        processRandomly(fluxFoo, 2);
    }

    /**
     * Every 20s: consumes the finite foo SSE stream with approach 3.
     */
    @Scheduled(fixedRate = 20000)
    public void consumeFiniteFlux3() {
        Flux<Foo> fluxFoo = retrieveFluxOfFoos("/functional-reactive/periodic-foo-2", 3);
        logger.info("process 3 with approach 3");
        service.processUsingApproachThree(fluxFoo);
    }

    /**
     * Every 20s: consumes the finite foo SSE stream with approach 4 (checkpoints).
     */
    @Scheduled(fixedRate = 20000)
    public void consumeFiniteFluxWithCheckpoint4() {
        Flux<Foo> fluxFoo = retrieveFluxOfFoos("/functional-reactive/periodic-foo-2", 4);
        logger.info("process 4 with approach 4");
        service.processUsingApproachFourWithCheckpoint(fluxFoo);
    }

    /**
     * Shared SSE-consuming pipeline: GETs the event stream at the given uri,
     * maps each FooDto to a Foo, delaying elements by 100ms to keep logs readable.
     * This was previously copy-pasted in all four scheduled methods.
     */
    private Flux<Foo> retrieveFluxOfFoos(String uri, int processNumber) {
        return client.get()
            .uri(uri)
            .accept(MediaType.TEXT_EVENT_STREAM)
            .retrieve()
            .bodyToFlux(FooDto.class)
            .delayElements(Duration.ofMillis(100))
            .map(dto -> {
                logger.debug("process {} with dto id {} name{}", processNumber, dto.getId(), dto.getName());
                return new Foo(dto);
            });
    }

    /**
     * Randomly hands the flux to approach 1, approach 1 with error handling, or
     * approach 2 — mirrors the switch previously duplicated in two methods.
     */
    private void processRandomly(Flux<Foo> fluxFoo, int processNumber) {
        Integer random = ThreadLocalRandom.current()
            .nextInt(0, 3);
        switch (random) {
        case 0:
            logger.info("process {} with approach 1", processNumber);
            service.processFoo(fluxFoo);
            break;
        case 1:
            logger.info("process {} with approach 1 EH", processNumber);
            service.processUsingApproachOneWithErrorHandling(fluxFoo);
            break;
        default:
            logger.info("process {} with approach 2", processNumber);
            service.processFooInAnotherScenario(fluxFoo);
            break;
        }
    }
}

View File

@ -0,0 +1,23 @@
package com.baeldung.debugging.consumer.controllers;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RestController;
import reactor.core.publisher.Hooks;
@RestController
public class ReactiveConfigsToggleRestController {

    /** Turns Reactor's global operator-debug hook on at runtime. */
    @GetMapping("/debug-hook-on")
    public String setReactiveDebugOn() {
        Hooks.onOperatorDebug();
        return "DEBUG HOOK ON";
    }

    /** Turns the operator-debug hook back off. */
    @GetMapping("/debug-hook-off")
    public String setReactiveDebugOff() {
        Hooks.resetOnOperatorDebug();
        return "DEBUG HOOK OFF";
    }
}

View File

@ -0,0 +1,26 @@
package com.baeldung.debugging.consumer.model;
import java.util.concurrent.ThreadLocalRandom;
import org.springframework.data.annotation.Id;
import lombok.AllArgsConstructor;
import lombok.Data;
@Data
@AllArgsConstructor
public class Foo {

    // Lombok @Data generates getters/setters/equals/hashCode/toString.
    @Id
    private Integer id;
    private String formattedName;
    private Integer quantity;

    /**
     * Builds a Foo from the transferred DTO.
     * NOTE(review): roughly 1 in 100 instances get a null id, and quantity is
     * randomized in [0, 10) — presumably to feed the error paths of this
     * debugging demo; confirm before reusing this class elsewhere.
     */
    public Foo(FooDto dto) {
        this.id = (ThreadLocalRandom.current()
            .nextInt(0, 100) == 0) ? null : dto.getId();
        this.formattedName = dto.getName();
        this.quantity = ThreadLocalRandom.current()
            .nextInt(0, 10);
    }
}

View File

@ -0,0 +1,12 @@
package com.baeldung.debugging.consumer.model;
import lombok.AllArgsConstructor;
import lombok.Data;
// Transfer object matching the JSON payload streamed by the server app.
// Lombok @Data + @AllArgsConstructor generate accessors and the constructor.
@Data
@AllArgsConstructor
public class FooDto {

    private Integer id;
    private String name;
}

View File

@ -0,0 +1,45 @@
package com.baeldung.debugging.consumer.service;
import java.util.concurrent.ThreadLocalRandom;
import com.baeldung.debugging.consumer.model.Foo;
import reactor.core.publisher.Flux;
/**
 * Name-mangling steps for the Foo processing demo. Both transformations mutate
 * the Foo elements in place as they flow through the Flux.
 */
public class FooNameHelper {

    /** Applies both name transformations: concatenation first, then substring. */
    public static Flux<Foo> concatAndSubstringFooName(Flux<Foo> flux) {
        return substringFooName(concatFooName(flux));
    }

    /** With a 1-in-80 chance per element, appends "-bael" to the formatted name. */
    public static Flux<Foo> concatFooName(Flux<Foo> flux) {
        return flux.map(foo -> {
            int random = ThreadLocalRandom.current()
                .nextInt(0, 80);
            String newName = (random != 0) ? foo.getFormattedName() : foo.getFormattedName() + "-bael";
            foo.setFormattedName(newName);
            return foo;
        });
    }

    /**
     * Truncates the name: usually the first 5 characters, but with a 1-in-100
     * chance takes characters 10..15 instead — which fails on short names.
     * NOTE(review): the failure path looks intentional for this debugging demo.
     */
    public static Flux<Foo> substringFooName(Flux<Foo> flux) {
        return flux.map(foo -> {
            int random = ThreadLocalRandom.current()
                .nextInt(0, 100);
            String newName = (random == 0)
                ? foo.getFormattedName().substring(10, 15)
                : foo.getFormattedName().substring(0, 5);
            foo.setFormattedName(newName);
            return foo;
        });
    }
}

View File

@ -0,0 +1,31 @@
package com.baeldung.debugging.consumer.service;
import java.util.concurrent.ThreadLocalRandom;
import com.baeldung.debugging.consumer.model.Foo;
import reactor.core.publisher.Flux;
/**
 * Quantity-mangling steps for the Foo processing demo. Elements are mutated
 * in place as they flow through the Flux.
 */
public class FooQuantityHelper {

    /**
     * Adds 2 to each quantity (or, with a 1-in-90 chance, zeroes it), then
     * applies the division step.
     */
    public static Flux<Foo> processFooReducingQuantity(Flux<Foo> flux) {
        Flux<Foo> bumped = flux.map(foo -> {
            int random = ThreadLocalRandom.current()
                .nextInt(0, 90);
            foo.setQuantity(random == 0 ? 0 : foo.getQuantity() + 2);
            return foo;
        });
        return divideFooQuantity(bumped);
    }

    /**
     * Replaces each quantity with round(5 / quantity) using integer division.
     * NOTE(review): throws ArithmeticException when quantity is 0 — the failure
     * path looks intentional for this debugging demo.
     */
    public static Flux<Foo> divideFooQuantity(Flux<Foo> flux) {
        return flux.map(foo -> {
            foo.setQuantity(Math.round(5 / foo.getQuantity()));
            return foo;
        });
    }
}

View File

@ -0,0 +1,26 @@
package com.baeldung.debugging.consumer.service;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.baeldung.debugging.consumer.model.Foo;
import reactor.core.publisher.Flux;
/**
 * Logs each Foo element passing through the stream.
 */
public class FooReporter {

    private static Logger logger = LoggerFactory.getLogger(FooReporter.class);

    /**
     * Logs each element under the given approach label; elements with a null id
     * make the stream fail with an IllegalArgumentException.
     */
    public static Flux<Foo> reportResult(Flux<Foo> input, String approach) {
        return input.map(foo -> {
            if (foo.getId() == null) {
                throw new IllegalArgumentException("Null id is not valid!");
            }
            logger.info("Reporting for approach {}: Foo with id '{}' name '{}' and quantity '{}'", approach, foo.getId(), foo.getFormattedName(), foo.getQuantity());
            return foo;
        });
    }

    /** Same as {@link #reportResult(Flux, String)} with the "default" label. */
    public static Flux<Foo> reportResult(Flux<Foo> input) {
        return reportResult(input, "default");
    }
}

View File

@ -0,0 +1,91 @@
package com.baeldung.debugging.consumer.service;
import static com.baeldung.debugging.consumer.service.FooNameHelper.concatAndSubstringFooName;
import static com.baeldung.debugging.consumer.service.FooNameHelper.substringFooName;
import static com.baeldung.debugging.consumer.service.FooQuantityHelper.divideFooQuantity;
import static com.baeldung.debugging.consumer.service.FooQuantityHelper.processFooReducingQuantity;
import static com.baeldung.debugging.consumer.service.FooReporter.reportResult;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import com.baeldung.debugging.consumer.model.Foo;
import reactor.core.publisher.Flux;
@Component
public class FooService {

    private static Logger logger = LoggerFactory.getLogger(FooService.class);

    /**
     * Approach 1: transforms names, traces the stream with log(), reports
     * results, and logs errors via doOnError. Nothing runs until subscribe().
     */
    public void processFoo(Flux<Foo> flux) {
        flux = FooNameHelper.concatFooName(flux);
        flux = FooNameHelper.substringFooName(flux);
        // log() traces every reactive signal passing through this point
        flux = flux.log();
        flux = FooReporter.reportResult(flux);
        flux = flux.doOnError(error -> {
            logger.error("The following error happened on processFoo method!", error);
        });
        flux.subscribe();
    }

    /**
     * Approach 2: truncates names and divides quantities with NO error
     * handling — failures surface as raw reactive errors.
     */
    public void processFooInAnotherScenario(Flux<Foo> flux) {
        flux = FooNameHelper.substringFooName(flux);
        flux = FooQuantityHelper.divideFooQuantity(flux);
        flux.subscribe();
    }

    /**
     * Approach 1 with error handling: a longer chain of repeated
     * transformations whose failures are caught and logged by doOnError.
     */
    public void processUsingApproachOneWithErrorHandling(Flux<Foo> flux) {
        logger.info("starting approach one w error handling!");
        flux = concatAndSubstringFooName(flux);
        flux = concatAndSubstringFooName(flux);
        flux = substringFooName(flux);
        flux = processFooReducingQuantity(flux);
        flux = processFooReducingQuantity(flux);
        flux = processFooReducingQuantity(flux);
        flux = reportResult(flux, "ONE w/ EH");
        flux = flux.doOnError(error -> {
            logger.error("Approach 1 with Error Handling failed!", error);
        });
        flux.subscribe();
    }

    /**
     * Approach 3: short chain with error logging.
     */
    public void processUsingApproachThree(Flux<Foo> flux) {
        logger.info("starting approach three!");
        flux = concatAndSubstringFooName(flux);
        flux = reportResult(flux, "THREE");
        flux = flux.doOnError(error -> {
            logger.error("Approach 3 failed!", error);
        });
        flux.subscribe();
    }

    /**
     * Approach 4: inserts checkpoint() markers into the chain so errors carry
     * assembly information for debugging.
     */
    public void processUsingApproachFourWithCheckpoint(Flux<Foo> flux) {
        logger.info("starting approach four!");
        flux = concatAndSubstringFooName(flux);
        flux = flux.checkpoint("CHECKPOINT 1");
        flux = concatAndSubstringFooName(flux);
        flux = divideFooQuantity(flux);
        flux = flux.checkpoint("CHECKPOINT 2", true);
        flux = reportResult(flux, "FOUR");
        // NOTE(review): an extra transformation is appended AFTER reporting and
        // doOnError is attached to it — presumably intentional for the demo,
        // but worth confirming against the other approaches' structure.
        flux = concatAndSubstringFooName(flux).doOnError(error -> {
            logger.error("Approach 4 failed!", error);
        });
        flux.subscribe();
    }

    /**
     * Approach 4 variant: a single initial checkpoint (with forced traceback
     * flag) and plain error logging at the end of the chain.
     */
    public void processUsingApproachFourWithInitialCheckpoint(Flux<Foo> flux) {
        logger.info("starting approach four!");
        flux = concatAndSubstringFooName(flux);
        flux = flux.checkpoint("CHECKPOINT 1", true);
        flux = concatAndSubstringFooName(flux);
        flux = divideFooQuantity(flux);
        flux = reportResult(flux, "FOUR");
        flux = flux.doOnError(error -> {
            logger.error("Approach 4-2 failed!", error);
        });
        flux.subscribe();
    }
}

View File

@ -0,0 +1,29 @@
package com.baeldung.debugging.server;
import java.util.Collections;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.annotation.Bean;
import org.springframework.security.config.web.server.ServerHttpSecurity;
import org.springframework.security.web.server.SecurityWebFilterChain;
import org.springframework.web.reactive.config.EnableWebFlux;
@EnableWebFlux
@SpringBootApplication
public class ServerSSEApplication {

    /** Boots the SSE-producing server app on port 8081. */
    public static void main(String[] args) {
        SpringApplication app = new SpringApplication(ServerSSEApplication.class);
        app.setDefaultProperties(Collections.singletonMap("server.port", "8081"));
        app.run(args);
    }

    /**
     * Permits all exchanges — no authentication; this app is a local demo.
     */
    @Bean
    public SecurityWebFilterChain springSecurityFilterChain(ServerHttpSecurity http) {
        http.authorizeExchange()
            .anyExchange()
            .permitAll();
        return http.build();
    }
}

View File

@ -0,0 +1,47 @@
package com.baeldung.debugging.server.handlers;
import java.time.Duration;
import java.util.concurrent.ThreadLocalRandom;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.http.MediaType;
import org.springframework.stereotype.Component;
import org.springframework.web.reactive.function.server.ServerRequest;
import org.springframework.web.reactive.function.server.ServerResponse;
import com.baeldung.debugging.server.model.Foo;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
@Component
public class ServerHandler {

    private static Logger logger = LoggerFactory.getLogger(ServerHandler.class);

    /**
     * Streams one Foo per second, forever, as server-sent events.
     * Roughly 1 in 50 elements throws a RuntimeException so the consumer side
     * has errors to debug.
     *
     * @param request the incoming request (unused)
     * @return an OK response wrapping the infinite event stream
     */
    public Mono<ServerResponse> useHandler(final ServerRequest request) {
        // there are chances that something goes wrong here...
        return ServerResponse.ok()
            .contentType(MediaType.TEXT_EVENT_STREAM)
            .body(Flux.interval(Duration.ofSeconds(1))
                .map(sequence -> {
                    logger.info("retrieving Foo. Sequence: {}", sequence);
                    if (ThreadLocalRandom.current()
                        .nextInt(0, 50) == 1) {
                        throw new RuntimeException("There was an error retrieving the Foo!");
                    }
                    return new Foo(sequence, "name" + sequence);
                }), Foo.class);
    }

    /**
     * Streams a finite series of 50 Foos as server-sent events.
     *
     * @param request the incoming request (unused)
     * @return an OK response wrapping the 50-element event stream
     */
    public Mono<ServerResponse> useHandlerFinite(final ServerRequest request) {
        return ServerResponse.ok()
            .contentType(MediaType.TEXT_EVENT_STREAM)
            .body(Flux.range(0, 50)
                .map(sequence -> {
                    // Long.valueOf instead of the deprecated new Long(...) boxing constructor
                    return new Foo(Long.valueOf(sequence), "theFooNameNumber" + sequence);
                }), Foo.class);
    }
}

View File

@ -0,0 +1,16 @@
package com.baeldung.debugging.server.model;
import org.springframework.data.annotation.Id;
import lombok.AllArgsConstructor;
import lombok.Data;
/**
 * Data item streamed by the SSE server endpoints.
 * Lombok generates accessors/equals/hashCode/toString ({@code @Data}) and the
 * all-args constructor.
 */
@Data
@AllArgsConstructor
public class Foo {
    // Mapped as the document/record identifier by Spring Data's @Id
    @Id
    private Long id;
    private String name;
}

View File

@ -0,0 +1,22 @@
package com.baeldung.debugging.server.routers;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.reactive.function.server.RequestPredicates;
import org.springframework.web.reactive.function.server.RouterFunction;
import org.springframework.web.reactive.function.server.RouterFunctions;
import org.springframework.web.reactive.function.server.ServerResponse;
import com.baeldung.debugging.server.handlers.ServerHandler;
/**
 * Registers the functional routes exposed by {@link ServerHandler}: an
 * endless periodic Foo stream and a finite one.
 */
@Configuration
public class ServerRouter {

    @Bean
    public RouterFunction<ServerResponse> responseRoute(@Autowired ServerHandler handler) {
        RouterFunction<ServerResponse> periodicRoute =
            RouterFunctions.route(RequestPredicates.GET("/functional-reactive/periodic-foo"), handler::useHandler);
        return periodicRoute.andRoute(RequestPredicates.GET("/functional-reactive/periodic-foo-2"), handler::useHandlerFinite);
    }
}

View File

@ -1,4 +1,4 @@
package com.baeldung.webFluxLogging; package com.baeldung.webflux.logging;
import reactor.core.publisher.Flux; import reactor.core.publisher.Flux;

View File

@ -1,2 +1 @@
logging.level.root=INFO logging.level.root=INFO

View File

@ -0,0 +1,65 @@
package com.baeldung.debugging.consumer;
import static org.assertj.core.api.Assertions.assertThat;
import java.util.Arrays;
import java.util.Collection;
import java.util.Optional;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import com.baeldung.debugging.consumer.model.Foo;
import com.baeldung.debugging.consumer.service.FooService;
import com.baeldung.debugging.consumer.utils.ListAppender;
import ch.qos.logback.classic.spi.ILoggingEvent;
import ch.qos.logback.classic.spi.IThrowableProxy;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Hooks;
/**
 * Verifies that FooService error logging includes Reactor debug/assembly
 * trace details when the global operator-debug hook is enabled.
 */
public class ConsumerFooServiceIntegrationTest {

    FooService service = new FooService();

    @BeforeEach
    public void clearLogList() {
        // Enable assembly-time debug info for every operator built afterwards
        Hooks.onOperatorDebug();
        ListAppender.clearEventList();
    }

    @Test
    public void givenFooWithNullId_whenProcessFoo_thenLogsWithDebugTrace() {
        Foo one = new Foo(1, "nameverylong", 8);
        // Foo with a null id — expected to produce the logged error asserted below
        Foo two = new Foo(null, "nameverylong", 4);
        Flux<Foo> flux = Flux.just(one, two);
        service.processFoo(flux);
        // Formatted messages captured by the in-memory test appender
        Collection<String> allLoggedEntries = ListAppender.getEvents()
            .stream()
            .map(ILoggingEvent::getFormattedMessage)
            .collect(Collectors.toList());
        // Messages of suppressed exceptions attached to logged throwables —
        // this is where the assertions below expect the assembly trace to appear
        Collection<String> allSuppressedEntries = ListAppender.getEvents()
            .stream()
            .map(ILoggingEvent::getThrowableProxy)
            .flatMap(t -> {
                // events without an exception have a null throwable proxy
                return Optional.ofNullable(t)
                    .map(IThrowableProxy::getSuppressed)
                    .map(Arrays::stream)
                    .orElse(Stream.empty());
            })
            .map(IThrowableProxy::getMessage)
            .collect(Collectors.toList());
        assertThat(allLoggedEntries).anyMatch(entry -> entry.contains("The following error happened on processFoo method!"))
            .anyMatch(entry -> entry.contains("| onSubscribe"))
            .anyMatch(entry -> entry.contains("| cancel()"));
        assertThat(allSuppressedEntries).anyMatch(entry -> entry.contains("Assembly trace from producer"))
            .anyMatch(entry -> entry.contains("Error has been observed by the following operator(s)"));
    }
}

View File

@ -0,0 +1,49 @@
package com.baeldung.debugging.consumer;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.springframework.test.web.reactive.server.WebTestClient;
import org.springframework.test.web.reactive.server.WebTestClient.ResponseSpec;
import com.baeldung.debugging.consumer.service.FooService;
/**
 * Live test hitting the running consumer application to toggle the Reactor
 * debug hook endpoints and verify their confirmation messages.
 */
public class ConsumerFooServiceLiveTest {

    FooService service = new FooService();

    private static final String BASE_URL = "http://localhost:8082";
    private static final String DEBUG_HOOK_ON = BASE_URL + "/debug-hook-on";
    private static final String DEBUG_HOOK_OFF = BASE_URL + "/debug-hook-off";

    private static WebTestClient client;

    @BeforeAll
    public static void setup() {
        client = WebTestClient.bindToServer()
            .baseUrl(BASE_URL)
            .build();
    }

    @Test
    public void whenRequestingDebugHookOn_thenObtainExpectedMessage() {
        verifyEndpointMessage(DEBUG_HOOK_ON, "DEBUG HOOK ON");
    }

    @Test
    public void whenRequestingDebugHookOff_thenObtainExpectedMessage() {
        verifyEndpointMessage(DEBUG_HOOK_OFF, "DEBUG HOOK OFF");
    }

    /**
     * GETs the given uri and asserts a 200 response whose body equals
     * the expected message. Extracted to avoid duplicating the
     * request/verification sequence in every test.
     */
    private void verifyEndpointMessage(String uri, String expectedBody) {
        ResponseSpec response = client.get()
            .uri(uri)
            .exchange();
        response.expectStatus()
            .isOk()
            .expectBody(String.class)
            .isEqualTo(expectedBody);
    }
}

View File

@ -0,0 +1,25 @@
package com.baeldung.debugging.consumer.utils;
import java.util.ArrayList;
import java.util.List;
import ch.qos.logback.classic.spi.ILoggingEvent;
import ch.qos.logback.core.AppenderBase;
/**
 * Logback appender that records every logging event in a static in-memory
 * list so tests can assert on what was logged.
 *
 * NOTE(review): the backing ArrayList is not thread-safe; fine for
 * single-threaded tests, but concurrent logging would need a synchronized
 * collection — confirm usage before reusing elsewhere.
 */
public class ListAppender extends AppenderBase<ILoggingEvent> {

    // private static final replaces the non-idiomatic, non-final "static private"
    private static final List<ILoggingEvent> events = new ArrayList<>();

    @Override
    protected void append(ILoggingEvent eventObject) {
        events.add(eventObject);
    }

    /** Returns the live backing list; callers should treat it as read-only. */
    public static List<ILoggingEvent> getEvents() {
        return events;
    }

    /** Clears all recorded events; call between tests. */
    public static void clearEventList() {
        events.clear();
    }
}

View File

@ -0,0 +1,16 @@
<?xml version="1.0" encoding="UTF-8"?>
<!-- Logback test configuration: routes FooService (and root) logging into the
     in-memory ListAppender so integration tests can inspect logged events. -->
<configuration>
    <!-- Spring Boot's default logback setup; defines the CONSOLE appender -->
    <include
        resource="org/springframework/boot/logging/logback/base.xml" />
    <!-- Captures events in a static list for test assertions -->
    <appender name="LISTAPPENDER"
        class="com.baeldung.debugging.consumer.utils.ListAppender">
    </appender>
    <!-- FooService events additionally go to the list appender -->
    <logger
        name="com.baeldung.debugging.consumer.service.FooService">
        <appender-ref ref="LISTAPPENDER" />
    </logger>
    <root level="info">
        <appender-ref ref="CONSOLE" />
        <appender-ref ref="LISTAPPENDER" />
    </root>
</configuration>

View File

@ -31,7 +31,7 @@
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.thymeleaf.extras</groupId> <groupId>org.thymeleaf.extras</groupId>
<artifactId>thymeleaf-extras-springsecurity4</artifactId> <artifactId>thymeleaf-extras-springsecurity5</artifactId>
</dependency> </dependency>
<!-- oauth2 --> <!-- oauth2 -->
@ -66,7 +66,8 @@
</dependencies> </dependencies>
<properties> <properties>
<oauth-auto.version>2.0.1.RELEASE</oauth-auto.version> <spring-boot.version>2.1.0.RELEASE</spring-boot.version>
<oauth-auto.version>2.1.0.RELEASE</oauth-auto.version>
<start-class>com.baeldung.oauth2.SpringOAuthApplication</start-class> <start-class>com.baeldung.oauth2.SpringOAuthApplication</start-class>
</properties> </properties>

View File

@ -0,0 +1,50 @@
package com.baeldung.oauth2;
import java.util.HashMap;
import java.util.Map;
import javax.servlet.http.HttpServletRequest;
import org.springframework.security.oauth2.client.registration.ClientRegistrationRepository;
import org.springframework.security.oauth2.client.web.DefaultOAuth2AuthorizationRequestResolver;
import org.springframework.security.oauth2.client.web.OAuth2AuthorizationRequestResolver;
import org.springframework.security.oauth2.core.endpoint.OAuth2AuthorizationRequest;
/**
 * Decorates the default OAuth2 authorization request resolver, adding a
 * custom "test" parameter to every resolved authorization request.
 */
public class CustomAuthorizationRequestResolver implements OAuth2AuthorizationRequestResolver {

    private final OAuth2AuthorizationRequestResolver defaultResolver;

    public CustomAuthorizationRequestResolver(ClientRegistrationRepository repo, String authorizationRequestBaseUri) {
        defaultResolver = new DefaultOAuth2AuthorizationRequestResolver(repo, authorizationRequestBaseUri);
    }

    @Override
    public OAuth2AuthorizationRequest resolve(HttpServletRequest request) {
        OAuth2AuthorizationRequest req = defaultResolver.resolve(request);
        if (req != null) {
            req = customizeAuthorizationRequest(req);
        }
        return req;
    }

    @Override
    public OAuth2AuthorizationRequest resolve(HttpServletRequest request, String clientRegistrationId) {
        OAuth2AuthorizationRequest req = defaultResolver.resolve(request, clientRegistrationId);
        if (req != null) {
            req = customizeAuthorizationRequest(req);
        }
        return req;
    }

    /**
     * Copies the existing additional parameters (they would otherwise be
     * dropped by the builder) and appends the extra "test" parameter.
     */
    private OAuth2AuthorizationRequest customizeAuthorizationRequest(OAuth2AuthorizationRequest req) {
        Map<String, Object> extraParams = new HashMap<>(req.getAdditionalParameters());
        extraParams.put("test", "extra");
        return OAuth2AuthorizationRequest.from(req)
            .additionalParameters(extraParams)
            .build();
    }

    /**
     * Alternative customization example: overrides the state value instead of
     * adding parameters. Kept for illustration; not wired into resolve().
     */
    private OAuth2AuthorizationRequest customizeAuthorizationRequest1(OAuth2AuthorizationRequest req) {
        return OAuth2AuthorizationRequest.from(req)
            .state("xyz")
            .build();
    }
}

View File

@ -0,0 +1,26 @@
package com.baeldung.oauth2;
import org.springframework.core.convert.converter.Converter;
import org.springframework.http.RequestEntity;
import org.springframework.security.oauth2.client.endpoint.OAuth2AuthorizationCodeGrantRequest;
import org.springframework.security.oauth2.client.endpoint.OAuth2AuthorizationCodeGrantRequestEntityConverter;
import org.springframework.util.MultiValueMap;
/**
 * Decorates the default authorization-code grant request converter, adding a
 * custom "test2" form parameter to the token request body.
 */
public class CustomRequestEntityConverter implements Converter<OAuth2AuthorizationCodeGrantRequest, RequestEntity<?>> {

    private final OAuth2AuthorizationCodeGrantRequestEntityConverter defaultConverter;

    public CustomRequestEntityConverter() {
        defaultConverter = new OAuth2AuthorizationCodeGrantRequestEntityConverter();
    }

    @Override
    public RequestEntity<?> convert(OAuth2AuthorizationCodeGrantRequest req) {
        RequestEntity<?> entity = defaultConverter.convert(req);
        // The default converter builds a form body, so this cast is expected to hold.
        @SuppressWarnings("unchecked")
        MultiValueMap<String, String> params = (MultiValueMap<String, String>) entity.getBody();
        params.add("test2", "extra2");
        return new RequestEntity<>(params, entity.getHeaders(), entity.getMethod(), entity.getUrl());
    }
}

View File

@ -0,0 +1,67 @@
package com.baeldung.oauth2;
import java.util.Arrays;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.springframework.core.convert.converter.Converter;
import org.springframework.security.oauth2.core.OAuth2AccessToken;
import org.springframework.security.oauth2.core.endpoint.OAuth2AccessTokenResponse;
import org.springframework.security.oauth2.core.endpoint.OAuth2ParameterNames;
import org.springframework.util.StringUtils;
/**
 * Converts a raw token-endpoint parameter map into an
 * {@link OAuth2AccessTokenResponse}, preserving any non-standard parameters
 * as additional parameters on the response.
 */
public class CustomTokenResponseConverter implements Converter<Map<String, String>, OAuth2AccessTokenResponse> {

    // Standard token-response parameter names; everything else is treated as "additional"
    private static final Set<String> TOKEN_RESPONSE_PARAMETER_NAMES = Stream.of(
        OAuth2ParameterNames.ACCESS_TOKEN,
        OAuth2ParameterNames.TOKEN_TYPE,
        OAuth2ParameterNames.EXPIRES_IN,
        OAuth2ParameterNames.REFRESH_TOKEN,
        OAuth2ParameterNames.SCOPE)
        .collect(Collectors.toSet());

    @Override
    public OAuth2AccessTokenResponse convert(Map<String, String> tokenResponseParameters) {
        String accessToken = tokenResponseParameters.get(OAuth2ParameterNames.ACCESS_TOKEN);
        // Only the bearer type is recognized; anything else leaves the type null
        OAuth2AccessToken.TokenType accessTokenType = null;
        if (OAuth2AccessToken.TokenType.BEARER.getValue()
            .equalsIgnoreCase(tokenResponseParameters.get(OAuth2ParameterNames.TOKEN_TYPE))) {
            accessTokenType = OAuth2AccessToken.TokenType.BEARER;
        }
        long expiresIn = 0;
        if (tokenResponseParameters.containsKey(OAuth2ParameterNames.EXPIRES_IN)) {
            try {
                // parseLong avoids the needless boxing of Long.valueOf
                expiresIn = Long.parseLong(tokenResponseParameters.get(OAuth2ParameterNames.EXPIRES_IN));
            } catch (NumberFormatException ignored) {
                // malformed expires_in: deliberately fall back to 0 (no known expiry)
            }
        }
        // Scopes arrive space-delimited per the OAuth2 spec
        Set<String> scopes = Collections.emptySet();
        if (tokenResponseParameters.containsKey(OAuth2ParameterNames.SCOPE)) {
            String scope = tokenResponseParameters.get(OAuth2ParameterNames.SCOPE);
            scopes = Arrays.stream(StringUtils.delimitedListToStringArray(scope, " "))
                .collect(Collectors.toSet());
        }
        String refreshToken = tokenResponseParameters.get(OAuth2ParameterNames.REFRESH_TOKEN);
        // Carry over every non-standard parameter untouched
        Map<String, Object> additionalParameters = new LinkedHashMap<>();
        tokenResponseParameters.entrySet()
            .stream()
            .filter(e -> !TOKEN_RESPONSE_PARAMETER_NAMES.contains(e.getKey()))
            .forEach(e -> additionalParameters.put(e.getKey(), e.getValue()));
        return OAuth2AccessTokenResponse.withToken(accessToken)
            .tokenType(accessTokenType)
            .expiresIn(expiresIn)
            .scopes(scopes)
            .refreshToken(refreshToken)
            .additionalParameters(additionalParameters)
            .build();
    }
}

View File

@ -9,18 +9,22 @@ import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.PropertySource; import org.springframework.context.annotation.PropertySource;
import org.springframework.core.env.Environment; import org.springframework.core.env.Environment;
import org.springframework.http.converter.FormHttpMessageConverter;
import org.springframework.security.config.annotation.web.builders.HttpSecurity; import org.springframework.security.config.annotation.web.builders.HttpSecurity;
import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter; import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter;
import org.springframework.security.config.oauth2.client.CommonOAuth2Provider; import org.springframework.security.config.oauth2.client.CommonOAuth2Provider;
import org.springframework.security.oauth2.client.endpoint.NimbusAuthorizationCodeTokenResponseClient; import org.springframework.security.oauth2.client.endpoint.DefaultAuthorizationCodeTokenResponseClient;
import org.springframework.security.oauth2.client.endpoint.OAuth2AccessTokenResponseClient; import org.springframework.security.oauth2.client.endpoint.OAuth2AccessTokenResponseClient;
import org.springframework.security.oauth2.client.endpoint.OAuth2AuthorizationCodeGrantRequest; import org.springframework.security.oauth2.client.endpoint.OAuth2AuthorizationCodeGrantRequest;
import org.springframework.security.oauth2.client.http.OAuth2ErrorResponseErrorHandler;
import org.springframework.security.oauth2.client.registration.ClientRegistration; import org.springframework.security.oauth2.client.registration.ClientRegistration;
import org.springframework.security.oauth2.client.registration.ClientRegistrationRepository; import org.springframework.security.oauth2.client.registration.ClientRegistrationRepository;
import org.springframework.security.oauth2.client.registration.InMemoryClientRegistrationRepository; import org.springframework.security.oauth2.client.registration.InMemoryClientRegistrationRepository;
import org.springframework.security.oauth2.client.web.AuthorizationRequestRepository; import org.springframework.security.oauth2.client.web.AuthorizationRequestRepository;
import org.springframework.security.oauth2.client.web.HttpSessionOAuth2AuthorizationRequestRepository; import org.springframework.security.oauth2.client.web.HttpSessionOAuth2AuthorizationRequestRepository;
import org.springframework.security.oauth2.core.endpoint.OAuth2AuthorizationRequest; import org.springframework.security.oauth2.core.endpoint.OAuth2AuthorizationRequest;
import org.springframework.security.oauth2.core.http.converter.OAuth2AccessTokenResponseHttpMessageConverter;
import org.springframework.web.client.RestTemplate;
@Configuration @Configuration
@PropertySource("application-oauth2.properties") @PropertySource("application-oauth2.properties")
@ -37,6 +41,8 @@ public class SecurityConfig extends WebSecurityConfigurerAdapter {
.oauth2Login() .oauth2Login()
.loginPage("/oauth_login") .loginPage("/oauth_login")
.authorizationEndpoint() .authorizationEndpoint()
.authorizationRequestResolver( new CustomAuthorizationRequestResolver(clientRegistrationRepository(),"/oauth2/authorize-client"))
.baseUri("/oauth2/authorize-client") .baseUri("/oauth2/authorize-client")
.authorizationRequestRepository(authorizationRequestRepository()) .authorizationRequestRepository(authorizationRequestRepository())
.and() .and()
@ -54,7 +60,15 @@ public class SecurityConfig extends WebSecurityConfigurerAdapter {
@Bean @Bean
public OAuth2AccessTokenResponseClient<OAuth2AuthorizationCodeGrantRequest> accessTokenResponseClient() { public OAuth2AccessTokenResponseClient<OAuth2AuthorizationCodeGrantRequest> accessTokenResponseClient() {
return new NimbusAuthorizationCodeTokenResponseClient(); DefaultAuthorizationCodeTokenResponseClient accessTokenResponseClient = new DefaultAuthorizationCodeTokenResponseClient();
accessTokenResponseClient.setRequestEntityConverter(new CustomRequestEntityConverter());
OAuth2AccessTokenResponseHttpMessageConverter tokenResponseHttpMessageConverter = new OAuth2AccessTokenResponseHttpMessageConverter();
tokenResponseHttpMessageConverter.setTokenResponseConverter(new CustomTokenResponseConverter());
RestTemplate restTemplate = new RestTemplate(Arrays.asList(new FormHttpMessageConverter(), tokenResponseHttpMessageConverter));
restTemplate.setErrorHandler(new OAuth2ErrorResponseErrorHandler());
accessTokenResponseClient.setRestOperations(restTemplate);
return accessTokenResponseClient;
} }

View File

@ -1,3 +1 @@
org.springframework.boot.diagnostics.FailureAnalyzer=com.baeldung.failureanalyzer.MyBeanNotOfRequiredTypeFailureAnalyzer org.springframework.boot.autoconfigure.EnableAutoConfiguration=com.baeldung.autoconfiguration.MySQLAutoconfiguration
org.springframework.boot.autoconfigure.EnableAutoConfiguration=com.baeldung.autoconfiguration.MySQLAutoconfiguration

View File

@ -1,11 +1,19 @@
package com.baeldung.mongodb; package com.baeldung.mongodb;
import com.baeldung.mongodb.daos.UserRepository;
import com.baeldung.mongodb.models.User;
import org.springframework.boot.CommandLineRunner;
import org.springframework.boot.SpringApplication; import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication; import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.annotation.Bean;
import java.util.List;
@SpringBootApplication @SpringBootApplication
public class Application { public class Application {
public static void main(String[] args) { public static void main(String[] args) {
SpringApplication.run(Application.class, args); SpringApplication.run(Application.class, args);
} }
} }

View File

@ -0,0 +1,10 @@
package com.baeldung.mongodb.daos;
import com.baeldung.mongodb.models.User;
import org.springframework.data.mongodb.repository.MongoRepository;
/**
 * Spring Data repository for {@link User} documents. Ids are longs assigned
 * before save by UserModelListener via SequenceGeneratorService, not by the
 * database.
 */
public interface UserRepository extends MongoRepository<User, Long> {
}

View File

@ -0,0 +1,28 @@
package com.baeldung.mongodb.events;
import com.baeldung.mongodb.models.User;
import com.baeldung.mongodb.services.SequenceGeneratorService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.mongodb.core.mapping.event.AbstractMongoEventListener;
import org.springframework.data.mongodb.core.mapping.event.BeforeConvertEvent;
import org.springframework.stereotype.Component;
/**
 * Mongo lifecycle listener that assigns a sequence-generated id to every
 * {@link User} just before it is converted for persistence.
 */
@Component
public class UserModelListener extends AbstractMongoEventListener<User> {

    // final: the generator is injected once via the constructor and never reassigned
    private final SequenceGeneratorService sequenceGenerator;

    @Autowired
    public UserModelListener(SequenceGeneratorService sequenceGenerator) {
        this.sequenceGenerator = sequenceGenerator;
    }

    @Override
    public void onBeforeConvert(BeforeConvertEvent<User> event) {
        // Pull the next value from the shared "users_sequence" counter
        event.getSource().setId(sequenceGenerator.generateSequence(User.SEQUENCE_NAME));
    }
}

View File

@ -0,0 +1,32 @@
package com.baeldung.mongodb.models;
import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.core.mapping.Document;
/**
 * Mongo document backing an auto-increment counter. Each sequence (e.g.
 * "users_sequence") is one document whose {@code seq} field is atomically
 * incremented by SequenceGeneratorService.
 */
@Document(collection = "database_sequences")
public class DatabaseSequence {

    // Sequence name, e.g. User.SEQUENCE_NAME
    @Id
    private String id;

    // Last issued value of this counter
    private long seq;

    public DatabaseSequence() {}

    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    public long getSeq() {
        return seq;
    }

    public void setSeq(long seq) {
        this.seq = seq;
    }
}

View File

@ -0,0 +1,73 @@
package com.baeldung.mongodb.models;
import org.springframework.data.annotation.Id;
import org.springframework.data.annotation.Transient;
import org.springframework.data.mongodb.core.mapping.Document;
/**
 * Mongo document for application users. The numeric id is not generated by
 * the database: UserModelListener assigns it from the "users_sequence"
 * counter before the document is converted.
 */
@Document(collection = "users")
public class User {

    // Name of the counter document used to generate ids; excluded from persistence.
    @Transient
    public static final String SEQUENCE_NAME = "users_sequence";

    @Id
    private long id;

    private String firstName;
    private String lastName;
    private String email;

    public User() { }

    public User(String firstName, String lastName, String email) {
        this.firstName = firstName;
        this.lastName = lastName;
        this.email = email;
    }

    public long getId() {
        return id;
    }

    public void setId(long id) {
        this.id = id;
    }

    public String getFirstName() {
        return firstName;
    }

    public void setFirstName(String firstName) {
        this.firstName = firstName;
    }

    public String getLastName() {
        return lastName;
    }

    public void setLastName(String lastName) {
        this.lastName = lastName;
    }

    public String getEmail() {
        return email;
    }

    public void setEmail(String email) {
        this.email = email;
    }

    @Override
    public String toString() {
        // Produces the same text as the original concatenation-based version.
        return String.format("User{id=%d, firstName='%s', lastName='%s', email='%s'}",
            id, firstName, lastName, email);
    }
}

View File

@ -0,0 +1,35 @@
package com.baeldung.mongodb.services;
import com.baeldung.mongodb.models.DatabaseSequence;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.query.Update;
import org.springframework.stereotype.Service;
import java.util.Objects;
import static org.springframework.data.mongodb.core.FindAndModifyOptions.options;
import static org.springframework.data.mongodb.core.query.Criteria.where;
import static org.springframework.data.mongodb.core.query.Query.query;
/**
 * Issues auto-increment ids backed by {@link DatabaseSequence} documents.
 */
@Service
public class SequenceGeneratorService {

    // final: injected once via the constructor
    private final MongoOperations mongoOperations;

    @Autowired
    public SequenceGeneratorService(MongoOperations mongoOperations) {
        this.mongoOperations = mongoOperations;
    }

    /**
     * Atomically increments and returns the counter stored under seqName,
     * creating it on first use (upsert). Falls back to 1 if Mongo returns
     * no document.
     */
    public long generateSequence(String seqName) {
        DatabaseSequence counter = mongoOperations.findAndModify(query(where("_id").is(seqName)),
            new Update().inc("seq", 1), options().returnNew(true).upsert(true),
            DatabaseSequence.class);
        // plain null check is clearer than !Objects.isNull(...)
        return counter != null ? counter.getSeq() : 1;
    }
}

View File

@ -0,0 +1,36 @@
package com.baeldung.mongodb;
import com.baeldung.mongodb.daos.UserRepository;
import com.baeldung.mongodb.models.User;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.junit4.SpringRunner;
import static org.assertj.core.api.Assertions.assertThat;
import java.util.List;
/**
 * Integration test verifying that saving a User persists it (with the
 * listener-generated id) and that it is subsequently retrievable.
 *
 * NOTE(review): only @RunWith(SpringRunner.class) is visible here — no
 * @SpringBootTest/@DataMongoTest annotation — so it is unclear how the
 * UserRepository autowiring is bootstrapped; confirm the context
 * configuration before relying on this test.
 */
@RunWith(SpringRunner.class)
public class MongoDbAutoGeneratedFieldIntegrationTest {

    @Autowired
    private UserRepository userRepository;

    @Test
    public void contextLoads() {}

    @Test
    public void givenUserObject_whenSave_thenCreateNewUser() {
        User user = new User();
        user.setFirstName("John");
        user.setLastName("Doe");
        user.setEmail("john.doe@example.com");
        userRepository.save(user);
        // id is assigned by UserModelListener before save, so findAll should return it
        assertThat(userRepository.findAll().size()).isGreaterThan(0);
    }
}