Merge branch 'master' of https://github.com/eugenp/tutorials into task/BAEL-9567

Dhawal Kapil 2018-11-30 22:16:55 +05:30
commit 34e86c294a
109 changed files with 4048 additions and 767 deletions

View File

@ -0,0 +1,67 @@
package com.baeldung.java.list;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.ListIterator;
/**
* Demonstrates the different ways to loop over
* the elements of a list.
*/
public class WaysToIterate {
List<String> countries = Arrays.asList("Germany", "Panama", "Australia");
/**
* Iterate over a list using a basic for loop
*/
public void iterateWithForLoop() {
for (int i = 0; i < countries.size(); i++) {
System.out.println(countries.get(i));
}
}
/**
* Iterate over a list using the enhanced for loop
*/
public void iterateWithEnhancedForLoop() {
for (String country : countries) {
System.out.println(country);
}
}
/**
* Iterate over a list using an Iterator
*/
public void iterateWithIterator() {
Iterator<String> countriesIterator = countries.iterator();
while(countriesIterator.hasNext()) {
System.out.println(countriesIterator.next());
}
}
/**
* Iterate over a list using a ListIterator
*/
public void iterateWithListIterator() {
ListIterator<String> listIterator = countries.listIterator();
while(listIterator.hasNext()) {
System.out.println(listIterator.next());
}
}
/**
* Iterate over a list using the Iterable.forEach() method
*/
public void iterateWithForEach() {
countries.forEach(System.out::println);
}
/**
* Iterate over a list using the Stream.forEach() method
*/
public void iterateWithStreamForEach() {
countries.stream().forEach((c) -> System.out.println(c));
}
}

View File

@ -0,0 +1,71 @@
package com.baeldung.java.list;
import static org.junit.Assert.assertEquals;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.ListIterator;
import org.junit.Test;
public class WaysToIterateUnitTest {
List<String> globalCountries = new ArrayList<String>();
List<String> europeanCountries = Arrays.asList("Germany", "France", "Spain");
@Test
public void whenIteratingUsingForLoop_thenReturnThreeAsSizeOfList() {
for (int i = 0; i < europeanCountries.size(); i++) {
globalCountries.add(europeanCountries.get(i));
}
assertEquals(3, globalCountries.size());
globalCountries.clear();
}
@Test
public void whenIteratingUsingEnhancedForLoop_thenReturnThreeAsSizeOfList() {
for (String country : europeanCountries) {
globalCountries.add(country);
}
assertEquals(3, globalCountries.size());
globalCountries.clear();
}
@Test
public void whenIteratingUsingIterator_thenReturnThreeAsSizeOfList() {
Iterator<String> countriesIterator = europeanCountries.iterator();
while (countriesIterator.hasNext()) {
globalCountries.add(countriesIterator.next());
}
assertEquals(3, globalCountries.size());
globalCountries.clear();
}
@Test
public void whenIteratingUsingListIterator_thenReturnThreeAsSizeOfList() {
ListIterator<String> countriesIterator = europeanCountries.listIterator();
while (countriesIterator.hasNext()) {
globalCountries.add(countriesIterator.next());
}
assertEquals(3, globalCountries.size());
globalCountries.clear();
}
@Test
public void whenIteratingUsingForEach_thenReturnThreeAsSizeOfList() {
europeanCountries.forEach(country -> globalCountries.add(country));
assertEquals(3, globalCountries.size());
globalCountries.clear();
}
@Test
public void whenIteratingUsingStreamForEach_thenReturnThreeAsSizeOfList() {
europeanCountries.stream().forEach((country) -> globalCountries.add(country));
assertEquals(3, globalCountries.size());
globalCountries.clear();
}
}

View File

@ -0,0 +1,33 @@
package com.baeldung.concurrent.countdownlatch;
import java.util.concurrent.CountDownLatch;
public class CountdownLatchCountExample {
private int count;
public CountdownLatchCountExample(int count) {
this.count = count;
}
public boolean callTwiceInSameThread() {
CountDownLatch countDownLatch = new CountDownLatch(count);
Thread t = new Thread(() -> {
countDownLatch.countDown();
countDownLatch.countDown();
});
t.start();
try {
countDownLatch.await();
} catch (InterruptedException e) {
e.printStackTrace();
}
return countDownLatch.getCount() == 0;
}
public static void main(String[] args) {
CountdownLatchCountExample ex = new CountdownLatchCountExample(2);
System.out.println("Is CountDown Completed : " + ex.callTwiceInSameThread());
}
}

View File

@ -0,0 +1,41 @@
package com.baeldung.concurrent.countdownlatch;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
public class CountdownLatchResetExample {
private int count;
private int threadCount;
private final AtomicInteger updateCount;
CountdownLatchResetExample(int count, int threadCount) {
updateCount = new AtomicInteger(0);
this.count = count;
this.threadCount = threadCount;
}
public int countWaits() {
CountDownLatch countDownLatch = new CountDownLatch(count);
ExecutorService es = Executors.newFixedThreadPool(threadCount);
for (int i = 0; i < threadCount; i++) {
es.execute(() -> {
long prevValue = countDownLatch.getCount();
countDownLatch.countDown();
if (countDownLatch.getCount() != prevValue) {
updateCount.incrementAndGet();
}
});
}
es.shutdown();
try {
// wait for the worker tasks to finish before reading the result,
// otherwise the count may be read while threads are still running
es.awaitTermination(5, TimeUnit.SECONDS);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
return updateCount.get();
}
public static void main(String[] args) {
CountdownLatchResetExample ex = new CountdownLatchResetExample(5, 20);
System.out.println("Count : " + ex.countWaits());
}
}
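A side note on the example above: a CountDownLatch cannot be reset once it reaches zero, so code that needs a reusable gate typically builds a fresh latch per round. A minimal sketch of that workaround (class and method names here are illustrative, not part of this commit):

import java.util.concurrent.CountDownLatch;

public class LatchPerRound {
    // one round of work gated by its own latch; a new latch replaces any "reset"
    static void runRound(int workers) throws InterruptedException {
        CountDownLatch latch = new CountDownLatch(workers);
        for (int i = 0; i < workers; i++) {
            new Thread(latch::countDown).start();
        }
        latch.await(); // after this returns, the latch is spent for good
    }

    public static void main(String[] args) throws InterruptedException {
        runRound(3);
        runRound(3); // the second round uses a brand-new latch
    }
}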

View File

@ -0,0 +1,45 @@
package com.baeldung.concurrent.cyclicbarrier;
import java.util.concurrent.BrokenBarrierException;
import java.util.concurrent.CyclicBarrier;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
public class CyclicBarrierCompletionMethodExample {
private int count;
private int threadCount;
private final AtomicInteger updateCount;
CyclicBarrierCompletionMethodExample(int count, int threadCount) {
updateCount = new AtomicInteger(0);
this.count = count;
this.threadCount = threadCount;
}
public int countTrips() {
CyclicBarrier cyclicBarrier = new CyclicBarrier(count, () -> {
updateCount.incrementAndGet();
});
ExecutorService es = Executors.newFixedThreadPool(threadCount);
for (int i = 0; i < threadCount; i++) {
es.execute(() -> {
try {
cyclicBarrier.await();
} catch (InterruptedException | BrokenBarrierException e) {
e.printStackTrace();
}
});
}
es.shutdown();
try {
// give the pool time to settle; threads that can no longer trip the
// barrier are abandoned once the timeout elapses
es.awaitTermination(5, TimeUnit.SECONDS);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
return updateCount.get();
}
public static void main(String[] args) {
CyclicBarrierCompletionMethodExample ex = new CyclicBarrierCompletionMethodExample(5, 20);
System.out.println("Count : " + ex.countTrips());
}
}

View File

@ -0,0 +1,32 @@
package com.baeldung.concurrent.cyclicbarrier;
import java.util.concurrent.BrokenBarrierException;
import java.util.concurrent.CyclicBarrier;
public class CyclicBarrierCountExample {
private int count;
public CyclicBarrierCountExample(int count) {
this.count = count;
}
public boolean callTwiceInSameThread() {
CyclicBarrier cyclicBarrier = new CyclicBarrier(count);
Thread t = new Thread(() -> {
try {
cyclicBarrier.await();
cyclicBarrier.await();
} catch (InterruptedException | BrokenBarrierException e) {
e.printStackTrace();
}
});
t.start();
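// a single thread cannot trip a barrier whose party count is greater than one,
// so the worker above stays blocked in await() and isBroken() below remains false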
return cyclicBarrier.isBroken();
}
public static void main(String[] args) {
CyclicBarrierCountExample ex = new CyclicBarrierCountExample(7);
System.out.println("Count : " + ex.callTwiceInSameThread());
}
}

View File

@ -0,0 +1,46 @@
package com.baeldung.concurrent.cyclicbarrier;
import java.util.concurrent.BrokenBarrierException;
import java.util.concurrent.CyclicBarrier;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
public class CyclicBarrierResetExample {
private int count;
private int threadCount;
private final AtomicInteger updateCount;
CyclicBarrierResetExample(int count, int threadCount) {
updateCount = new AtomicInteger(0);
this.count = count;
this.threadCount = threadCount;
}
public int countWaits() {
CyclicBarrier cyclicBarrier = new CyclicBarrier(count);
ExecutorService es = Executors.newFixedThreadPool(threadCount);
for (int i = 0; i < threadCount; i++) {
es.execute(() -> {
try {
if (cyclicBarrier.getNumberWaiting() > 0) {
updateCount.incrementAndGet();
}
cyclicBarrier.await();
} catch (InterruptedException | BrokenBarrierException e) {
e.printStackTrace();
}
});
}
es.shutdown();
try {
// wait for the pool to settle; threads still parked at the barrier
// are abandoned once the timeout elapses
es.awaitTermination(5, TimeUnit.SECONDS);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
return updateCount.get();
}
public static void main(String[] args) {
CyclicBarrierResetExample ex = new CyclicBarrierResetExample(7, 20);
System.out.println("Count : " + ex.countWaits());
}
}
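In contrast to a CountDownLatch, a CyclicBarrier resets itself automatically after each trip, which is what the example above counts. A minimal illustration of that reuse (names are illustrative, not part of this commit): with parties = 2 and four awaiting threads, the same barrier trips twice, and the barrier action runs once per trip.

import java.util.concurrent.BrokenBarrierException;
import java.util.concurrent.CyclicBarrier;

public class BarrierReuse {
    public static void main(String[] args) {
        CyclicBarrier barrier = new CyclicBarrier(2, () -> System.out.println("tripped"));
        for (int i = 0; i < 4; i++) {
            new Thread(() -> {
                try {
                    barrier.await(); // every second arrival trips and resets the barrier
                } catch (InterruptedException | BrokenBarrierException e) {
                    Thread.currentThread().interrupt();
                }
            }).start();
        }
    }
}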

View File

@ -0,0 +1,15 @@
package com.baeldung.concurrent.countdownlatch;
import static org.junit.Assert.assertTrue;
import org.junit.Test;
public class CountdownLatchCountExampleUnitTest {
@Test
public void whenCountDownLatch_completed() {
CountdownLatchCountExample ex = new CountdownLatchCountExample(2);
boolean isCompleted = ex.callTwiceInSameThread();
assertTrue(isCompleted);
}
}

View File

@ -0,0 +1,15 @@
package com.baeldung.concurrent.countdownlatch;
import static org.junit.Assert.assertTrue;
import org.junit.Test;
public class CountdownLatchResetExampleUnitTest {
@Test
public void whenCountDownLatch_noReset() {
CountdownLatchResetExample ex = new CountdownLatchResetExample(7,20);
int lineCount = ex.countWaits();
assertTrue(lineCount <= 7);
}
}

View File

@ -0,0 +1,15 @@
package com.baeldung.concurrent.cyclicbarrier;
import static org.junit.Assert.assertEquals;
import org.junit.Test;
public class CyclicBarrierCompletionMethodExampleUnitTest {
@Test
public void whenCyclicBarrier_countTrips() {
CyclicBarrierCompletionMethodExample ex = new CyclicBarrierCompletionMethodExample(7,20);
int lineCount = ex.countTrips();
assertEquals(2, lineCount);
}
}

View File

@ -0,0 +1,15 @@
package com.baeldung.concurrent.cyclicbarrier;
import static org.junit.Assert.assertFalse;
import org.junit.Test;
public class CyclicBarrierCountExampleUnitTest {
@Test
public void whenCyclicBarrier_notCompleted() {
CyclicBarrierCountExample ex = new CyclicBarrierCountExample(2);
boolean isCompleted = ex.callTwiceInSameThread();
assertFalse(isCompleted);
}
}

View File

@ -0,0 +1,15 @@
package com.baeldung.concurrent.cyclicbarrier;
import static org.junit.Assert.assertTrue;
import org.junit.Test;
public class CyclicBarrierResetExampleUnitTest {
@Test
public void whenCyclicBarrier_reset() {
CyclicBarrierResetExample ex = new CyclicBarrierResetExample(7,20);
int lineCount = ex.countWaits();
assertTrue(lineCount > 7);
}
}

View File

@ -66,6 +66,12 @@
<artifactId>mail</artifactId>
<version>${javax.mail.version}</version>
</dependency>
<dependency>
<groupId>nl.jqno.equalsverifier</groupId>
<artifactId>equalsverifier</artifactId>
<version>${equalsverifier.version}</version>
<scope>test</scope>
</dependency>
</dependencies>
<build>
@ -424,6 +430,7 @@
<maven-shade-plugin.version>3.1.1</maven-shade-plugin.version>
<spring-boot-maven-plugin.version>2.0.3.RELEASE</spring-boot-maven-plugin.version>
<exec-maven-plugin.version>1.6.0</exec-maven-plugin.version>
<equalsverifier.version>3.0.3</equalsverifier.version>
</properties>
</project>

View File

@ -0,0 +1,36 @@
package com.baeldung.equalshashcode;
class Money {
int amount;
String currencyCode;
Money(int amount, String currencyCode) {
this.amount = amount;
this.currencyCode = currencyCode;
}
@Override
public boolean equals(Object o) {
if (o == this)
return true;
if (!(o instanceof Money))
return false;
Money other = (Money)o;
boolean currencyCodeEquals = (this.currencyCode == null && other.currencyCode == null)
|| (this.currencyCode != null && this.currencyCode.equals(other.currencyCode));
return this.amount == other.amount
&& currencyCodeEquals;
}
@Override
public int hashCode() {
int result = 17;
result = 31 * result + amount;
if (currencyCode != null) {
result = 31 * result + currencyCode.hashCode();
}
return result;
}
}
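As an aside, the same null-safe recipe can be written with the JDK helper java.util.Objects.hash. A minimal alternative sketch (equivalent in contract, though the numeric values differ from the handwritten version above):

@Override
public int hashCode() {
    // null-safe; combines the fields with a 31-based formula like the version above
    return java.util.Objects.hash(amount, currencyCode);
}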

View File

@ -0,0 +1,39 @@
package com.baeldung.equalshashcode;
class Team {
final String city;
final String department;
Team(String city, String department) {
this.city = city;
this.department = department;
}
@Override
public final boolean equals(Object o) {
if (o == this)
return true;
if (!(o instanceof Team))
return false;
Team otherTeam = (Team)o;
boolean cityEquals = (this.city == null && otherTeam.city == null)
|| this.city != null && this.city.equals(otherTeam.city);
boolean departmentEquals = (this.department == null && otherTeam.department == null)
|| this.department != null && this.department.equals(otherTeam.department);
return cityEquals && departmentEquals;
}
@Override
public final int hashCode() {
int result = 17;
if (city != null) {
result = 31 * result + city.hashCode();
}
if (department != null) {
result = 31 * result + department.hashCode();
}
return result;
}
}

View File

@ -0,0 +1,38 @@
package com.baeldung.equalshashcode;
class Voucher {
private Money value;
private String store;
Voucher(int amount, String currencyCode, String store) {
this.value = new Money(amount, currencyCode);
this.store = store;
}
@Override
public boolean equals(Object o) {
if (o == this)
return true;
if (!(o instanceof Voucher))
return false;
Voucher other = (Voucher)o;
boolean valueEquals = (this.value == null && other.value == null)
|| (this.value != null && this.value.equals(other.value));
boolean storeEquals = (this.store == null && other.store == null)
|| (this.store != null && this.store.equals(other.store));
return valueEquals && storeEquals;
}
@Override
public int hashCode() {
int result = 17;
if (this.value != null) {
result = 31 * result + value.hashCode();
}
if (this.store != null) {
result = 31 * result + store.hashCode();
}
return result;
}
}

View File

@ -0,0 +1,30 @@
package com.baeldung.equalshashcode;
/* (non-Javadoc)
* This class overrides equals, but it doesn't override hashCode.
*
* To see which problems this leads to:
* TeamUnitTest.givenMapKeyWithoutHashCode_whenSearched_thenReturnsWrongValue
*/
class WrongTeam {
String city;
String department;
WrongTeam(String city, String department) {
this.city = city;
this.department = department;
}
@Override
public boolean equals(Object o) {
if (o == this)
return true;
if (!(o instanceof WrongTeam))
return false;
WrongTeam otherTeam = (WrongTeam)o;
boolean cityEquals = (this.city == null && otherTeam.city == null)
|| (this.city != null && this.city.equals(otherTeam.city));
boolean departmentEquals = (this.department == null && otherTeam.department == null)
|| (this.department != null && this.department.equals(otherTeam.department));
return cityEquals && departmentEquals;
}
}

View File

@ -0,0 +1,47 @@
package com.baeldung.equalshashcode;
/* (non-Javadoc)
* This class extends the Money class that has overridden the equals method and once again overrides the equals method.
*
* To see which problems this leads to:
* MoneyUnitTest.givenMoneyAndVoucherInstances_whenEquals_thenReturnValuesArentSymmetric
*/
class WrongVoucher extends Money {
private String store;
WrongVoucher(int amount, String currencyCode, String store) {
super(amount, currencyCode);
this.store = store;
}
@Override
public boolean equals(Object o) {
if (o == this)
return true;
if (!(o instanceof WrongVoucher))
return false;
WrongVoucher other = (WrongVoucher)o;
boolean currencyCodeEquals = (this.currencyCode == null && other.currencyCode == null)
|| (this.currencyCode != null && this.currencyCode.equals(other.currencyCode));
boolean storeEquals = (this.store == null && other.store == null)
|| (this.store != null && this.store.equals(other.store));
return this.amount == other.amount
&& currencyCodeEquals
&& storeEquals;
}
@Override
public int hashCode() {
int result = 17;
result = 31 * result + amount;
if (this.currencyCode != null) {
result = 31 * result + currencyCode.hashCode();
}
if (this.store != null) {
result = 31 * result + store.hashCode();
}
return result;
}
}

View File

@ -0,0 +1,27 @@
package com.baeldung.equalshashcode;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.assertFalse;
import org.junit.Test;
public class MoneyUnitTest {
@Test
public void givenMoneyInstancesWithSameAmountAndCurrency_whenEquals_thenReturnsTrue() {
Money income = new Money(55, "USD");
Money expenses = new Money(55, "USD");
assertTrue(income.equals(expenses));
}
@Test
public void givenMoneyAndVoucherInstances_whenEquals_thenReturnValuesArentSymmetric() {
Money cash = new Money(42, "USD");
WrongVoucher voucher = new WrongVoucher(42, "USD", "Amazon");
assertFalse(voucher.equals(cash));
assertTrue(cash.equals(voucher));
}
}

View File

@ -0,0 +1,46 @@
package com.baeldung.equalshashcode;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import java.util.HashMap;
import java.util.Map;
import org.junit.Test;
import nl.jqno.equalsverifier.EqualsVerifier;
public class TeamUnitTest {
@Test
public void givenMapKeyWithHashCode_whenSearched_thenReturnsCorrectValue() {
Map<Team,String> leaders = new HashMap<>();
leaders.put(new Team("New York", "development"), "Anne");
leaders.put(new Team("Boston", "development"), "Brian");
leaders.put(new Team("Boston", "marketing"), "Charlie");
Team myTeam = new Team("New York", "development");
String myTeamleader = leaders.get(myTeam);
assertEquals("Anne", myTeamleader);
}
@Test
public void givenMapKeyWithoutHashCode_whenSearched_thenReturnsWrongValue() {
Map<WrongTeam,String> leaders = new HashMap<>();
leaders.put(new WrongTeam("New York", "development"), "Anne");
leaders.put(new WrongTeam("Boston", "development"), "Brian");
leaders.put(new WrongTeam("Boston", "marketing"), "Charlie");
WrongTeam myTeam = new WrongTeam("New York", "development");
String myTeamleader = leaders.get(myTeam);
assertFalse("Anne".equals(myTeamleader));
}
@Test
public void equalsContract() {
EqualsVerifier.forClass(Team.class).verify();
}
}

View File

@ -0,0 +1,76 @@
package com.baeldung.graph;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public class Graph {
private Map<Vertex, List<Vertex>> adjVertices;
Graph() {
this.adjVertices = new HashMap<Vertex, List<Vertex>>();
}
void addVertex(String label) {
adjVertices.putIfAbsent(new Vertex(label), new ArrayList<>());
}
void removeVertex(String label) {
Vertex v = new Vertex(label);
adjVertices.values().forEach(e -> e.remove(v));
adjVertices.remove(v);
}
void addEdge(String label1, String label2) {
Vertex v1 = new Vertex(label1);
Vertex v2 = new Vertex(label2);
adjVertices.get(v1).add(v2);
adjVertices.get(v2).add(v1);
}
void removeEdge(String label1, String label2) {
Vertex v1 = new Vertex(label1);
Vertex v2 = new Vertex(label2);
List<Vertex> eV1 = adjVertices.get(v1);
List<Vertex> eV2 = adjVertices.get(v2);
if (eV1 != null)
eV1.remove(v2);
if (eV2 != null)
eV2.remove(v1);
}
List<Vertex> getAdjVertices(String label) {
return adjVertices.get(new Vertex(label));
}
String printGraph() {
StringBuilder sb = new StringBuilder();
for(Vertex v : adjVertices.keySet()) {
sb.append(v);
sb.append(adjVertices.get(v));
}
return sb.toString();
}
class Vertex {
String label;
Vertex(String label) {
this.label = label;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (!(obj instanceof Vertex))
return false;
Vertex vertex = (Vertex) obj;
return label.equals(vertex.label);
}
@Override
public int hashCode() {
return label.hashCode();
}
@Override
public String toString() {
return label;
}
}
}

View File

@ -0,0 +1,44 @@
package com.baeldung.graph;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.Queue;
import java.util.Set;
import java.util.Stack;
import com.baeldung.graph.Graph.Vertex;
public class GraphTraversal {
static Set<String> depthFirstTraversal(Graph graph, String root) {
Set<String> visited = new LinkedHashSet<String>();
Stack<String> stack = new Stack<String>();
stack.push(root);
while (!stack.isEmpty()) {
String vertex = stack.pop();
if (!visited.contains(vertex)) {
visited.add(vertex);
for (Vertex v : graph.getAdjVertices(vertex)) {
stack.push(v.label);
}
}
}
return visited;
}
static Set<String> breadthFirstTraversal(Graph graph, String root) {
Set<String> visited = new LinkedHashSet<String>();
Queue<String> queue = new LinkedList<String>();
queue.add(root);
visited.add(root);
while (!queue.isEmpty()) {
String vertex = queue.poll();
for (Vertex v : graph.getAdjVertices(vertex)) {
if (!visited.contains(v.label)) {
visited.add(v.label);
queue.add(v.label);
}
}
}
return visited;
}
}

View File

@ -0,0 +1,36 @@
package com.baeldung.graph;
import org.junit.Assert;
import org.junit.Test;
public class GraphTraversalUnitTest {
@Test
public void givenAGraph_whenTraversingDepthFirst_thenExpectedResult() {
Graph graph = createGraph();
Assert.assertEquals("[Bob, Rob, Maria, Alice, Mark]",
GraphTraversal.depthFirstTraversal(graph, "Bob").toString());
}
@Test
public void givenAGraph_whenTraversingBreadthFirst_thenExpectedResult() {
Graph graph = createGraph();
Assert.assertEquals("[Bob, Alice, Rob, Mark, Maria]",
GraphTraversal.breadthFirstTraversal(graph, "Bob").toString());
}
Graph createGraph() {
Graph graph = new Graph();
graph.addVertex("Bob");
graph.addVertex("Alice");
graph.addVertex("Mark");
graph.addVertex("Rob");
graph.addVertex("Maria");
graph.addEdge("Bob", "Alice");
graph.addEdge("Bob", "Rob");
graph.addEdge("Alice", "Mark");
graph.addEdge("Rob", "Mark");
graph.addEdge("Alice", "Maria");
graph.addEdge("Rob", "Maria");
return graph;
}
}

View File

@ -11,7 +11,7 @@ import java.util.regex.Pattern;
import static org.junit.Assert.assertTrue;
public class OptimizedMatcherUnitTest {
public class OptimizedMatcherManualTest {
private String action;

View File

@ -0,0 +1,31 @@
package com.baeldung.operators
import java.math.BigDecimal
enum class Currency {
DOLLARS, EURO
}
class Money(val amount: BigDecimal, val currency: Currency) : Comparable<Money> {
override fun compareTo(other: Money): Int =
convert(Currency.DOLLARS).compareTo(other.convert(Currency.DOLLARS))
fun convert(currency: Currency): BigDecimal = TODO()
override fun equals(other: Any?): Boolean {
if (this === other) return true
if (other !is Money) return false
if (amount != other.amount) return false
if (currency != other.currency) return false
return true
}
override fun hashCode(): Int {
var result = amount.hashCode()
result = 31 * result + currency.hashCode()
return result
}
}

View File

@ -0,0 +1,16 @@
package com.baeldung.operators
interface Page<T> {
fun pageNumber(): Int
fun pageSize(): Int
fun elements(): MutableList<T>
}
operator fun <T> Page<T>.get(index: Int): T = elements()[index]
operator fun <T> Page<T>.get(start: Int, endExclusive: Int): List<T> = elements().subList(start, endExclusive)
operator fun <T> Page<T>.set(index: Int, value: T) {
elements()[index] = value
}
operator fun <T> Page<T>.contains(element: T): Boolean = element in elements()
operator fun <T> Page<T>.iterator() = elements().iterator()

View File

@ -0,0 +1,31 @@
package com.baeldung.operators
data class Point(val x: Int, val y: Int)
operator fun Point.unaryMinus() = Point(-x, -y)
operator fun Point.not() = Point(y, x)
operator fun Point.inc() = Point(x + 1, y + 1)
operator fun Point.dec() = Point(x - 1, y - 1)
operator fun Point.plus(other: Point): Point = Point(x + other.x, y + other.y)
operator fun Point.minus(other: Point): Point = Point(x - other.x, y - other.y)
operator fun Point.times(other: Point): Point = Point(x * other.x, y * other.y)
operator fun Point.div(other: Point): Point = Point(x / other.x, y / other.y)
operator fun Point.rem(other: Point): Point = Point(x % other.x, y % other.y)
operator fun Point.times(factor: Int): Point = Point(x * factor, y * factor)
operator fun Int.times(point: Point): Point = Point(point.x * this, point.y * this)
class Shape {
val points = mutableListOf<Point>()
operator fun Point.unaryPlus() {
points.add(this)
}
}
fun shape(init: Shape.() -> Unit): Shape {
val shape = Shape()
shape.init()
return shape
}

View File

@ -0,0 +1,8 @@
package com.baeldung.operators
import java.math.BigInteger
operator fun <T> MutableCollection<T>.plusAssign(element: T) {
add(element)
}
operator fun BigInteger.plus(other: Int): BigInteger = add(BigInteger("$other"))

View File

@ -0,0 +1,28 @@
package com.baeldung.operators
import org.junit.Test
import kotlin.test.assertEquals
import kotlin.test.assertTrue
class PageTest {
private val page = PageImpl(1, 10, "Java", "Kotlin", "Scala")
@Test
fun `Get convention should work as expected`() {
assertEquals("Kotlin", page[1])
assertEquals(listOf("Kotlin", "Scala"), page[1, 3])
}
@Test
fun `In convention should work on a page as expected`() {
assertTrue("Kotlin" in page)
}
}
private class PageImpl<T>(val page: Int, val size: Int, vararg val elements: T) : Page<T> {
override fun pageNumber(): Int = page
override fun pageSize(): Int = size
override fun elements(): MutableList<T> = mutableListOf(*elements)
}

View File

@ -0,0 +1,48 @@
package com.baeldung.operators
import org.junit.Test
import kotlin.test.assertEquals
import kotlin.test.assertTrue
class PointTest {
private val p1 = Point(1, 2)
private val p2 = Point(2, 3)
@Test
fun `We should be able to add two points together using +`() {
assertEquals(Point(3, 5), p1 + p2)
}
@Test
fun `We should be able to subtract one point from another using -`() {
assertEquals(Point(-1, -1), p1 - p2)
}
@Test
fun `We should be able to multiply two points together with *`() {
assertEquals(Point(2, 6), p1 * p2)
}
@Test
fun `We should be able to divide one point by another`() {
assertEquals(Point(0, 0), p1 / p2)
}
@Test
fun `We should be able to scale a point by an integral factor`() {
assertEquals(Point(2, 4), p1 * 2)
assertEquals(Point(2, 4), 2 * p1)
}
@Test
fun `We should be able to add points to an empty shape`() {
val line = shape {
+Point(0, 0)
+Point(1, 3)
}
assertTrue(Point(0, 0) in line.points)
assertTrue(Point(1, 3) in line.points)
}
}

View File

@ -0,0 +1,13 @@
package com.baeldung.operators
import java.math.BigInteger
import org.junit.Test
import kotlin.test.assertEquals
class UtilsTest {
@Test
fun `We should be able to add an int value to an existing BigInteger using +`() {
assertEquals(BigInteger.ONE, BigInteger.ZERO + 1)
}
}

View File

@ -122,13 +122,13 @@
<properties>
<!-- util -->
<commons-lang3.version>3.5</commons-lang3.version>
<commons-lang3.version>3.8.1</commons-lang3.version>
<commons-codec.version>1.10</commons-codec.version>
<!-- testing -->
<assertj.version>3.6.1</assertj.version>
<jmh-core.version>1.19</jmh-core.version>
<icu4j.version>61.1</icu4j.version>
<guava.version>26.0-jre</guava.version>
<guava.version>27.0.1-jre</guava.version>
</properties>
</project>

View File

@ -0,0 +1,20 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
name=local-file-sink
connector.class=FileStreamSink
tasks.max=1
file=test.sink.txt
topics=connect-test

View File

@ -0,0 +1,20 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
name=local-file-source
connector.class=FileStreamSource
tasks.max=1
file=test.txt
topic=connect-test

View File

@ -0,0 +1,44 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# These are defaults. This file just demonstrates how to override some settings.
bootstrap.servers=localhost:9092
# The converters specify the format of data in Kafka and how to translate it into Connect data. Every Connect user will
# need to configure these based on the format they want their data in when loaded from or stored into Kafka
key.converter=org.apache.kafka.connect.json.JsonConverter
value.converter=org.apache.kafka.connect.json.JsonConverter
# Converter-specific settings can be passed in by prefixing the Converter's setting with the converter we want to apply
# it to
key.converter.schemas.enable=false
value.converter.schemas.enable=false
offset.storage.file.filename=/tmp/connect.offsets
# Flush much faster than normal, which is useful for testing/debugging
offset.flush.interval.ms=10000
# Set to a list of filesystem paths separated by commas (,) to enable class loading isolation for plugins
# (connectors, converters, transformations). The list should consist of top level directories that include
# any combination of:
# a) directories immediately containing jars with plugins and their dependencies
# b) uber-jars with plugins and their dependencies
# c) directories immediately containing the package directory structure of classes of plugins and their dependencies
# Note: symlinks will be followed to discover dependencies or plugins.
# Examples:
# plugin.path=/usr/local/share/java,/usr/local/share/kafka/plugins,/opt/connectors,
# Replace the relative path below with an absolute path if you are planning to start Kafka Connect from within a
# directory other than the home directory of Confluent Platform.
plugin.path=C:/Software/confluent-5.0.0/share/java
#plugin.path=./share/java

View File

@ -0,0 +1,88 @@
##
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
# This file contains some of the configurations for the Kafka Connect distributed worker. This file is intended
# to be used with the examples, and some settings may differ from those used in a production system, especially
# the `bootstrap.servers` and those specifying replication factors.
# A list of host/port pairs to use for establishing the initial connection to the Kafka cluster.
bootstrap.servers=localhost:9092
# unique name for the cluster, used in forming the Connect cluster group. Note that this must not conflict with consumer group IDs
group.id=connect-cluster
# The converters specify the format of data in Kafka and how to translate it into Connect data. Every Connect user will
# need to configure these based on the format they want their data in when loaded from or stored into Kafka
key.converter=org.apache.kafka.connect.json.JsonConverter
value.converter=org.apache.kafka.connect.json.JsonConverter
# Converter-specific settings can be passed in by prefixing the Converter's setting with the converter we want to apply
# it to
key.converter.schemas.enable=true
value.converter.schemas.enable=true
# Topic to use for storing offsets. This topic should have many partitions and be replicated and compacted.
# Kafka Connect will attempt to create the topic automatically when needed, but you can always manually create
# the topic before starting Kafka Connect if a specific topic configuration is needed.
# Most users will want to use the built-in default replication factor of 3 or in some cases even specify a larger value.
# Since this means there must be at least as many brokers as the maximum replication factor used, we'd like to be able
# to run this example on a single-broker cluster and so here we instead set the replication factor to 1.
offset.storage.topic=connect-offsets
offset.storage.replication.factor=1
#offset.storage.partitions=25
# Topic to use for storing connector and task configurations; note that this should be a single partition, highly replicated,
# and compacted topic. Kafka Connect will attempt to create the topic automatically when needed, but you can always manually create
# the topic before starting Kafka Connect if a specific topic configuration is needed.
# Most users will want to use the built-in default replication factor of 3 or in some cases even specify a larger value.
# Since this means there must be at least as many brokers as the maximum replication factor used, we'd like to be able
# to run this example on a single-broker cluster and so here we instead set the replication factor to 1.
config.storage.topic=connect-configs
config.storage.replication.factor=1
# Topic to use for storing statuses. This topic can have multiple partitions and should be replicated and compacted.
# Kafka Connect will attempt to create the topic automatically when needed, but you can always manually create
# the topic before starting Kafka Connect if a specific topic configuration is needed.
# Most users will want to use the built-in default replication factor of 3 or in some cases even specify a larger value.
# Since this means there must be at least as many brokers as the maximum replication factor used, we'd like to be able
# to run this example on a single-broker cluster and so here we instead set the replication factor to 1.
status.storage.topic=connect-status
status.storage.replication.factor=1
#status.storage.partitions=5
# Flush much faster than normal, which is useful for testing/debugging
offset.flush.interval.ms=10000
# These are provided to inform the user about the presence of the REST host and port configs
# Hostname & Port for the REST API to listen on. If this is set, it will bind to the interface used to listen to requests.
#rest.host.name=
#rest.port=8083
# The Hostname & Port that will be given out to other workers to connect to i.e. URLs that are routable from other servers.
#rest.advertised.host.name=
#rest.advertised.port=
# Set to a list of filesystem paths separated by commas (,) to enable class loading isolation for plugins
# (connectors, converters, transformations). The list should consist of top level directories that include
# any combination of:
# a) directories immediately containing jars with plugins and their dependencies
# b) uber-jars with plugins and their dependencies
# c) directories immediately containing the package directory structure of classes of plugins and their dependencies
# Examples:
# plugin.path=/usr/local/share/java,/usr/local/share/kafka/plugins,/opt/connectors,
# Replace the relative path below with an absolute path if you are planning to start Kafka Connect from within a
# directory other than the home directory of Confluent Platform.
plugin.path=./share/java

View File

@ -0,0 +1,9 @@
{
"name": "local-file-sink",
"config": {
"connector.class": "FileStreamSink",
"tasks.max": 1,
"file": "test-distributed.sink.txt",
"topics": "connect-distributed"
}
}

View File

@ -0,0 +1,9 @@
{
"name": "local-file-source",
"config": {
"connector.class": "FileStreamSource",
"tasks.max": 1,
"file": "test-distributed.txt",
"topic": "connect-distributed"
}
}
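In distributed mode the worker does not read these JSON files from disk; each connector is created by posting its definition to the Connect REST API. A minimal sketch, assuming the worker listens on the default localhost:8083 and the JSON above is saved as connect-file-source.json:

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.nio.file.Path;

public class CreateConnector {
    public static void main(String[] args) throws Exception {
        HttpClient client = HttpClient.newHttpClient();
        // POST /connectors registers a connector from its JSON definition
        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:8083/connectors"))
                .header("Content-Type", "application/json")
                .POST(HttpRequest.BodyPublishers.ofFile(Path.of("connect-file-source.json")))
                .build();
        HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.statusCode() + " " + response.body());
    }
}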

View File

@ -0,0 +1,88 @@
##
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
# This file contains some of the configurations for the Kafka Connect distributed worker. This file is intended
# to be used with the examples, and some settings may differ from those used in a production system, especially
# the `bootstrap.servers` and those specifying replication factors.
# A list of host/port pairs to use for establishing the initial connection to the Kafka cluster.
bootstrap.servers=localhost:9092
# unique name for the cluster, used in forming the Connect cluster group. Note that this must not conflict with consumer group IDs
group.id=connect-cluster
# The converters specify the format of data in Kafka and how to translate it into Connect data. Every Connect user will
# need to configure these based on the format they want their data in when loaded from or stored into Kafka
key.converter=org.apache.kafka.connect.json.JsonConverter
value.converter=org.apache.kafka.connect.json.JsonConverter
# Converter-specific settings can be passed in by prefixing the Converter's setting with the converter we want to apply
# it to
key.converter.schemas.enable=false
value.converter.schemas.enable=false
# Topic to use for storing offsets. This topic should have many partitions and be replicated and compacted.
# Kafka Connect will attempt to create the topic automatically when needed, but you can always manually create
# the topic before starting Kafka Connect if a specific topic configuration is needed.
# Most users will want to use the built-in default replication factor of 3 or in some cases even specify a larger value.
# Since this means there must be at least as many brokers as the maximum replication factor used, we'd like to be able
# to run this example on a single-broker cluster and so here we instead set the replication factor to 1.
offset.storage.topic=connect-offsets
offset.storage.replication.factor=1
#offset.storage.partitions=25
# Topic to use for storing connector and task configurations; note that this should be a single partition, highly replicated,
# and compacted topic. Kafka Connect will attempt to create the topic automatically when needed, but you can always manually create
# the topic before starting Kafka Connect if a specific topic configuration is needed.
# Most users will want to use the built-in default replication factor of 3 or in some cases even specify a larger value.
# Since this means there must be at least as many brokers as the maximum replication factor used, we'd like to be able
# to run this example on a single-broker cluster and so here we instead set the replication factor to 1.
config.storage.topic=connect-configs
config.storage.replication.factor=1
# Topic to use for storing statuses. This topic can have multiple partitions and should be replicated and compacted.
# Kafka Connect will attempt to create the topic automatically when needed, but you can always manually create
# the topic before starting Kafka Connect if a specific topic configuration is needed.
# Most users will want to use the built-in default replication factor of 3 or in some cases even specify a larger value.
# Since this means there must be at least as many brokers as the maximum replication factor used, we'd like to be able
# to run this example on a single-broker cluster and so here we instead set the replication factor to 1.
status.storage.topic=connect-status
status.storage.replication.factor=1
#status.storage.partitions=5
# Flush much faster than normal, which is useful for testing/debugging
offset.flush.interval.ms=10000
# These are provided to inform the user about the presence of the REST host and port configs
# Hostname & Port for the REST API to listen on. If this is set, it will bind to the interface used to listen to requests.
#rest.host.name=
#rest.port=8083
# The Hostname & Port that will be given out to other workers to connect to i.e. URLs that are routable from other servers.
#rest.advertised.host.name=
#rest.advertised.port=
# Set to a list of filesystem paths separated by commas (,) to enable class loading isolation for plugins
# (connectors, converters, transformations). The list should consist of top level directories that include
# any combination of:
# a) directories immediately containing jars with plugins and their dependencies
# b) uber-jars with plugins and their dependencies
# c) directories immediately containing the package directory structure of classes of plugins and their dependencies
# Examples:
# plugin.path=/usr/local/share/java,/usr/local/share/kafka/plugins,/opt/connectors,
# Replace the relative path below with an absolute path if you are planning to start Kafka Connect from within a
# directory other than the home directory of Confluent Platform.
plugin.path=./share/java

View File

@ -0,0 +1,15 @@
{
"name": "local-file-source",
"config": {
"connector.class": "FileStreamSource",
"tasks.max": 1,
"file": "transformation.txt",
"topic": "connect-transformation",
"transforms": "MakeMap,InsertSource",
"transforms.MakeMap.type": "org.apache.kafka.connect.transforms.HoistField$Value",
"transforms.MakeMap.field": "line",
"transforms.InsertSource.type": "org.apache.kafka.connect.transforms.InsertField$Value",
"transforms.InsertSource.static.field": "data_source",
"transforms.InsertSource.static.value": "test-file-source"
}
}
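For orientation: a raw line read from transformation.txt passes through the two transforms above as follows — HoistField wraps the plain value in a map under the field line, then InsertField adds the static data_source field. Ignoring the optional schema envelope the JSON converter may add, a record value would look roughly like:

{"line": "some line from transformation.txt", "data_source": "test-file-source"}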

View File

@ -0,0 +1,88 @@
##
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
# This file contains some of the configurations for the Kafka Connect distributed worker. This file is intended
# to be used with the examples, and some settings may differ from those used in a production system, especially
# the `bootstrap.servers` and those specifying replication factors.
# A list of host/port pairs to use for establishing the initial connection to the Kafka cluster.
bootstrap.servers=localhost:9092
# unique name for the cluster, used in forming the Connect cluster group. Note that this must not conflict with consumer group IDs
group.id=connect-cluster
# The converters specify the format of data in Kafka and how to translate it into Connect data. Every Connect user will
# need to configure these based on the format they want their data in when loaded from or stored into Kafka
key.converter=org.apache.kafka.connect.json.JsonConverter
value.converter=org.apache.kafka.connect.json.JsonConverter
# Converter-specific settings can be passed in by prefixing the Converter's setting with the converter we want to apply
# it to
key.converter.schemas.enable=true
value.converter.schemas.enable=true
# Topic to use for storing offsets. This topic should have many partitions and be replicated and compacted.
# Kafka Connect will attempt to create the topic automatically when needed, but you can always manually create
# the topic before starting Kafka Connect if a specific topic configuration is needed.
# Most users will want to use the built-in default replication factor of 3 or in some cases even specify a larger value.
# Since this means there must be at least as many brokers as the maximum replication factor used, we'd like to be able
# to run this example on a single-broker cluster and so here we instead set the replication factor to 1.
offset.storage.topic=connect-offsets
offset.storage.replication.factor=1
#offset.storage.partitions=25
# Topic to use for storing connector and task configurations; note that this should be a single partition, highly replicated,
# and compacted topic. Kafka Connect will attempt to create the topic automatically when needed, but you can always manually create
# the topic before starting Kafka Connect if a specific topic configuration is needed.
# Most users will want to use the built-in default replication factor of 3 or in some cases even specify a larger value.
# Since this means there must be at least as many brokers as the maximum replication factor used, we'd like to be able
# to run this example on a single-broker cluster and so here we instead set the replication factor to 1.
config.storage.topic=connect-configs
config.storage.replication.factor=1
# Topic to use for storing statuses. This topic can have multiple partitions and should be replicated and compacted.
# Kafka Connect will attempt to create the topic automatically when needed, but you can always manually create
# the topic before starting Kafka Connect if a specific topic configuration is needed.
# Most users will want to use the built-in default replication factor of 3 or in some cases even specify a larger value.
# Since this means there must be at least as many brokers as the maximum replication factor used, we'd like to be able
# to run this example on a single-broker cluster and so here we instead set the replication factor to 1.
status.storage.topic=connect-status
status.storage.replication.factor=1
#status.storage.partitions=5
# Flush much faster than normal, which is useful for testing/debugging
offset.flush.interval.ms=10000
# These are provided to inform the user about the presence of the REST host and port configs
# Hostname & Port for the REST API to listen on. If this is set, it will bind to the interface used to listen to requests.
#rest.host.name=
#rest.port=8083
# The Hostname & Port that will be given out to other workers to connect to i.e. URLs that are routable from other servers.
#rest.advertised.host.name=
#rest.advertised.port=
# Set to a list of filesystem paths separated by commas (,) to enable class loading isolation for plugins
# (connectors, converters, transformations). The list should consist of top level directories that include
# any combination of:
# a) directories immediately containing jars with plugins and their dependencies
# b) uber-jars with plugins and their dependencies
# c) directories immediately containing the package directory structure of classes of plugins and their dependencies
# Examples:
# plugin.path=/usr/local/share/java,/usr/local/share/kafka/plugins,/opt/connectors,
# Replace the relative path below with an absolute path if you are planning to start Kafka Connect from within a
# directory other than the home directory of Confluent Platform.
plugin.path=./share/java

View File

@ -0,0 +1,22 @@
{
"firstName": "John",
"lastName": "Smith",
"age": 25,
"address": {
"streetAddress": "21 2nd Street",
"city": "New York",
"state": "NY",
"postalCode": "10021"
},
"phoneNumber": [{
"type": "home",
"number": "212 555-1234"
}, {
"type": "fax",
"number": "646 555-4567"
}
],
"gender": {
"type": "male"
}
}

View File

@ -0,0 +1,11 @@
{
"name": "mqtt-source",
"config": {
"connector.class": "io.confluent.connect.mqtt.MqttSourceConnector",
"tasks.max": 1,
"mqtt.server.uri": "ws://broker.hivemq.com:8000/mqtt",
"mqtt.topics": "baeldung",
"kafka.topic": "connect-custom",
"value.converter": "org.apache.kafka.connect.converters.ByteArrayConverter"
}
}

View File

@ -45,12 +45,6 @@
<artifactId>disruptor</artifactId>
<version>${disruptor.version}</version>
</dependency>
<!-- Webflux for reactive logging example -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-webflux</artifactId>
<version>${spring-boot.version}</version>
</dependency>
</dependencies>
<properties>
@ -58,7 +52,6 @@
<log4j-api.version>2.7</log4j-api.version>
<log4j-core.version>2.7</log4j-core.version>
<disruptor.version>3.3.6</disruptor.version>
<spring-boot.version>2.1.0.RELEASE</spring-boot.version>
</properties>
</project>

View File

@ -13,3 +13,4 @@
- [Interpreter Design Pattern in Java](http://www.baeldung.com/java-interpreter-pattern)
- [State Design Pattern in Java](https://www.baeldung.com/java-state-design-pattern)
- [The Decorator Pattern in Java](https://www.baeldung.com/java-decorator-pattern)
- [Abstract Factory Pattern in Java](https://www.baeldung.com/java-abstract-factory-pattern)

View File

@ -0,0 +1,35 @@
package com.baeldung.hibernate.findall;
import java.util.List;
import javax.persistence.TypedQuery;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Root;
import org.hibernate.Session;
import com.baeldung.hibernate.pojo.Student;
public class FindAll {
private Session session;
public FindAll(Session session) {
this.session = session;
}
public List<Student> findAllWithJpql() {
return session.createQuery("SELECT a FROM Student a", Student.class).getResultList();
}
public List<Student> findAllWithCriteriaQuery() {
CriteriaBuilder cb = session.getCriteriaBuilder();
CriteriaQuery<Student> cq = cb.createQuery(Student.class);
Root<Student> rootEntry = cq.from(Student.class);
CriteriaQuery<Student> all = cq.select(rootEntry);
TypedQuery<Student> allQuery = session.createQuery(all);
return allQuery.getResultList();
}
}

View File

@ -0,0 +1,63 @@
package com.baeldung.hibernate.findall;
import static org.junit.Assert.assertEquals;
import java.io.IOException;
import java.util.List;
import org.hibernate.Session;
import org.hibernate.Transaction;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import com.baeldung.hibernate.HibernateUtil;
import com.baeldung.hibernate.pojo.Student;
public class FindAllUnitTest {
private Session session;
private Transaction transaction;
private FindAll findAll;
@Before
public void setUp() throws IOException {
session = HibernateUtil.getSessionFactory().openSession();
transaction = session.beginTransaction();
findAll = new FindAll(session);
session.createNativeQuery("delete from Student").executeUpdate();
Student student1 = new Student();
session.persist(student1);
Student student2 = new Student();
session.persist(student2);
Student student3 = new Student();
session.persist(student3);
transaction.commit();
transaction = session.beginTransaction();
}
@After
public void tearDown() {
transaction.rollback();
session.close();
}
@Test
public void givenCriteriaQuery_WhenFindAll_ThenGetAllPersons() {
List<Student> list = findAll.findAllWithCriteriaQuery();
assertEquals(3, list.size());
}
@Test
public void givenJpql_WhenFindAll_ThenGetAllPersons() {
List<Student> list = findAll.findAllWithJpql();
assertEquals(3, list.size());
}
}

View File

@ -0,0 +1,70 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>com.baeldung</groupId>
<artifactId>spring-data-cassandra-reactive</artifactId>
<version>0.0.1-SNAPSHOT</version>
<packaging>jar</packaging>
<name>spring-data-cassandra-reactive</name>
<description>Spring Data Cassandra reactive</description>
<parent>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-parent</artifactId>
<version>2.1.0.RELEASE</version>
<relativePath/>
</parent>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
<java.version>1.8</java.version>
</properties>
<dependencies>
<dependency>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-cassandra</artifactId>
<version>2.1.2.RELEASE</version>
</dependency>
<dependency>
<groupId>io.projectreactor</groupId>
<artifactId>reactor-core</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-web</artifactId>
</dependency>
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-test</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>io.projectreactor</groupId>
<artifactId>reactor-test</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
</plugin>
</plugins>
</build>
</project>

View File

@ -0,0 +1,12 @@
package com.baeldung.cassandra.reactive;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
@SpringBootApplication
public class SpringDataCassandraReactiveApplication {
public static void main(String[] args) {
SpringApplication.run(SpringDataCassandraReactiveApplication.class, args);
}
}

View File

@ -0,0 +1,49 @@
package com.baeldung.cassandra.reactive.controller;
import com.baeldung.cassandra.reactive.model.Employee;
import com.baeldung.cassandra.reactive.service.EmployeeService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import javax.annotation.PostConstruct;
import java.util.ArrayList;
import java.util.List;
@RestController
@RequestMapping("employee")
public class EmployeeController {
@Autowired
EmployeeService employeeService;
@PostConstruct
public void saveEmployees() {
List<Employee> employees = new ArrayList<>();
employees.add(new Employee(123, "John Doe", "Delaware", "jdoe@xyz.com", 31));
employees.add(new Employee(324, "Adam Smith", "North Carolina", "asmith@xyz.com", 43));
employees.add(new Employee(355, "Kevin Dunner", "Virginia", "kdunner@xyz.com", 24));
employees.add(new Employee(643, "Mike Lauren", "New York", "mlauren@xyz.com", 41));
employeeService.initializeEmployees(employees);
}
@GetMapping("/list")
public Flux<Employee> getAllEmployees() {
Flux<Employee> employees = employeeService.getAllEmployees();
return employees;
}
@GetMapping("/{id}")
public Mono<Employee> getEmployeeById(@PathVariable int id) {
return employeeService.getEmployeeById(id);
}
@GetMapping("/filterByAge/{age}")
public Flux<Employee> getEmployeesFilterByAge(@PathVariable int age) {
return employeeService.getEmployeesFilterByAge(age);
}
}

View File

@ -0,0 +1,20 @@
package com.baeldung.cassandra.reactive.model;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import org.springframework.data.cassandra.core.mapping.PrimaryKey;
import org.springframework.data.cassandra.core.mapping.Table;
@Table
@Data
@AllArgsConstructor
@NoArgsConstructor
public class Employee {
@PrimaryKey
private int id;
private String name;
private String address;
private String email;
private int age;
}

View File

@ -0,0 +1,11 @@
package com.baeldung.cassandra.reactive.repository;
import com.baeldung.cassandra.reactive.model.Employee;
import org.springframework.data.cassandra.repository.AllowFiltering;
import org.springframework.data.cassandra.repository.ReactiveCassandraRepository;
import reactor.core.publisher.Flux;
public interface EmployeeRepository extends ReactiveCassandraRepository<Employee, Integer> {
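// Cassandra rejects predicates on non-key columns unless ALLOW FILTERING is
// appended to the query; @AllowFiltering adds it for this derived query
// (it forces a table scan, so use it sparingly)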
@AllowFiltering
Flux<Employee> findByAgeGreaterThan(int age);
}

View File

@ -0,0 +1,35 @@
package com.baeldung.cassandra.reactive.service;
import com.baeldung.cassandra.reactive.model.Employee;
import com.baeldung.cassandra.reactive.repository.EmployeeRepository;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import java.util.List;
@Service
public class EmployeeService {
@Autowired
EmployeeRepository employeeRepository;
public void initializeEmployees(List<Employee> employees) {
Flux<Employee> savedEmployees = employeeRepository.saveAll(employees);
savedEmployees.subscribe();
}
public Flux<Employee> getAllEmployees() {
return employeeRepository.findAll();
}
public Flux<Employee> getEmployeesFilterByAge(int age) {
return employeeRepository.findByAgeGreaterThan(age);
}
public Mono<Employee> getEmployeeById(int id) {
return employeeRepository.findById(id);
}
}
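Note that saveAll returns a cold Flux, so nothing is written until someone subscribes; initializeEmployees above fires and forgets. A minimal sketch of a blocking variant, in case startup must wait for the seed data (the method name initializeEmployeesBlocking is hypothetical, not part of the class above):
public void initializeEmployeesBlocking(List<Employee> employees) {
// blockLast() waits for the last saved element; acceptable here because
// this runs once at startup, outside any reactive pipeline
employeeRepository.saveAll(employees).blockLast();
}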

View File

@ -0,0 +1,2 @@
spring.data.cassandra.keyspace-name=practice
spring.data.cassandra.port=9042
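Spring Boot does not create the keyspace for you; assuming a local Cassandra instance on port 9042, the practice keyspace can be created beforehand with CQL along these lines:
CREATE KEYSPACE IF NOT EXISTS practice WITH replication = {'class': 'SimpleStrategy', 'replication_factor': 1};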

View File

@ -0,0 +1,55 @@
package com.baeldung.cassandra.reactive;
import com.baeldung.cassandra.reactive.model.Employee;
import com.baeldung.cassandra.reactive.repository.EmployeeRepository;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringRunner;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.test.StepVerifier;
@RunWith(SpringRunner.class)
@SpringBootTest
public class ReactiveEmployeeRepositoryIntegrationTest {
@Autowired
EmployeeRepository repository;
@Before
public void setUp() {
Flux<Employee> deleteAndInsert = repository.deleteAll() //
.thenMany(repository.saveAll(Flux.just(
new Employee(111, "John Doe", "Delaware", "jdoe@xyz.com", 31),
new Employee(222, "Adam Smith", "North Carolina", "asmith@xyz.com", 43),
new Employee(333, "Kevin Dunner", "Virginia", "kdunner@xyz.com", 24),
new Employee(444, "Mike Lauren", "New York", "mlauren@xyz.com", 41))));
StepVerifier.create(deleteAndInsert).expectNextCount(4).verifyComplete();
}
@Test
public void givenRecordsAreInserted_whenDbIsQueried_thenShouldIncludeNewRecords() {
Mono<Long> saveAndCount = repository.count()
.doOnNext(System.out::println)
.thenMany(repository.saveAll(Flux.just(new Employee(325, "Kim Jones", "Florida", "kjones@xyz.com", 42),
new Employee(654, "Tom Moody", "New Hampshire", "tmoody@xyz.com", 44))))
.last()
.flatMap(v -> repository.count())
.doOnNext(System.out::println);
StepVerifier.create(saveAndCount).expectNext(6L).verifyComplete();
}
@Test
public void givenAgeForFilter_whenDbIsQueried_thenShouldReturnFilteredRecords() {
StepVerifier.create(repository.findByAgeGreaterThan(35)).expectNextCount(2).verifyComplete();
}
}

View File

@ -0,0 +1,38 @@
package com.baeldung.hibernate.onetoone;
import org.hibernate.SessionFactory;
import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
import org.hibernate.cfg.Configuration;
import org.hibernate.service.ServiceRegistry;
public class HibernateUtil {
private static SessionFactory sessionFactory;
private static SessionFactory buildSessionFactory(Strategy strategy) {
try {
// Create the SessionFactory from one-to-one.cfg.xml
Configuration configuration = new Configuration();
for (Class<?> entityClass : strategy.getEntityClasses()) {
configuration.addAnnotatedClass(entityClass);
}
configuration.configure("one-to-one.cfg.xml");
ServiceRegistry serviceRegistry = new StandardServiceRegistryBuilder().applySettings(configuration.getProperties())
.build();
SessionFactory sessionFactory = configuration.buildSessionFactory(serviceRegistry);
return sessionFactory;
} catch (Throwable ex) {
ex.printStackTrace();
throw new ExceptionInInitializerError(ex);
}
}
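// Note: the factory is cached after the first call, so later calls with a
// different Strategy still get the originally built SessionFactory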
public static SessionFactory getSessionFactory(Strategy strategy) {
if (sessionFactory == null)
sessionFactory = buildSessionFactory(strategy);
return sessionFactory;
}
}

View File

@ -0,0 +1,25 @@
package com.baeldung.hibernate.onetoone;
import java.util.Arrays;
import java.util.List;
public enum Strategy {
// Note that the classes belong to different packages
FOREIGN_KEY(Arrays.asList(com.baeldung.hibernate.onetoone.foreignkeybased.User.class,
com.baeldung.hibernate.onetoone.foreignkeybased.Address.class)),
SHARED_PRIMARY_KEY(Arrays.asList(com.baeldung.hibernate.onetoone.sharedkeybased.User.class,
com.baeldung.hibernate.onetoone.sharedkeybased.Address.class)),
JOIN_TABLE_BASED(Arrays.asList(com.baeldung.hibernate.onetoone.jointablebased.Employee.class,
com.baeldung.hibernate.onetoone.jointablebased.WorkStation.class));
private List<Class<?>> entityClasses;
Strategy(List<Class<?>> entityClasses) {
this.entityClasses = entityClasses;
}
public List<Class<?>> getEntityClasses() {
return entityClasses;
}
}

View File

@ -0,0 +1,60 @@
package com.baeldung.hibernate.onetoone.foreignkeybased;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.OneToOne;
import javax.persistence.Table;
@Entity
@Table(name = "address")
public class Address {
@Id
@GeneratedValue(strategy = GenerationType.AUTO)
@Column(name = "id")
private Long id;
@Column(name = "street")
private String street;
@Column(name = "city")
private String city;
@OneToOne(mappedBy = "address")
private User user;
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public String getStreet() {
return street;
}
public void setStreet(String street) {
this.street = street;
}
public String getCity() {
return city;
}
public void setCity(String city) {
this.city = city;
}
public User getUser() {
return user;
}
public void setUser(User user) {
this.user = user;
}
}

View File

@ -0,0 +1,51 @@
package com.baeldung.hibernate.onetoone.foreignkeybased;
import javax.persistence.CascadeType;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.OneToOne;
import javax.persistence.Table;
@Entity
@Table(name = "users")
public class User {
@Id
@GeneratedValue(strategy = GenerationType.AUTO)
@Column(name = "id")
private Long id;
@Column(name = "username")
private String userName;
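// owning side of the relationship: the users table carries the
// address_id foreign-key column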
@OneToOne(cascade = CascadeType.ALL)
@JoinColumn(name = "address_id", referencedColumnName = "id")
private Address address;
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public String getUserName() {
return userName;
}
public void setUserName(String userName) {
this.userName = userName;
}
public Address getAddress() {
return address;
}
public void setAddress(Address address) {
this.address = address;
}
}

View File

@ -0,0 +1,53 @@
package com.baeldung.hibernate.onetoone.jointablebased;
import javax.persistence.CascadeType;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.JoinTable;
import javax.persistence.OneToOne;
import javax.persistence.Table;
@Entity
@Table(name = "employee")
public class Employee {
@Id
@GeneratedValue(strategy = GenerationType.AUTO)
@Column(name = "id")
private Long id;
@Column(name = "ename")
private String name;
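// the association lives in a separate emp_workstation join table,
// keyed by employee_id and workstation_id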
@OneToOne(cascade = CascadeType.ALL)
@JoinTable(name = "emp_workstation", joinColumns = {@JoinColumn(name = "employee_id", referencedColumnName = "id")},
inverseJoinColumns = {@JoinColumn(name = "workstation_id", referencedColumnName = "id")})
private WorkStation workStation;
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public WorkStation getWorkStation() {
return workStation;
}
public void setWorkStation(WorkStation workStation) {
this.workStation = workStation;
}
}

View File

@ -0,0 +1,61 @@
package com.baeldung.hibernate.onetoone.jointablebased;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.OneToOne;
import javax.persistence.Table;
@Entity
@Table(name = "workstation")
public class WorkStation {
@Id
@GeneratedValue(strategy = GenerationType.AUTO)
@Column(name = "id")
private Long id;
@Column(name = "workstation_number")
private Integer workstationNumber;
@Column(name = "floor")
private String floor;
@OneToOne(mappedBy = "workStation")
private Employee employee;
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public Integer getWorkstationNumber() {
return workstationNumber;
}
public void setWorkstationNumber(Integer workstationNumber) {
this.workstationNumber = workstationNumber;
}
public String getFloor() {
return floor;
}
public void setFloor(String floor) {
this.floor = floor;
}
public Employee getEmployee() {
return employee;
}
public void setEmployee(Employee employee) {
this.employee = employee;
}
}

View File

@ -0,0 +1,60 @@
package com.baeldung.hibernate.onetoone.sharedkeybased;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.MapsId;
import javax.persistence.OneToOne;
import javax.persistence.Table;
@Entity
@Table(name = "address")
public class Address {
@Id
@Column(name = "id")
private Long id;
@Column(name = "street")
private String street;
@Column(name = "city")
private String city;
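// shared primary key: @MapsId reuses the associated User's id as this
// Address's primary key, so no @GeneratedValue is needed here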
@OneToOne
@MapsId
private User user;
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public String getStreet() {
return street;
}
public void setStreet(String street) {
this.street = street;
}
public String getCity() {
return city;
}
public void setCity(String city) {
this.city = city;
}
public User getUser() {
return user;
}
public void setUser(User user) {
this.user = user;
}
}

View File

@ -0,0 +1,50 @@
package com.baeldung.hibernate.onetoone.sharedkeybased;
import javax.persistence.CascadeType;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.OneToOne;
import javax.persistence.Table;
@Entity
@Table(name = "users")
public class User {
@Id
@GeneratedValue(strategy = GenerationType.AUTO)
@Column(name = "id")
private Long id;
@Column(name = "username")
private String userName;
@OneToOne(mappedBy = "user", cascade = CascadeType.ALL)
private Address address;
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public String getUserName() {
return userName;
}
public void setUserName(String userName) {
this.userName = userName;
}
public Address getAddress() {
return address;
}
public void setAddress(Address address) {
this.address = address;
}
}

View File

@ -0,0 +1,16 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE hibernate-configuration PUBLIC
"-//Hibernate/Hibernate Configuration DTD 3.0//EN"
"http://hibernate.org/dtd/hibernate-configuration-3.0.dtd">
<hibernate-configuration>
<session-factory>
<property name="hibernate.connection.driver_class">org.h2.Driver</property>
<property name="hibernate.connection.password"></property>
<property name="hibernate.connection.url">jdbc:h2:mem:spring_hibernate_one_to_one</property>
<property name="hibernate.connection.username">sa</property>
<property name="hibernate.dialect">org.hibernate.dialect.H2Dialect</property>
<property name="hibernate.current_session_context_class">thread</property>
<property name="hibernate.show_sql">true</property>
<property name="hibernate.hbm2ddl.auto">create-drop</property>
</session-factory>
</hibernate-configuration>

View File

@ -0,0 +1,80 @@
package com.baeldung.hibernate.onetoone;
import com.baeldung.hibernate.onetoone.foreignkeybased.Address;
import com.baeldung.hibernate.onetoone.foreignkeybased.User;
import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import java.util.List;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
public class HibernateOneToOneAnnotationFKBasedIntegrationTest {
private static SessionFactory sessionFactory;
private Session session;
@BeforeClass
public static void beforeTests() {
sessionFactory = HibernateUtil.getSessionFactory(Strategy.FOREIGN_KEY);
}
@Before
public void setUp() {
session = sessionFactory.openSession();
session.beginTransaction();
}
@Test
public void givenData_whenInsert_thenCreates1to1relationship() {
User user = new User();
user.setUserName("alice@baeldung.com");
Address address = new Address();
address.setStreet("FK Street");
address.setCity("FK City");
address.setUser(user);
user.setAddress(address);
// The Address entry is created automatically by Hibernate, since the cascade type is ALL
session.persist(user);
session.getTransaction().commit();
assert1to1InsertedData();
}
private void assert1to1InsertedData() {
@SuppressWarnings("unchecked")
List<User> userList = session.createQuery("FROM User").list();
assertNotNull(userList);
assertEquals(1, userList.size());
User user = userList.get(0);
assertEquals("alice@baeldung.com", user.getUserName());
Address address = user.getAddress();
assertNotNull(address);
assertEquals("FK Street", address.getStreet());
assertEquals("FK City", address.getCity());
}
@After
public void tearDown() {
session.close();
}
@AfterClass
public static void afterTests() {
sessionFactory.close();
}
}

View File

@ -0,0 +1,80 @@
package com.baeldung.hibernate.onetoone;
import com.baeldung.hibernate.onetoone.jointablebased.Employee;
import com.baeldung.hibernate.onetoone.jointablebased.WorkStation;
import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import java.util.List;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
public class HibernateOneToOneAnnotationJTBasedIntegrationTest {
private static SessionFactory sessionFactory;
private Session session;
@BeforeClass
public static void beforeTests() {
sessionFactory = HibernateUtil.getSessionFactory(Strategy.JOIN_TABLE_BASED);
}
@Before
public void setUp() {
session = sessionFactory.openSession();
session.beginTransaction();
}
@Test
public void givenData_whenInsert_thenCreates1to1relationship() {
Employee employee = new Employee();
employee.setName("bob@baeldung.com");
WorkStation workStation = new WorkStation();
workStation.setWorkstationNumber(626);
workStation.setFloor("Sixth Floor");
employee.setWorkStation(workStation);
workStation.setEmployee(employee);
session.persist(employee);
session.getTransaction().commit();
assert1to1InsertedData();
}
private void assert1to1InsertedData() {
@SuppressWarnings("unchecked")
List<Employee> employeeList = session.createQuery("FROM Employee").list();
assertNotNull(employeeList);
assertEquals(1, employeeList.size());
Employee employee = employeeList.get(0);
assertEquals("bob@baeldung.com", employee.getName());
WorkStation workStation = employee.getWorkStation();
assertNotNull(workStation);
assertEquals((long) 626, (long) workStation.getWorkstationNumber());
assertEquals("Sixth Floor", workStation.getFloor());
}
@After
public void tearDown() {
session.close();
}
@AfterClass
public static void afterTests() {
sessionFactory.close();
}
}

View File

@ -0,0 +1,79 @@
package com.baeldung.hibernate.onetoone;
import com.baeldung.hibernate.onetoone.sharedkeybased.Address;
import com.baeldung.hibernate.onetoone.sharedkeybased.User;
import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import java.util.List;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
public class HibernateOneToOneAnnotationSPKBasedIntegrationTest {
private static SessionFactory sessionFactory;
private Session session;
@BeforeClass
public static void beforeTests() {
sessionFactory = HibernateUtil.getSessionFactory(Strategy.SHARED_PRIMARY_KEY);
}
@Before
public void setUp() {
session = sessionFactory.openSession();
session.beginTransaction();
}
@Test
public void givenData_whenInsert_thenCreates1to1relationship() {
User user = new User();
user.setUserName("alice@baeldung.com");
Address address = new Address();
address.setStreet("SPK Street");
address.setCity("SPK City");
address.setUser(user);
user.setAddress(address);
// The Address entry is created automatically by Hibernate, since the cascade type is ALL
session.persist(user);
session.getTransaction().commit();
assert1to1InsertedData();
}
private void assert1to1InsertedData() {
@SuppressWarnings("unchecked")
List<User> userList = session.createQuery("FROM User").list();
assertNotNull(userList);
assertEquals(1, userList.size());
User user = userList.get(0);
assertEquals("alice@baeldung.com", user.getUserName());
Address address = user.getAddress();
assertNotNull(address);
assertEquals("SPK Street", address.getStreet());
assertEquals("SPK City", address.getCity());
}
@After
public void tearDown() {
session.close();
}
@AfterClass
public static void afterTests() {
sessionFactory.close();
}
}

View File

@ -0,0 +1,16 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE hibernate-configuration PUBLIC
"-//Hibernate/Hibernate Configuration DTD 3.0//EN"
"http://hibernate.org/dtd/hibernate-configuration-3.0.dtd">
<hibernate-configuration>
<session-factory>
<property name="hibernate.connection.driver_class">org.h2.Driver</property>
<property name="hibernate.connection.password"></property>
<property name="hibernate.connection.url">jdbc:h2:mem:spring_hibernate_one_to_one</property>
<property name="hibernate.connection.username">sa</property>
<property name="hibernate.dialect">org.hibernate.dialect.H2Dialect</property>
<property name="hibernate.current_session_context_class">thread</property>
<property name="hibernate.show_sql">true</property>
<property name="hibernate.hbm2ddl.auto">create-drop</property>
</session-factory>
</hibernate-configuration>

pom.xml

File diff suppressed because it is too large

View File

@ -0,0 +1,12 @@
{
"//": "lines with // keys are just comments (we don't have real comments in json)",
"//": "this file stores password passed through md5+bcrypt hash",
"//": "you can use `restx hash md5+bcrypt {password}` shell command to get hashed passwords to put here",
"//": "to help startup with restx, there are comments with clear text passwords,",
"//": "which should obviously not be stored here.",
"user1": "$2a$10$iZluFUJShbjb1ue68bLrDuGCeJL9EMLHelVIf8u0SUbCseDOvKnoe",
"//": "user 1 password is 'user1-pwd'",
"user2": "$2a$10$oym3SYMFXf/9gGfDKKHO4eM1vWNqAZMsRZCL.BORCaP4yp5cdiCXu",
"//": "user 2 password is 'user2-pwd'"
}

restx/data/users.json
View File

@ -0,0 +1,4 @@
[
{"name":"user1", "roles": ["hello"]},
{"name":"user2", "roles": []}
]

restx/md.restx.json
View File

@ -0,0 +1,38 @@
{
"module": "restx-demo:restx-demo:0.1-SNAPSHOT",
"packaging": "war",
"properties": {
"java.version": "1.8",
"restx.version": "0.35-rc4"
},
"fragments": {
"maven": [
"classpath:///restx/build/fragments/maven/javadoc-apidoclet.xml" ]
},
"dependencies": {
"compile": [
"io.restx:restx-core:${restx.version}",
"io.restx:restx-security-basic:${restx.version}",
"io.restx:restx-core-annotation-processor:${restx.version}",
"io.restx:restx-factory:${restx.version}",
"io.restx:restx-factory-admin:${restx.version}",
"io.restx:restx-validation:${restx.version}",
"io.restx:restx-monitor-codahale:${restx.version}",
"io.restx:restx-monitor-admin:${restx.version}",
"io.restx:restx-log-admin:${restx.version}",
"io.restx:restx-i18n-admin:${restx.version}",
"io.restx:restx-stats-admin:${restx.version}",
"io.restx:restx-servlet:${restx.version}",
"io.restx:restx-server-jetty8:${restx.version}!optional",
"io.restx:restx-apidocs:${restx.version}",
"io.restx:restx-specs-admin:${restx.version}",
"io.restx:restx-admin:${restx.version}",
"ch.qos.logback:logback-classic:1.0.13"
],
"test": [
"io.restx:restx-specs-tests:${restx.version}",
"junit:junit:4.11"
]
}
}

restx/pom.xml
View File

@ -0,0 +1,155 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<artifactId>restx</artifactId>
<version>0.1-SNAPSHOT</version>
<packaging>war</packaging>
<name>restx-demo</name>
<properties>
<maven.compiler.target>1.8</maven.compiler.target>
<maven.compiler.source>1.8</maven.compiler.source>
<restx.version>0.35-rc4</restx.version>
</properties>
<parent>
<groupId>com.baeldung</groupId>
<artifactId>parent-modules</artifactId>
<version>1.0.0-SNAPSHOT</version>
</parent>
<dependencies>
<dependency>
<groupId>io.restx</groupId>
<artifactId>restx-core</artifactId>
<version>${restx.version}</version>
</dependency>
<dependency>
<groupId>io.restx</groupId>
<artifactId>restx-security-basic</artifactId>
<version>${restx.version}</version>
</dependency>
<dependency>
<groupId>io.restx</groupId>
<artifactId>restx-core-annotation-processor</artifactId>
<version>${restx.version}</version>
</dependency>
<dependency>
<groupId>io.restx</groupId>
<artifactId>restx-factory</artifactId>
<version>${restx.version}</version>
</dependency>
<dependency>
<groupId>io.restx</groupId>
<artifactId>restx-factory-admin</artifactId>
<version>${restx.version}</version>
</dependency>
<dependency>
<groupId>io.restx</groupId>
<artifactId>restx-validation</artifactId>
<version>${restx.version}</version>
</dependency>
<dependency>
<groupId>io.restx</groupId>
<artifactId>restx-monitor-codahale</artifactId>
<version>${restx.version}</version>
</dependency>
<dependency>
<groupId>io.restx</groupId>
<artifactId>restx-monitor-admin</artifactId>
<version>${restx.version}</version>
</dependency>
<dependency>
<groupId>io.restx</groupId>
<artifactId>restx-log-admin</artifactId>
<version>${restx.version}</version>
</dependency>
<dependency>
<groupId>io.restx</groupId>
<artifactId>restx-i18n-admin</artifactId>
<version>${restx.version}</version>
</dependency>
<dependency>
<groupId>io.restx</groupId>
<artifactId>restx-stats-admin</artifactId>
<version>${restx.version}</version>
</dependency>
<dependency>
<groupId>io.restx</groupId>
<artifactId>restx-servlet</artifactId>
<version>${restx.version}</version>
</dependency>
<dependency>
<groupId>io.restx</groupId>
<artifactId>restx-server-jetty8</artifactId>
<version>${restx.version}</version>
<optional>true</optional>
</dependency>
<dependency>
<groupId>io.restx</groupId>
<artifactId>restx-apidocs</artifactId>
<version>${restx.version}</version>
</dependency>
<dependency>
<groupId>io.restx</groupId>
<artifactId>restx-specs-admin</artifactId>
<version>${restx.version}</version>
</dependency>
<dependency>
<groupId>io.restx</groupId>
<artifactId>restx-admin</artifactId>
<version>${restx.version}</version>
</dependency>
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
<version>1.0.13</version>
</dependency>
<dependency>
<groupId>io.restx</groupId>
<artifactId>restx-specs-tests</artifactId>
<version>${restx.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.11</version>
<scope>test</scope>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-javadoc-plugin</artifactId>
<executions>
<execution>
<id>attach-docs</id>
<!--
we generate javadoc before packaging the jar to let a chance to apidocs doclet
to generate comments dictionary to be packaged inside the jar as a resource
-->
<phase>prepare-package</phase>
<goals>
<goal>jar</goal>
</goals>
</execution>
</executions>
<configuration>
<source>${maven.compiler.source}</source>
<doclet>restx.apidocs.doclet.ApidocsDoclet</doclet>
<docletArtifact>
<groupId>io.restx</groupId>
<artifactId>restx-apidocs-doclet</artifactId>
<version>${restx.version}</version>
</docletArtifact>
<additionalparam>-restx-target-dir ${project.basedir}/target/classes</additionalparam>
</configuration>
</plugin>
</plugins>
</build>
</project>

View File

@ -0,0 +1,74 @@
package restx.demo;
import restx.config.ConfigLoader;
import restx.config.ConfigSupplier;
import restx.factory.Provides;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Charsets;
import com.google.common.collect.ImmutableSet;
import restx.security.*;
import restx.factory.Module;
import javax.inject.Named;
import java.nio.file.Paths;
@Module
public class AppModule {
@Provides
public SignatureKey signatureKey() {
return new SignatureKey("restx-demo -447494532235718370 restx-demo 801c9eaf-4116-48f2-906b-e979fba72757".getBytes(Charsets.UTF_8));
}
@Provides
@Named("restx.admin.password")
public String restxAdminPassword() {
return "4780";
}
@Provides
public ConfigSupplier appConfigSupplier(ConfigLoader configLoader) {
// Load settings.properties in restx.demo package as a set of config entries
return configLoader.fromResource("restx/demo/settings");
}
@Provides
public CredentialsStrategy credentialsStrategy() {
return new BCryptCredentialsStrategy();
}
@Provides
public BasicPrincipalAuthenticator basicPrincipalAuthenticator(
SecuritySettings securitySettings, CredentialsStrategy credentialsStrategy,
@Named("restx.admin.passwordHash") String defaultAdminPasswordHash, ObjectMapper mapper) {
return new StdBasicPrincipalAuthenticator(new StdUserService<>(
// use file based users repository.
// Developer's note: prefer another storage mechanism for your users if you need real user management
// and better perf
new FileBasedUserRepository<>(
StdUser.class, // this is the class for the User objects, that you can get in your app code
// with RestxSession.current().getPrincipal().get()
// it can be a custom user class, it just needs to be JSON deserializable
mapper,
// this is the default restx admin, useful to access the restx admin console.
// if one user with restx-admin role is defined in the repository, this default user won't be
// available anymore
new StdUser("admin", ImmutableSet.<String>of("*")),
// the path where users are stored
Paths.get("data/users.json"),
// the path where credentials are stored. isolating both is a good practice in terms of security
// it is strongly recommended to follow this approach even if you use your own repository
Paths.get("data/credentials.json"),
// tells that we want to reload the files dynamically if they are touched.
// this has a performance impact; if you know your users / credentials never change without a
// restart, you can disable this to get better performance
true),
credentialsStrategy, defaultAdminPasswordHash),
securitySettings);
}
}

View File

@ -0,0 +1,32 @@
package restx.demo;
import com.google.common.base.Optional;
import restx.server.WebServer;
import restx.server.Jetty8WebServer;
/**
* This class can be used to run the app.
*
* Alternatively, you can deploy the app as a WAR in a regular container like Tomcat or Jetty.
*
* Reading the port from the PORT environment variable makes it compatible with Heroku.
*/
public class AppServer {
public static final String WEB_INF_LOCATION = "src/main/webapp/WEB-INF/web.xml";
public static final String WEB_APP_LOCATION = "src/main/webapp";
public static void main(String[] args) throws Exception {
int port = Integer.valueOf(Optional.fromNullable(System.getenv("PORT")).or("8080"));
WebServer server = new Jetty8WebServer(WEB_INF_LOCATION, WEB_APP_LOCATION, port, "0.0.0.0");
/*
* load mode from system property if defined, or default to dev
* be careful with that setting, if you use this class to launch your server in production, make sure to launch
* it with -Drestx.mode=prod or change the default here
*/
System.setProperty("restx.mode", System.getProperty("restx.mode", "dev"));
System.setProperty("restx.app.package", "restx.demo");
server.startAndAwait();
}
}

View File

@ -0,0 +1,10 @@
package restx.demo;
/**
* A list of roles for the application.
*
* We don't use an enum here because it must be used inside an annotation.
*/
public final class Roles {
public static final String HELLO_ROLE = "hello";
}

View File

@ -0,0 +1,21 @@
package restx.demo.domain;
public class Message {
private String message;
public String getMessage() {
return message;
}
public Message setMessage(final String message) {
this.message = message;
return this;
}
@Override
public String toString() {
return "Message{" +
"message='" + message + '\'' +
'}';
}
}

View File

@ -0,0 +1,62 @@
package restx.demo.rest;
import restx.demo.domain.Message;
import restx.demo.Roles;
import org.joda.time.DateTime;
import restx.annotations.GET;
import restx.annotations.POST;
import restx.annotations.RestxResource;
import restx.factory.Component;
import restx.security.PermitAll;
import restx.security.RolesAllowed;
import restx.security.RestxSession;
import javax.validation.constraints.NotNull;
@Component @RestxResource
public class HelloResource {
/**
* Say hello to currently logged in user.
*
* Authorized only for principals with Roles.HELLO_ROLE role.
*
* @return a Message to say hello
*/
@GET("/message")
@RolesAllowed(Roles.HELLO_ROLE)
public Message sayHello() {
return new Message().setMessage(String.format(
"hello %s, it's %s",
RestxSession.current().getPrincipal().get().getName(),
DateTime.now().toString("HH:mm:ss")));
}
/**
* Say hello to anybody.
*
* Does not require authentication.
*
* @return a Message to say hello
*/
@GET("/hello")
@PermitAll
public Message helloPublic(String who) {
return new Message().setMessage(String.format(
"hello %s, it's %s",
who, DateTime.now().toString("HH:mm:ss")));
}
public static class MyPOJO {
@NotNull
String value;
public String getValue(){ return value; }
public void setValue(String value){ this.value = value; }
}
@POST("/mypojo")
@PermitAll
public MyPOJO helloPojo(MyPOJO pojo){
pojo.setValue("hello "+pojo.getValue());
return pojo;
}
}

View File

@ -0,0 +1,94 @@
<configuration>
<contextListener class="ch.qos.logback.classic.jul.LevelChangePropagator">
<resetJUL>true</resetJUL>
</contextListener>
<property name="LOGS_FOLDER" value="${logs.base:-logs}" />
<appender name="errorFile" class="ch.qos.logback.core.rolling.RollingFileAppender">
<File>${LOGS_FOLDER}/errors.log</File>
<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
<level>ERROR</level>
</filter>
<encoder>
<pattern>%d [%-16thread] [%-10X{principal}] %-5level %logger{36} - %msg%n</pattern>
</encoder>
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${LOGS_FOLDER}/errors.%d.log</fileNamePattern>
<maxHistory>30</maxHistory>
</rollingPolicy>
</appender>
<if condition='p("restx.mode").equals("prod")'>
<then>
<!-- production mode -->
<appender name="appLog" class="ch.qos.logback.core.rolling.RollingFileAppender">
<File>${LOGS_FOLDER}/app.log</File>
<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
<level>INFO</level>
</filter>
<encoder>
<pattern>%d [%-16thread] [%-10X{principal}] %-5level %logger{36} - %msg%n</pattern>
</encoder>
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${LOGS_FOLDER}/app.%d.log</fileNamePattern>
<maxHistory>10</maxHistory>
</rollingPolicy>
</appender>
<appender name="debugFile" class="ch.qos.logback.core.rolling.RollingFileAppender">
<File>${LOGS_FOLDER}/debug.log</File>
<encoder>
<pattern>%d [%-16thread] [%-10X{principal}] %-5level %logger{36} - %msg%n</pattern>
</encoder>
<rollingPolicy class="ch.qos.logback.core.rolling.FixedWindowRollingPolicy">
<fileNamePattern>${LOGS_FOLDER}/debug.%i.log.zip</fileNamePattern>
<minIndex>1</minIndex>
<maxIndex>3</maxIndex>
</rollingPolicy>
<triggeringPolicy class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy">
<maxFileSize>50MB</maxFileSize>
</triggeringPolicy>
</appender>
<root level="INFO">
<appender-ref ref="debugFile" />
<appender-ref ref="appLog" />
</root>
</then>
<else>
<!-- not production mode -->
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>%d [%-16thread] [%-10X{principal}] %-5level %logger{36} - %msg%n</pattern>
</encoder>
</appender>
<appender name="appLog" class="ch.qos.logback.core.FileAppender">
<File>${LOGS_FOLDER}/app.log</File>
<encoder>
<pattern>%d [%-16thread] [%-10X{principal}] %-5level %logger{36} - %msg%n</pattern>
</encoder>
</appender>
<root level="INFO">
<appender-ref ref="STDOUT" />
<appender-ref ref="appLog" />
</root>
</else>
</if>
<!-- clean up container logs -->
<logger name="org.eclipse.jetty.server.AbstractConnector" level="WARN" />
<logger name="org.eclipse.jetty.server.handler.ContextHandler" level="WARN" />
<logger name="org.eclipse.jetty.webapp.StandardDescriptorProcessor" level="WARN" />
<logger name="org.hibernate.validator.internal.engine.ConfigurationImpl" level="WARN" />
<logger name="org.reflections.Reflections" level="WARN" />
<logger name="restx.factory.Factory" level="WARN" />
<!-- app logs - set DEBUG level, in prod it will go to a dedicated file -->
<logger name="restx.demo" level="DEBUG" />
<root level="INFO">
<appender-ref ref="errorFile" />
</root>
</configuration>

View File

@ -0,0 +1 @@
app.name=restx-demo

View File

@ -0,0 +1,15 @@
<web-app xmlns="http://java.sun.com/xml/ns/javaee"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://java.sun.com/xml/ns/javaee http://java.sun.com/xml/ns/javaee/web-app_3_0.xsd"
version="3.0" metadata-complete="true">
<servlet>
<servlet-name>restx</servlet-name>
<servlet-class>restx.servlet.RestxMainRouterServlet</servlet-class>
<load-on-startup>1</load-on-startup>
</servlet>
<servlet-mapping>
<servlet-name>restx</servlet-name>
<url-pattern>/api/*</url-pattern>
</servlet-mapping>
</web-app>

View File

@ -0,0 +1,23 @@
package restx.demo.rest;
import org.junit.runner.RunWith;
import restx.tests.FindSpecsIn;
import restx.tests.RestxSpecTestsRunner;
@RunWith(RestxSpecTestsRunner.class)
@FindSpecsIn("specs/hello")
public class HelloResourceSpecUnitTest {
/**
* Useless, thanks to both @RunWith(RestxSpecTestsRunner.class) & @FindSpecsIn()
*
* @Rule
* public RestxSpecRule rule = new RestxSpecRule();
*
* @Test
* public void test_spec() throws Exception {
* rule.runTest(specTestPath);
* }
*/
}

View File

@ -0,0 +1,10 @@
title: should admin say hello
given:
- time: 2013-08-28T01:18:00.822+02:00
- uuids: [ "e2b4430f-9541-4602-9a3a-413d17c56a6b" ]
wts:
- when: |
GET message
$RestxSession: {"_expires":"2013-09-27T01:18:00.822+02:00","principal":"admin","sessionKey":"e2b4430f-9541-4602-9a3a-413d17c56a6b"}
then: |
{"message":"hello admin, it's 01:18:00"}

View File

@ -0,0 +1,8 @@
title: should admin say hello
given:
- time: 2013-08-28T01:18:00.822+02:00
wts:
- when: |
GET hello?who=xavier
then: |
{"message":"hello xavier, it's 01:18:00"}

View File

@ -0,0 +1,17 @@
title: should a missing post value trigger a validation error
given:
- time: 2013-08-28T01:18:00.822+02:00
- uuids: [ "e2b4430f-9541-4602-9a3a-413d17c56a6b" ]
wts:
- when: |
POST mypojo
$RestxSession: {"_expires":"2013-09-27T01:18:00.822+02:00","principal":"user1","sessionKey":"e2b4430f-9541-4602-9a3a-413d17c56a6b"}
{}
then: |
400
- when: |
POST mypojo
$RestxSession: {"_expires":"2013-09-27T01:18:00.822+02:00","principal":"user1","sessionKey":"e2b4430f-9541-4602-9a3a-413d17c56a6b"}
{"value":"world"}
then: |
{"value":"hello world"}

View File

@ -0,0 +1,10 @@
title: should user1 say hello
given:
- time: 2013-08-28T01:18:00.822+02:00
- uuids: [ "e2b4430f-9541-4602-9a3a-413d17c56a6b" ]
wts:
- when: |
GET message
$RestxSession: {"_expires":"2013-09-27T01:18:00.822+02:00","principal":"user1","sessionKey":"e2b4430f-9541-4602-9a3a-413d17c56a6b"}
then: |
{"message":"hello user1, it's 01:18:00"}

View File

@ -0,0 +1,10 @@
title: should user2 not say hello
given:
- time: 2013-08-28T01:19:44.770+02:00
- uuids: [ "56f71fcc-42d3-422f-9458-8ad37fc4a0b5" ]
wts:
- when: |
GET message
$RestxSession: {"_expires":"2013-09-27T01:19:44.770+02:00","principal":"user2","sessionKey":"56f71fcc-42d3-422f-9458-8ad37fc4a0b5"}
then: |
403

View File

@ -0,0 +1,33 @@
package com.baeldung.debugging.consumer;
import java.util.Collections;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.autoconfigure.mongo.MongoReactiveAutoConfiguration;
import org.springframework.context.annotation.Bean;
import org.springframework.scheduling.annotation.EnableScheduling;
import org.springframework.security.config.web.server.ServerHttpSecurity;
import org.springframework.security.web.server.SecurityWebFilterChain;
import reactor.core.publisher.Hooks;
@SpringBootApplication(exclude = MongoReactiveAutoConfiguration.class)
@EnableScheduling
public class ConsumerSSEApplication {
public static void main(String[] args) {
Hooks.onOperatorDebug();
SpringApplication app = new SpringApplication(ConsumerSSEApplication.class);
app.setDefaultProperties(Collections.singletonMap("server.port", "8082"));
app.run(args);
}
@Bean
public SecurityWebFilterChain springSecurityFilterChain(ServerHttpSecurity http) {
http.authorizeExchange()
.anyExchange()
.permitAll();
return http.build();
}
}

View File

@ -0,0 +1,122 @@
package com.baeldung.debugging.consumer.chronjobs;
import java.time.Duration;
import java.util.concurrent.ThreadLocalRandom;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.MediaType;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;
import org.springframework.web.reactive.function.client.WebClient;
import com.baeldung.debugging.consumer.model.Foo;
import com.baeldung.debugging.consumer.model.FooDto;
import com.baeldung.debugging.consumer.service.FooService;
import reactor.core.publisher.Flux;
@Component
public class ChronJobs {
private static Logger logger = LoggerFactory.getLogger(ChronJobs.class);
private WebClient client = WebClient.create("http://localhost:8081");
@Autowired
private FooService service;
@Scheduled(fixedRate = 10000)
public void consumeInfiniteFlux() {
Flux<Foo> fluxFoo = client.get()
.uri("/functional-reactive/periodic-foo")
.accept(MediaType.TEXT_EVENT_STREAM)
.retrieve()
.bodyToFlux(FooDto.class)
.delayElements(Duration.ofMillis(100))
.map(dto -> {
logger.debug("process 1 with dto id {} name{}", dto.getId(), dto.getName());
return new Foo(dto);
});
Integer random = ThreadLocalRandom.current()
.nextInt(0, 3);
switch (random) {
case 0:
logger.info("process 1 with approach 1");
service.processFoo(fluxFoo);
break;
case 1:
logger.info("process 1 with approach 1 EH");
service.processUsingApproachOneWithErrorHandling(fluxFoo);
break;
default:
logger.info("process 1 with approach 2");
service.processFooInAnotherScenario(fluxFoo);
break;
}
}
@Scheduled(fixedRate = 20000)
public void consumeFiniteFlux2() {
Flux<Foo> fluxFoo = client.get()
.uri("/functional-reactive/periodic-foo-2")
.accept(MediaType.TEXT_EVENT_STREAM)
.retrieve()
.bodyToFlux(FooDto.class)
.delayElements(Duration.ofMillis(100))
.map(dto -> {
logger.debug("process 2 with dto id {} name{}", dto.getId(), dto.getName());
return new Foo(dto);
});
Integer random = ThreadLocalRandom.current()
.nextInt(0, 3);
switch (random) {
case 0:
logger.info("process 2 with approach 1");
service.processFoo(fluxFoo);
break;
case 1:
logger.info("process 2 with approach 1 EH");
service.processUsingApproachOneWithErrorHandling(fluxFoo);
break;
default:
logger.info("process 2 with approach 2");
service.processFooInAnotherScenario(fluxFoo);
break;
}
}
@Scheduled(fixedRate = 20000)
public void consumeFiniteFlux3() {
Flux<Foo> fluxFoo = client.get()
.uri("/functional-reactive/periodic-foo-2")
.accept(MediaType.TEXT_EVENT_STREAM)
.retrieve()
.bodyToFlux(FooDto.class)
.delayElements(Duration.ofMillis(100))
.map(dto -> {
logger.debug("process 3 with dto id {} name{}", dto.getId(), dto.getName());
return new Foo(dto);
});
logger.info("process 3 with approach 3");
service.processUsingApproachThree(fluxFoo);
}
@Scheduled(fixedRate = 20000)
public void consumeFiniteFluxWithCheckpoint4() {
Flux<Foo> fluxFoo = client.get()
.uri("/functional-reactive/periodic-foo-2")
.accept(MediaType.TEXT_EVENT_STREAM)
.retrieve()
.bodyToFlux(FooDto.class)
.delayElements(Duration.ofMillis(100))
.map(dto -> {
logger.debug("process 4 with dto id {} name{}", dto.getId(), dto.getName());
return new Foo(dto);
});
logger.info("process 4 with approach 4");
service.processUsingApproachFourWithCheckpoint(fluxFoo);
}
}

View File

@ -0,0 +1,23 @@
package com.baeldung.debugging.consumer.controllers;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RestController;
import reactor.core.publisher.Hooks;
@RestController
public class ReactiveConfigsToggleRestController {
@GetMapping("/debug-hook-on")
public String setReactiveDebugOn() {
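// global hook: from now on every operator assembly records a stack trace,
// which makes errors traceable at the cost of performance (dev use only)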
Hooks.onOperatorDebug();
return "DEBUG HOOK ON";
}
@GetMapping("/debug-hook-off")
public String setReactiveDebugOff() {
Hooks.resetOnOperatorDebug();
return "DEBUG HOOK OFF";
}
}

View File

@ -0,0 +1,26 @@
package com.baeldung.debugging.consumer.model;
import java.util.concurrent.ThreadLocalRandom;
import org.springframework.data.annotation.Id;
import lombok.AllArgsConstructor;
import lombok.Data;
@Data
@AllArgsConstructor
public class Foo {
@Id
private Integer id;
private String formattedName;
private Integer quantity;
public Foo(FooDto dto) {
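// ~1% of the time the id is deliberately dropped so that downstream
// consumers (see FooReporter) have an error to surface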
this.id = (ThreadLocalRandom.current()
.nextInt(0, 100) == 0) ? null : dto.getId();
this.formattedName = dto.getName();
this.quantity = ThreadLocalRandom.current()
.nextInt(0, 10);
}
}

View File

@ -0,0 +1,12 @@
package com.baeldung.debugging.consumer.model;
import lombok.AllArgsConstructor;
import lombok.Data;
@Data
@AllArgsConstructor
public class FooDto {
private Integer id;
private String name;
}

View File

@ -0,0 +1,45 @@
package com.baeldung.debugging.consumer.service;
import java.util.concurrent.ThreadLocalRandom;
import com.baeldung.debugging.consumer.model.Foo;
import reactor.core.publisher.Flux;
public class FooNameHelper {
public static Flux<Foo> concatAndSubstringFooName(Flux<Foo> flux) {
flux = concatFooName(flux);
flux = substringFooName(flux);
return flux;
}
public static Flux<Foo> concatFooName(Flux<Foo> flux) {
flux = flux.map(foo -> {
String processedName = null;
Integer random = ThreadLocalRandom.current()
.nextInt(0, 80);
processedName = (random != 0) ? foo.getFormattedName() : foo.getFormattedName() + "-bael";
foo.setFormattedName(processedName);
return foo;
});
return flux;
}
public static Flux<Foo> substringFooName(Flux<Foo> flux) {
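// ~1% of the time this takes substring(10, 15), which throws
// StringIndexOutOfBoundsException for short names -- a deliberate failure point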
return flux.map(foo -> {
String processedName;
Integer random = ThreadLocalRandom.current()
.nextInt(0, 100);
processedName = (random == 0) ? foo.getFormattedName()
.substring(10, 15)
: foo.getFormattedName()
.substring(0, 5);
foo.setFormattedName(processedName);
return foo;
});
}
}

View File

@ -0,0 +1,31 @@
package com.baeldung.debugging.consumer.service;
import java.util.concurrent.ThreadLocalRandom;
import com.baeldung.debugging.consumer.model.Foo;
import reactor.core.publisher.Flux;
public class FooQuantityHelper {
public static Flux<Foo> processFooReducingQuantity(Flux<Foo> flux) {
flux = flux.map(foo -> {
Integer result;
Integer random = ThreadLocalRandom.current()
.nextInt(0, 90);
result = (random == 0) ? 0 : foo.getQuantity() + 2;
foo.setQuantity(result);
return foo;
});
return divideFooQuantity(flux);
}
public static Flux<Foo> divideFooQuantity(Flux<Foo> flux) {
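// quantity may have been zeroed at random upstream, so the division below
// can throw ArithmeticException -- another deliberate failure point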
return flux.map(foo -> {
Integer result = Math.round(5 / foo.getQuantity());
foo.setQuantity(result);
return foo;
});
}
}

View File

@ -0,0 +1,26 @@
package com.baeldung.debugging.consumer.service;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.baeldung.debugging.consumer.model.Foo;
import reactor.core.publisher.Flux;
public class FooReporter {
private static Logger logger = LoggerFactory.getLogger(FooReporter.class);
public static Flux<Foo> reportResult(Flux<Foo> input, String approach) {
return input.map(foo -> {
if (foo.getId() == null)
throw new IllegalArgumentException("Null id is not valid!");
logger.info("Reporting for approach {}: Foo with id '{}' name '{}' and quantity '{}'", approach, foo.getId(), foo.getFormattedName(), foo.getQuantity());
return foo;
});
}
public static Flux<Foo> reportResult(Flux<Foo> input) {
return reportResult(input, "default");
}
}

View File

@ -0,0 +1,91 @@
package com.baeldung.debugging.consumer.service;
import static com.baeldung.debugging.consumer.service.FooNameHelper.concatAndSubstringFooName;
import static com.baeldung.debugging.consumer.service.FooNameHelper.substringFooName;
import static com.baeldung.debugging.consumer.service.FooQuantityHelper.divideFooQuantity;
import static com.baeldung.debugging.consumer.service.FooQuantityHelper.processFooReducingQuantity;
import static com.baeldung.debugging.consumer.service.FooReporter.reportResult;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import com.baeldung.debugging.consumer.model.Foo;
import reactor.core.publisher.Flux;
@Component
public class FooService {
private static Logger logger = LoggerFactory.getLogger(FooService.class);
public void processFoo(Flux<Foo> flux) {
flux = FooNameHelper.concatFooName(flux);
flux = FooNameHelper.substringFooName(flux);
flux = flux.log();
flux = FooReporter.reportResult(flux);
flux = flux.doOnError(error -> {
logger.error("The following error happened on processFoo method!", error);
});
flux.subscribe();
}
public void processFooInAnotherScenario(Flux<Foo> flux) {
flux = FooNameHelper.substringFooName(flux);
flux = FooQuantityHelper.divideFooQuantity(flux);
flux.subscribe();
}
public void processUsingApproachOneWithErrorHandling(Flux<Foo> flux) {
logger.info("starting approach one w error handling!");
flux = concatAndSubstringFooName(flux);
flux = concatAndSubstringFooName(flux);
flux = substringFooName(flux);
flux = processFooReducingQuantity(flux);
flux = processFooReducingQuantity(flux);
flux = processFooReducingQuantity(flux);
flux = reportResult(flux, "ONE w/ EH");
flux = flux.doOnError(error -> {
logger.error("Approach 1 with Error Handling failed!", error);
});
flux.subscribe();
}
public void processUsingApproachThree(Flux<Foo> flux) {
logger.info("starting approach three!");
flux = concatAndSubstringFooName(flux);
flux = reportResult(flux, "THREE");
flux = flux.doOnError(error -> {
logger.error("Approach 3 failed!", error);
});
flux.subscribe();
}
public void processUsingApproachFourWithCheckpoint(Flux<Foo> flux) {
logger.info("starting approach four!");
flux = concatAndSubstringFooName(flux);
flux = flux.checkpoint("CHECKPOINT 1");
flux = concatAndSubstringFooName(flux);
flux = divideFooQuantity(flux);
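// the boolean argument forces a full assembly stack trace to be captured
// at this checkpoint, instead of just the description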
flux = flux.checkpoint("CHECKPOINT 2", true);
flux = reportResult(flux, "FOUR");
flux = concatAndSubstringFooName(flux).doOnError(error -> {
logger.error("Approach 4 failed!", error);
});
flux.subscribe();
}
public void processUsingApproachFourWithInitialCheckpoint(Flux<Foo> flux) {
logger.info("starting approach four!");
flux = concatAndSubstringFooName(flux);
flux = flux.checkpoint("CHECKPOINT 1", true);
flux = concatAndSubstringFooName(flux);
flux = divideFooQuantity(flux);
flux = reportResult(flux, "FOUR");
flux = flux.doOnError(error -> {
logger.error("Approach 4-2 failed!", error);
});
flux.subscribe();
}
}

View File

@ -0,0 +1,29 @@
package com.baeldung.debugging.server;
import java.util.Collections;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.annotation.Bean;
import org.springframework.security.config.web.server.ServerHttpSecurity;
import org.springframework.security.web.server.SecurityWebFilterChain;
import org.springframework.web.reactive.config.EnableWebFlux;
@EnableWebFlux
@SpringBootApplication
public class ServerSSEApplication {
public static void main(String[] args) {
SpringApplication app = new SpringApplication(ServerSSEApplication.class);
app.setDefaultProperties(Collections.singletonMap("server.port", "8081"));
app.run(args);
}
@Bean
public SecurityWebFilterChain springSecurityFilterChain(ServerHttpSecurity http) {
http.authorizeExchange()
.anyExchange()
.permitAll();
return http.build();
}
}

View File

@ -0,0 +1,47 @@
package com.baeldung.debugging.server.handlers;
import java.time.Duration;
import java.util.concurrent.ThreadLocalRandom;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.http.MediaType;
import org.springframework.stereotype.Component;
import org.springframework.web.reactive.function.server.ServerRequest;
import org.springframework.web.reactive.function.server.ServerResponse;
import com.baeldung.debugging.server.model.Foo;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
@Component
public class ServerHandler {
private static Logger logger = LoggerFactory.getLogger(ServerHandler.class);
public Mono<ServerResponse> useHandler(final ServerRequest request) {
// there are chances that something goes wrong here...
return ServerResponse.ok()
.contentType(MediaType.TEXT_EVENT_STREAM)
.body(Flux.interval(Duration.ofSeconds(1))
.map(sequence -> {
logger.info("retrieving Foo. Sequence: {}", sequence);
if (ThreadLocalRandom.current()
.nextInt(0, 50) == 1) {
throw new RuntimeException("There was an error retrieving the Foo!");
}
return new Foo(sequence, "name" + sequence);
}), Foo.class);
}
public Mono<ServerResponse> useHandlerFinite(final ServerRequest request) {
return ServerResponse.ok()
.contentType(MediaType.TEXT_EVENT_STREAM)
.body(Flux.range(0, 50)
.map(sequence -> new Foo(Long.valueOf(sequence), "theFooNameNumber" + sequence)), Foo.class);
}
}

Some files were not shown because too many files have changed in this diff