diff --git a/algorithms-miscellaneous-3/src/main/java/com/baeldung/algorithms/graphcycledetection/domain/Graph.java b/algorithms-miscellaneous-3/src/main/java/com/baeldung/algorithms/graphcycledetection/domain/Graph.java new file mode 100644 index 0000000000..c77173b288 --- /dev/null +++ b/algorithms-miscellaneous-3/src/main/java/com/baeldung/algorithms/graphcycledetection/domain/Graph.java @@ -0,0 +1,51 @@ +package com.baeldung.algorithms.graphcycledetection.domain; + +import java.util.ArrayList; +import java.util.List; + +public class Graph { + + private List vertices; + + public Graph() { + this.vertices = new ArrayList<>(); + } + + public Graph(List vertices) { + this.vertices = vertices; + } + + public void addVertex(Vertex vertex) { + this.vertices.add(vertex); + } + + public void addEdge(Vertex from, Vertex to) { + from.addNeighbour(to); + } + + public boolean hasCycle() { + for (Vertex vertex : vertices) { + if (!vertex.isVisited() && hasCycle(vertex)) { + return true; + } + } + return false; + } + + public boolean hasCycle(Vertex sourceVertex) { + sourceVertex.setBeingVisited(true); + + for (Vertex neighbour : sourceVertex.getAdjacencyList()) { + if (neighbour.isBeingVisited()) { + // backward edge exists + return true; + } else if (!neighbour.isVisited() && hasCycle(neighbour)) { + return true; + } + } + + sourceVertex.setBeingVisited(false); + sourceVertex.setVisited(true); + return false; + } +} diff --git a/algorithms-miscellaneous-3/src/main/java/com/baeldung/algorithms/graphcycledetection/domain/Vertex.java b/algorithms-miscellaneous-3/src/main/java/com/baeldung/algorithms/graphcycledetection/domain/Vertex.java new file mode 100644 index 0000000000..398cdf0d9c --- /dev/null +++ b/algorithms-miscellaneous-3/src/main/java/com/baeldung/algorithms/graphcycledetection/domain/Vertex.java @@ -0,0 +1,56 @@ +package com.baeldung.algorithms.graphcycledetection.domain; + +import java.util.ArrayList; +import java.util.List; + +public class Vertex { + + private String label; + + private boolean visited; + + private boolean beingVisited; + + private List adjacencyList; + + public Vertex(String label) { + this.label = label; + this.adjacencyList = new ArrayList<>(); + } + + public String getLabel() { + return label; + } + + public void setLabel(String label) { + this.label = label; + } + + public boolean isVisited() { + return visited; + } + + public void setVisited(boolean visited) { + this.visited = visited; + } + + public boolean isBeingVisited() { + return beingVisited; + } + + public void setBeingVisited(boolean beingVisited) { + this.beingVisited = beingVisited; + } + + public List getAdjacencyList() { + return adjacencyList; + } + + public void setAdjacencyList(List adjacencyList) { + this.adjacencyList = adjacencyList; + } + + public void addNeighbour(Vertex adjacent) { + this.adjacencyList.add(adjacent); + } +} diff --git a/algorithms-miscellaneous-3/src/test/java/com/baeldung/algorithms/graphcycledetection/GraphCycleDetectionUnitTest.java b/algorithms-miscellaneous-3/src/test/java/com/baeldung/algorithms/graphcycledetection/GraphCycleDetectionUnitTest.java new file mode 100644 index 0000000000..8d464d7b97 --- /dev/null +++ b/algorithms-miscellaneous-3/src/test/java/com/baeldung/algorithms/graphcycledetection/GraphCycleDetectionUnitTest.java @@ -0,0 +1,56 @@ +package com.baeldung.algorithms.graphcycledetection; + +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +import org.junit.Test; + +import 
com.baeldung.algorithms.graphcycledetection.domain.Graph; +import com.baeldung.algorithms.graphcycledetection.domain.Vertex; + +public class GraphCycleDetectionUnitTest { + + @Test + public void givenGraph_whenCycleExists_thenReturnTrue() { + + Vertex vertexA = new Vertex("A"); + Vertex vertexB = new Vertex("B"); + Vertex vertexC = new Vertex("C"); + Vertex vertexD = new Vertex("D"); + + Graph graph = new Graph(); + graph.addVertex(vertexA); + graph.addVertex(vertexB); + graph.addVertex(vertexC); + graph.addVertex(vertexD); + + graph.addEdge(vertexA, vertexB); + graph.addEdge(vertexB, vertexC); + graph.addEdge(vertexC, vertexA); + graph.addEdge(vertexD, vertexC); + + assertTrue(graph.hasCycle()); + } + + @Test + public void givenGraph_whenNoCycleExists_thenReturnFalse() { + + Vertex vertexA = new Vertex("A"); + Vertex vertexB = new Vertex("B"); + Vertex vertexC = new Vertex("C"); + Vertex vertexD = new Vertex("D"); + + Graph graph = new Graph(); + graph.addVertex(vertexA); + graph.addVertex(vertexB); + graph.addVertex(vertexC); + graph.addVertex(vertexD); + + graph.addEdge(vertexA, vertexB); + graph.addEdge(vertexB, vertexC); + graph.addEdge(vertexA, vertexC); + graph.addEdge(vertexD, vertexC); + + assertFalse(graph.hasCycle()); + } +} diff --git a/core-groovy-2/gmavenplus-pom.xml b/core-groovy-2/gmavenplus-pom.xml new file mode 100644 index 0000000000..54c89b9834 --- /dev/null +++ b/core-groovy-2/gmavenplus-pom.xml @@ -0,0 +1,178 @@ + + + 4.0.0 + core-groovy-2 + 1.0-SNAPSHOT + core-groovy-2 + jar + + + com.baeldung + parent-modules + 1.0.0-SNAPSHOT + + + + + org.apache.commons + commons-lang3 + ${commons-lang3.version} + + + ch.qos.logback + logback-classic + ${logback.version} + + + org.codehaus.groovy + groovy-all + ${groovy.version} + pom + + + org.junit.platform + junit-platform-runner + ${junit.platform.version} + test + + + org.hsqldb + hsqldb + ${hsqldb.version} + test + + + org.spockframework + spock-core + ${spock-core.version} + test + + + + + src/main/groovy + src/main/java + + + org.codehaus.gmavenplus + gmavenplus-plugin + 1.7.0 + + + + execute + addSources + addTestSources + generateStubs + compile + generateTestStubs + compileTests + removeStubs + removeTestStubs + + + + + + org.codehaus.groovy + groovy-all + + ${groovy.version} + runtime + pom + + + + + org.apache.maven.plugins + maven-compiler-plugin + + + maven-failsafe-plugin + ${maven-failsafe-plugin.version} + + + org.junit.platform + junit-platform-surefire-provider + ${junit.platform.version} + + + + + junit5 + + integration-test + verify + + + + **/*Test5.java + + + + + + + maven-surefire-plugin + 2.20.1 + + false + + **/*Test.java + **/*Spec.java + + + + + + org.apache.maven.plugins + maven-assembly-plugin + 3.1.0 + + + + jar-with-dependencies + + + + + com.baeldung.MyJointCompilationApp + + + + + + + make-assembly + + package + + single + + + + + + + + + + central + http://jcenter.bintray.com + + + + + UTF-8 + 1.0.0 + 2.4.0 + 1.1-groovy-2.4 + 3.9 + 1.8 + 1.2.3 + 2.5.7 + 1.6 + + + diff --git a/core-groovy-2/pom.xml b/core-groovy-2/pom.xml index 77de9c8fc8..b945546c8a 100644 --- a/core-groovy-2/pom.xml +++ b/core-groovy-2/pom.xml @@ -1,6 +1,6 @@ + xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> 4.0.0 core-groovy-2 1.0-SNAPSHOT @@ -15,25 +15,20 @@ - org.codehaus.groovy - groovy - ${groovy.version} + org.apache.commons + commons-lang3 + ${commons-lang3.version} + + + ch.qos.logback + logback-classic + ${logback.version} org.codehaus.groovy groovy-all - 
${groovy-all.version} - pom - - - org.codehaus.groovy - groovy-dateutil ${groovy.version} - - - org.codehaus.groovy - groovy-sql - ${groovy-sql.version} + pom org.junit.platform @@ -56,21 +51,35 @@ + src/main/groovy + src/main/java - org.codehaus.gmavenplus - gmavenplus-plugin - ${gmavenplus-plugin.version} - - - - addSources - addTestSources - compile - compileTests - - - + org.codehaus.groovy + groovy-eclipse-compiler + 3.3.0-01 + true + + + maven-compiler-plugin + 3.8.0 + + groovy-eclipse-compiler + ${java.version} + ${java.version} + + + + org.codehaus.groovy + groovy-eclipse-compiler + 3.3.0-01 + + + org.codehaus.groovy + groovy-eclipse-batch + ${groovy.version}-01 + + maven-failsafe-plugin @@ -101,13 +110,42 @@ maven-surefire-plugin 2.20.1 - false - - **/*Test.java - **/*Spec.java - + false + + **/*Test.java + **/*Spec.java + + + + org.apache.maven.plugins + maven-assembly-plugin + 3.1.0 + + + + jar-with-dependencies + + + + + com.baeldung.MyJointCompilationApp + + + + + + + make-assembly + + package + + single + + + + @@ -118,14 +156,32 @@ + + + bintray + Groovy Bintray + https://dl.bintray.com/groovy/maven + + + never + + + false + + + + + 1.0.0 - 2.5.6 - 2.5.6 - 2.5.6 2.4.0 1.1-groovy-2.4 - 1.6 + 3.9 + 1.8 + 3.8.1 + 1.2.3 + 2.5.7 + UTF-8 diff --git a/core-groovy-2/src/main/groovy/com/baeldung/CalcMath.groovy b/core-groovy-2/src/main/groovy/com/baeldung/CalcMath.groovy new file mode 100644 index 0000000000..0e233793b2 --- /dev/null +++ b/core-groovy-2/src/main/groovy/com/baeldung/CalcMath.groovy @@ -0,0 +1,25 @@ +package com.baeldung + +import org.slf4j.LoggerFactory + +class CalcMath { + def log = LoggerFactory.getLogger(this.getClass()) + + def calcSum(x, y) { + log.info "Executing $x + $y" + x + y + } + + /** + * example of method that in java would throw error at compile time + * @param x + * @param y + * @return + */ + def calcSum2(x, y) { + log.info "Executing $x + $y" + // DANGER! This won't throw a compilation issue and fail only at runtime!!! + calcSum3() + log.info("Logging an undefined variable: $z") + } +} \ No newline at end of file diff --git a/core-groovy-2/src/main/groovy/com/baeldung/CalcScript.groovy b/core-groovy-2/src/main/groovy/com/baeldung/CalcScript.groovy new file mode 100644 index 0000000000..84615b2217 --- /dev/null +++ b/core-groovy-2/src/main/groovy/com/baeldung/CalcScript.groovy @@ -0,0 +1,16 @@ +package com.baeldung + +def calcSum(x, y) { + x + y +} + +def calcSum2(x, y) { + // DANGER! The variable "log" may be undefined + log.info "Executing $x + $y" + // DANGER! This method doesn't exist! + calcSum3() + // DANGER! The logged variable "z" is undefined! + log.info("Logging an undefined variable: $z") +} + +calcSum(1,5) diff --git a/core-groovy-2/src/main/java/com/baeldung/MyJointCompilationApp.java b/core-groovy-2/src/main/java/com/baeldung/MyJointCompilationApp.java new file mode 100644 index 0000000000..c49f6edc30 --- /dev/null +++ b/core-groovy-2/src/main/java/com/baeldung/MyJointCompilationApp.java @@ -0,0 +1,120 @@ +package com.baeldung; + +import groovy.lang.*; +import groovy.util.GroovyScriptEngine; +import groovy.util.ResourceException; +import groovy.util.ScriptException; +import org.codehaus.groovy.jsr223.GroovyScriptEngineFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import javax.script.ScriptEngine; +import java.io.File; +import java.io.FileNotFoundException; +import java.io.FileReader; +import java.io.IOException; +import java.net.MalformedURLException; +import java.net.URL; + +/** + * Hello world! 
+ * + */ +public class MyJointCompilationApp { + private final static Logger LOG = LoggerFactory.getLogger(MyJointCompilationApp.class); + private final GroovyClassLoader loader; + private final GroovyShell shell; + private final GroovyScriptEngine engine; + private final ScriptEngine engineFromFactory; + + public MyJointCompilationApp() { + loader = new GroovyClassLoader(this.getClass().getClassLoader()); + shell = new GroovyShell(loader, new Binding()); + + URL url = null; + try { + url = new File("src/main/groovy/com/baeldung/").toURI().toURL(); + } catch (MalformedURLException e) { + LOG.error("Exception while creating url", e); + } + engine = new GroovyScriptEngine(new URL[] {url}, this.getClass().getClassLoader()); + engineFromFactory = new GroovyScriptEngineFactory().getScriptEngine(); + } + + private void addWithCompiledClasses(int x, int y) { + LOG.info("Executing {} + {}", x, y); + Object result1 = new CalcScript().calcSum(x, y); + LOG.info("Result of CalcScript.calcSum() method is {}", result1); + + Object result2 = new CalcMath().calcSum(x, y); + LOG.info("Result of CalcMath.calcSum() method is {}", result2); + } + + private void addWithGroovyShell(int x, int y) throws IOException { + Script script = shell.parse(new File("src/main/groovy/com/baeldung/", "CalcScript.groovy")); + LOG.info("Executing {} + {}", x, y); + Object result = script.invokeMethod("calcSum", new Object[] { x, y }); + LOG.info("Result of CalcScript.calcSum() method is {}", result); + } + + private void addWithGroovyShellRun() throws IOException { + Script script = shell.parse(new File("src/main/groovy/com/baeldung/", "CalcScript.groovy")); + LOG.info("Executing script run method"); + Object result = script.run(); + LOG.info("Result of CalcScript.run() method is {}", result); + } + + private void addWithGroovyClassLoader(int x, int y) throws IllegalAccessException, InstantiationException, IOException { + Class calcClass = loader.parseClass( + new File("src/main/groovy/com/baeldung/", "CalcMath.groovy")); + GroovyObject calc = (GroovyObject) calcClass.newInstance(); + Object result = calc.invokeMethod("calcSum", new Object[] { x + 14, y + 14 }); + LOG.info("Result of CalcMath.calcSum() method is {}", result); + } + + private void addWithGroovyScriptEngine(int x, int y) throws IllegalAccessException, + InstantiationException, ResourceException, ScriptException { + Class calcClass = engine.loadScriptByName("CalcMath.groovy"); + GroovyObject calc = calcClass.newInstance(); + //WARNING the following will throw a ClassCastException + //((CalcMath)calc).calcSum(1,2); + Object result = calc.invokeMethod("calcSum", new Object[] { x, y }); + LOG.info("Result of CalcMath.calcSum() method is {}", result); + } + + private void addWithEngineFactory(int x, int y) throws IllegalAccessException, + InstantiationException, javax.script.ScriptException, FileNotFoundException { + Class calcClass = (Class) engineFromFactory.eval( + new FileReader(new File("src/main/groovy/com/baeldung/", "CalcMath.groovy"))); + GroovyObject calc = (GroovyObject) calcClass.newInstance(); + Object result = calc.invokeMethod("calcSum", new Object[] { x, y }); + LOG.info("Result of CalcMath.calcSum() method is {}", result); + } + + private void addWithStaticCompiledClasses() { + LOG.info("Running the Groovy classes compiled statically..."); + addWithCompiledClasses(5, 10); + + } + + private void addWithDynamicCompiledClasses() throws IOException, IllegalAccessException, InstantiationException, + ResourceException, ScriptException, 
javax.script.ScriptException { + LOG.info("Invocation of a dynamic groovy script..."); + addWithGroovyShell(5, 10); + LOG.info("Invocation of the run method of a dynamic groovy script..."); + addWithGroovyShellRun(); + LOG.info("Invocation of a dynamic groovy class loaded with GroovyClassLoader..."); + addWithGroovyClassLoader(10, 30); + LOG.info("Invocation of a dynamic groovy class loaded with GroovyScriptEngine..."); + addWithGroovyScriptEngine(15, 0); + LOG.info("Invocation of a dynamic groovy class loaded with GroovyScriptEngine JSR223..."); + addWithEngineFactory(5, 6); + } + + public static void main(String[] args) throws InstantiationException, IllegalAccessException, + ResourceException, ScriptException, IOException, javax.script.ScriptException { + MyJointCompilationApp myJointCompilationApp = new MyJointCompilationApp(); + LOG.info("Example of addition operation via Groovy scripts integration with Java."); + myJointCompilationApp.addWithStaticCompiledClasses(); + myJointCompilationApp.addWithDynamicCompiledClasses(); + } +} diff --git a/core-java-modules/core-java-10/src/main/java/com/baeldung/set/CopySets.java b/core-java-modules/core-java-10/src/main/java/com/baeldung/set/CopySets.java new file mode 100644 index 0000000000..d49724f81d --- /dev/null +++ b/core-java-modules/core-java-10/src/main/java/com/baeldung/set/CopySets.java @@ -0,0 +1,13 @@ +package com.baeldung.set; + +import java.util.Set; + +public class CopySets { + + // Using Java 10 + public static Set copyBySetCopyOf(Set original) { + Set copy = Set.copyOf(original); + return copy; + } + +} diff --git a/core-java-modules/core-java-8-2/pom.xml b/core-java-modules/core-java-8-2/pom.xml index ff2e290086..cc184de529 100644 --- a/core-java-modules/core-java-8-2/pom.xml +++ b/core-java-modules/core-java-8-2/pom.xml @@ -22,9 +22,6 @@ 1.8 1.8 64.2 - 5.4.0.Final - 1.4.197 - 2.9.8 @@ -33,21 +30,6 @@ icu4j ${icu.version} - - org.hibernate - hibernate-core - ${hibernate.core.version} - - - com.h2database - h2 - ${h2database.version} - - - com.fasterxml.jackson.core - jackson-databind - ${jackson.databind.version} - diff --git a/core-java-modules/core-java-collections-set/pom.xml b/core-java-modules/core-java-collections-set/pom.xml index 2a930efde8..4435f8b151 100644 --- a/core-java-modules/core-java-collections-set/pom.xml +++ b/core-java-modules/core-java-collections-set/pom.xml @@ -24,10 +24,20 @@ commons-collections4 ${commons-collections4.version} + + com.google.code.gson + gson + 2.8.5 + + + commons-lang + commons-lang + 2.6 + 4.3 27.1-jre - + \ No newline at end of file diff --git a/core-java-modules/core-java-collections-set/src/main/java/com/baeldung/set/CopySets.java b/core-java-modules/core-java-collections-set/src/main/java/com/baeldung/set/CopySets.java new file mode 100644 index 0000000000..53933e4439 --- /dev/null +++ b/core-java-modules/core-java-collections-set/src/main/java/com/baeldung/set/CopySets.java @@ -0,0 +1,59 @@ +package com.baeldung.set; + +import java.io.Serializable; +import java.util.HashSet; +import java.util.Set; +import java.util.stream.Collectors; + +import org.apache.commons.lang.SerializationUtils; + +import com.google.gson.Gson; + +public class CopySets { + + // Copy Constructor + public static Set copyByConstructor(Set original) { + Set copy = new HashSet<>(original); + return copy; + } + + // Set.addAll + public static Set copyBySetAddAll(Set original) { + Set copy = new HashSet<>(); + copy.addAll(original); + return copy; + } + + // Set.clone + public static Set copyBySetClone(HashSet 
original) { + Set copy = (Set) original.clone(); + return copy; + } + + // JSON + public static Set copyByJson(Set original) { + Gson gson = new Gson(); + String jsonStr = gson.toJson(original); + Set copy = gson.fromJson(jsonStr, Set.class); + + return copy; + } + + // Apache Commons Lang + public static Set copyByApacheCommonsLang(Set original) { + Set copy = new HashSet<>(); + for (T item : original) { + copy.add((T) SerializationUtils.clone(item)); + } + return copy; + } + + // Collectors.toSet + public static Set copyByCollectorsToSet(Set original) { + Set copy = original.stream() + .collect(Collectors.toSet()); + + return copy; + } + +} diff --git a/core-java-modules/core-java-8-2/src/main/java/com/baeldung/delay/Delay.java b/core-java-modules/core-java-concurrency-basic/src/main/java/com/baeldung/concurrent/delay/Delay.java similarity index 95% rename from core-java-modules/core-java-8-2/src/main/java/com/baeldung/delay/Delay.java rename to core-java-modules/core-java-concurrency-basic/src/main/java/com/baeldung/concurrent/delay/Delay.java index 0cc0c9487f..1689c09f51 100644 --- a/core-java-modules/core-java-8-2/src/main/java/com/baeldung/delay/Delay.java +++ b/core-java-modules/core-java-concurrency-basic/src/main/java/com/baeldung/concurrent/delay/Delay.java @@ -1,4 +1,4 @@ -package com.baeldung.delay; +package com.baeldung.concurrent.delay; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; @@ -61,6 +61,7 @@ public class Delay { executorService.schedule(Delay::someTask1, delayInSeconds, TimeUnit.SECONDS); + executorService.shutdown(); } private static void fixedRateServiceTask(Integer delayInSeconds) { @@ -78,6 +79,7 @@ public class Delay { sf.cancel(true); + executorService.shutdown(); } private static void someTask1() { diff --git a/core-java-modules/core-java-lambdas/README.md b/core-java-modules/core-java-lambdas/README.md deleted file mode 100644 index 10b876735e..0000000000 --- a/core-java-modules/core-java-lambdas/README.md +++ /dev/null @@ -1,3 +0,0 @@ -## Relevant articles: - -- [Why Do Local Variables Used in Lambdas Have to Be Final or Effectively Final?](https://www.baeldung.com/java-lambda-effectively-final-local-variables) diff --git a/core-java-modules/core-java-nio/README.md b/core-java-modules/core-java-nio/README.md new file mode 100644 index 0000000000..e73a9850ad --- /dev/null +++ b/core-java-modules/core-java-nio/README.md @@ -0,0 +1,3 @@ +## Relevant articles: + +- [Determine File Creating Date in Java](https://www.baeldung.com/file-creation-date-java) diff --git a/core-java-modules/core-java-nio/src/main/java/com/baeldung/creationdate/CreationDateResolver.java b/core-java-modules/core-java-nio/src/main/java/com/baeldung/creationdate/CreationDateResolver.java new file mode 100644 index 0000000000..6347a6e681 --- /dev/null +++ b/core-java-modules/core-java-nio/src/main/java/com/baeldung/creationdate/CreationDateResolver.java @@ -0,0 +1,35 @@ +package com.baeldung.creationdate; + +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.attribute.BasicFileAttributes; +import java.nio.file.attribute.FileTime; +import java.time.Instant; +import java.util.Optional; + +public class CreationDateResolver { + + public Instant resolveCreationTimeWithBasicAttributes(Path path) { + try { + final BasicFileAttributes attr = Files.readAttributes(path, BasicFileAttributes.class); + final FileTime fileTime = attr.creationTime(); + + return fileTime.toInstant(); + } catch (IOException 
ex) { + throw new RuntimeException("An issue occurred when resolving creation time", ex); + } + } + + public Optional resolveCreationTimeWithAttribute(Path path) { + try { + final FileTime creationTime = (FileTime) Files.getAttribute(path, "creationTime"); + + return Optional + .ofNullable(creationTime) + .map((FileTime::toInstant)); + } catch (IOException ex) { + throw new RuntimeException("An issue occurred when resolving creation time", ex); + } + } +} diff --git a/core-java-modules/core-java-nio/src/test/java/com/baeldung/creationdate/CreationDateResolverUnitTest.java b/core-java-modules/core-java-nio/src/test/java/com/baeldung/creationdate/CreationDateResolverUnitTest.java new file mode 100644 index 0000000000..5402852c74 --- /dev/null +++ b/core-java-modules/core-java-nio/src/test/java/com/baeldung/creationdate/CreationDateResolverUnitTest.java @@ -0,0 +1,45 @@ +package com.baeldung.creationdate; + +import org.junit.Test; + +import java.io.File; +import java.nio.file.Path; +import java.time.Instant; +import java.util.Optional; + +import static org.junit.Assert.assertTrue; + +public class CreationDateResolverUnitTest { + + private final CreationDateResolver creationDateResolver = new CreationDateResolver(); + + @Test + public void givenFile_whenGettingCreationDateTimeFromBasicAttributes_thenReturnDate() throws Exception { + + final File file = File.createTempFile("createdFile", ".txt"); + final Path path = file.toPath(); + + final Instant response = creationDateResolver.resolveCreationTimeWithBasicAttributes(path); + + assertTrue(Instant + .now() + .isAfter(response)); + + } + + @Test + public void givenFile_whenGettingCreationDateTimeFromAttribute_thenReturnDate() throws Exception { + + final File file = File.createTempFile("createdFile", ".txt"); + final Path path = file.toPath(); + + final Optional response = creationDateResolver.resolveCreationTimeWithAttribute(path); + + response.ifPresent((value) -> { + assertTrue(Instant + .now() + .isAfter(value)); + }); + + } +} diff --git a/core-java-modules/core-java-optional/README.md b/core-java-modules/core-java-optional/README.md new file mode 100644 index 0000000000..12a6fd1a56 --- /dev/null +++ b/core-java-modules/core-java-optional/README.md @@ -0,0 +1,5 @@ +========= + +## Core Java Optional + +### Relevant Articles: \ No newline at end of file diff --git a/core-java-modules/core-java-optional/pom.xml b/core-java-modules/core-java-optional/pom.xml new file mode 100644 index 0000000000..f2478c2c87 --- /dev/null +++ b/core-java-modules/core-java-optional/pom.xml @@ -0,0 +1,53 @@ + + 4.0.0 + + com.baeldung.core-java-modules + core-java-modules + 1.0.0-SNAPSHOT + + core-java-optional + 0.1.0-SNAPSHOT + jar + + + UTF-8 + 1.8 + 1.8 + 5.4.0.Final + 1.4.197 + 2.9.8 + + + + + org.hibernate + hibernate-core + ${hibernate.core.version} + + + com.h2database + h2 + ${h2database.version} + + + com.fasterxml.jackson.core + jackson-databind + ${jackson.databind.version} + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + ${maven-compiler-plugin.version} + + ${maven.compiler.source} + ${maven.compiler.target} + + + + + + \ No newline at end of file diff --git a/core-java-modules/core-java-8-2/src/main/java/com/baeldung/optionalReturnType/HandleOptionalTypeExample.java b/core-java-modules/core-java-optional/src/main/java/com/baeldung/optionalReturnType/HandleOptionalTypeExample.java similarity index 100% rename from 
core-java-modules/core-java-8-2/src/main/java/com/baeldung/optionalReturnType/HandleOptionalTypeExample.java rename to core-java-modules/core-java-optional/src/main/java/com/baeldung/optionalReturnType/HandleOptionalTypeExample.java diff --git a/core-java-modules/core-java-8-2/src/main/java/com/baeldung/optionalReturnType/OptionalToJsonExample.java b/core-java-modules/core-java-optional/src/main/java/com/baeldung/optionalReturnType/OptionalToJsonExample.java similarity index 100% rename from core-java-modules/core-java-8-2/src/main/java/com/baeldung/optionalReturnType/OptionalToJsonExample.java rename to core-java-modules/core-java-optional/src/main/java/com/baeldung/optionalReturnType/OptionalToJsonExample.java diff --git a/core-java-modules/core-java-8-2/src/main/java/com/baeldung/optionalReturnType/PersistOptionalTypeExample.java b/core-java-modules/core-java-optional/src/main/java/com/baeldung/optionalReturnType/PersistOptionalTypeExample.java similarity index 100% rename from core-java-modules/core-java-8-2/src/main/java/com/baeldung/optionalReturnType/PersistOptionalTypeExample.java rename to core-java-modules/core-java-optional/src/main/java/com/baeldung/optionalReturnType/PersistOptionalTypeExample.java diff --git a/core-java-modules/core-java-8-2/src/main/java/com/baeldung/optionalReturnType/PersistOptionalTypeExample2.java b/core-java-modules/core-java-optional/src/main/java/com/baeldung/optionalReturnType/PersistOptionalTypeExample2.java similarity index 100% rename from core-java-modules/core-java-8-2/src/main/java/com/baeldung/optionalReturnType/PersistOptionalTypeExample2.java rename to core-java-modules/core-java-optional/src/main/java/com/baeldung/optionalReturnType/PersistOptionalTypeExample2.java diff --git a/core-java-modules/core-java-8-2/src/main/java/com/baeldung/optionalReturnType/PersistUserExample.java b/core-java-modules/core-java-optional/src/main/java/com/baeldung/optionalReturnType/PersistUserExample.java similarity index 100% rename from core-java-modules/core-java-8-2/src/main/java/com/baeldung/optionalReturnType/PersistUserExample.java rename to core-java-modules/core-java-optional/src/main/java/com/baeldung/optionalReturnType/PersistUserExample.java diff --git a/core-java-modules/core-java-8-2/src/main/java/com/baeldung/optionalReturnType/SerializeOptionalTypeExample.java b/core-java-modules/core-java-optional/src/main/java/com/baeldung/optionalReturnType/SerializeOptionalTypeExample.java similarity index 100% rename from core-java-modules/core-java-8-2/src/main/java/com/baeldung/optionalReturnType/SerializeOptionalTypeExample.java rename to core-java-modules/core-java-optional/src/main/java/com/baeldung/optionalReturnType/SerializeOptionalTypeExample.java diff --git a/core-java-modules/core-java-8-2/src/main/java/com/baeldung/optionalReturnType/User.java b/core-java-modules/core-java-optional/src/main/java/com/baeldung/optionalReturnType/User.java similarity index 100% rename from core-java-modules/core-java-8-2/src/main/java/com/baeldung/optionalReturnType/User.java rename to core-java-modules/core-java-optional/src/main/java/com/baeldung/optionalReturnType/User.java diff --git a/core-java-modules/core-java-8-2/src/main/java/com/baeldung/optionalReturnType/UserOptional.java b/core-java-modules/core-java-optional/src/main/java/com/baeldung/optionalReturnType/UserOptional.java similarity index 100% rename from core-java-modules/core-java-8-2/src/main/java/com/baeldung/optionalReturnType/UserOptional.java rename to 
core-java-modules/core-java-optional/src/main/java/com/baeldung/optionalReturnType/UserOptional.java diff --git a/core-java-modules/core-java-8-2/src/main/java/com/baeldung/optionalReturnType/UserOptionalField.java b/core-java-modules/core-java-optional/src/main/java/com/baeldung/optionalReturnType/UserOptionalField.java similarity index 100% rename from core-java-modules/core-java-8-2/src/main/java/com/baeldung/optionalReturnType/UserOptionalField.java rename to core-java-modules/core-java-optional/src/main/java/com/baeldung/optionalReturnType/UserOptionalField.java diff --git a/core-java-modules/pom.xml b/core-java-modules/pom.xml index 2b563a7be4..11a1003460 100644 --- a/core-java-modules/pom.xml +++ b/core-java-modules/pom.xml @@ -16,6 +16,7 @@ pre-jpms core-java-exceptions + core-java-optional diff --git a/jackson-2/pom.xml b/jackson-2/pom.xml index 6b973dd6f5..6a975f1de7 100644 --- a/jackson-2/pom.xml +++ b/jackson-2/pom.xml @@ -28,6 +28,13 @@ jackson-dataformat-yaml 2.9.8 + + + + com.fasterxml.jackson.dataformat + jackson-dataformat-csv + 2.9.8 + diff --git a/jackson-2/src/main/java/com/baeldung/jackson/csv/JsonCsvConverter.java b/jackson-2/src/main/java/com/baeldung/jackson/csv/JsonCsvConverter.java new file mode 100644 index 0000000000..71c6de4d7e --- /dev/null +++ b/jackson-2/src/main/java/com/baeldung/jackson/csv/JsonCsvConverter.java @@ -0,0 +1,59 @@ +package com.baeldung.jackson.csv; + +import java.io.File; +import java.io.IOException; + +import com.baeldung.jackson.entities.OrderLine; +import com.baeldung.jackson.mixin.OrderLineForCsv; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.MappingIterator; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializationFeature; +import com.fasterxml.jackson.dataformat.csv.CsvMapper; +import com.fasterxml.jackson.dataformat.csv.CsvSchema; +import com.fasterxml.jackson.dataformat.csv.CsvSchema.Builder; + +public class JsonCsvConverter { + + public static void JsonToCsv(File jsonFile, File csvFile) throws IOException { + JsonNode jsonTree = new ObjectMapper().readTree(jsonFile); + + Builder csvSchemaBuilder = CsvSchema.builder(); + JsonNode firstObject = jsonTree.elements().next(); + firstObject.fieldNames().forEachRemaining(fieldName -> {csvSchemaBuilder.addColumn(fieldName);} ); + CsvSchema csvSchema = csvSchemaBuilder + .build() + .withHeader(); + + CsvMapper csvMapper = new CsvMapper(); + csvMapper.writerFor(JsonNode.class) + .with(csvSchema) + .writeValue(csvFile, jsonTree); + } + + public static void csvToJson(File csvFile, File jsonFile) throws IOException { + CsvSchema orderLineSchema = CsvSchema.emptySchema().withHeader(); + CsvMapper csvMapper = new CsvMapper(); + MappingIterator orderLines = csvMapper.readerFor(OrderLine.class) + .with(orderLineSchema) + .readValues(csvFile); + + new ObjectMapper() + .configure(SerializationFeature.INDENT_OUTPUT, true) + .writeValue(jsonFile, orderLines.readAll()); + } + + public static void JsonToFormattedCsv(File jsonFile, File csvFile) throws IOException { + CsvMapper csvMapper = new CsvMapper(); + CsvSchema csvSchema = csvMapper + .schemaFor(OrderLineForCsv.class) + .withHeader(); + csvMapper.addMixIn(OrderLine.class, OrderLineForCsv.class); + + OrderLine[] orderLines = new ObjectMapper() + .readValue(jsonFile, OrderLine[].class); + csvMapper.writerFor(OrderLine[].class) + .with(csvSchema) + .writeValue(csvFile, orderLines); + } +} diff --git 
a/jackson-2/src/main/java/com/baeldung/jackson/mixin/OrderLineForCsv.java b/jackson-2/src/main/java/com/baeldung/jackson/mixin/OrderLineForCsv.java new file mode 100644 index 0000000000..05d70a8053 --- /dev/null +++ b/jackson-2/src/main/java/com/baeldung/jackson/mixin/OrderLineForCsv.java @@ -0,0 +1,25 @@ +package com.baeldung.jackson.mixin; + +import java.math.BigDecimal; + +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; + +@JsonPropertyOrder({ + "count", + "name" +}) +public abstract class OrderLineForCsv { + + @JsonProperty("name") + private String item; + + @JsonProperty("count") + private int quantity; + + @JsonIgnore + private BigDecimal unitPrice; + + +} diff --git a/jackson-2/src/main/resources/orderLines.csv b/jackson-2/src/main/resources/orderLines.csv new file mode 100644 index 0000000000..e15e12f2bf --- /dev/null +++ b/jackson-2/src/main/resources/orderLines.csv @@ -0,0 +1,3 @@ +item,quantity,unitPrice +"No. 9 Sprockets",12,1.23 +"Widget (10mm)",4,3.45 diff --git a/jackson-2/src/main/resources/orderLines.json b/jackson-2/src/main/resources/orderLines.json new file mode 100644 index 0000000000..64f18e1673 --- /dev/null +++ b/jackson-2/src/main/resources/orderLines.json @@ -0,0 +1,9 @@ +[ { + "item" : "No. 9 Sprockets", + "quantity" : 12, + "unitPrice" : 1.23 +}, { + "item" : "Widget (10mm)", + "quantity" : 4, + "unitPrice" : 3.45 +} ] \ No newline at end of file diff --git a/jackson-2/src/test/java/com/baeldung/jackson/csv/CsvUnitTest.java b/jackson-2/src/test/java/com/baeldung/jackson/csv/CsvUnitTest.java new file mode 100644 index 0000000000..60c8ce79f3 --- /dev/null +++ b/jackson-2/src/test/java/com/baeldung/jackson/csv/CsvUnitTest.java @@ -0,0 +1,54 @@ +package com.baeldung.jackson.csv; + +import static org.junit.Assert.assertEquals; + +import java.io.File; +import java.io.IOException; +import java.nio.charset.Charset; +import java.util.List; + +import org.junit.Test; + +import com.fasterxml.jackson.core.JsonParseException; +import com.fasterxml.jackson.databind.JsonMappingException; +import com.google.common.io.Files; + + +public class CsvUnitTest { + + @Test + public void givenJsonInput_thenWriteCsv() throws JsonParseException, JsonMappingException, IOException { + JsonCsvConverter.JsonToCsv(new File("src/main/resources/orderLines.json"), + new File("src/main/resources/csvFromJson.csv")); + + assertEquals(readFile("src/main/resources/csvFromJson.csv"), + readFile("src/test/resources/expectedCsvFromJson.csv")); + } + + @Test + public void givenCsvInput_thenWritesJson() throws JsonParseException, JsonMappingException, IOException { + JsonCsvConverter.csvToJson(new File("src/main/resources/orderLines.csv"), + new File("src/main/resources/jsonFromCsv.json")); + + assertEquals(readFile("src/main/resources/jsonFromCsv.json"), + readFile("src/test/resources/expectedJsonFromCsv.json")); + + } + + @Test + public void givenJsonInput_thenWriteFormattedCsvOutput() throws JsonParseException, JsonMappingException, IOException { + JsonCsvConverter.JsonToFormattedCsv(new File("src/main/resources/orderLines.json"), + new File("src/main/resources/formattedCsvFromJson.csv")); + + assertEquals(readFile("src/main/resources/formattedCsvFromJson.csv"), + readFile("src/test/resources/expectedFormattedCsvFromJson.csv")); + + } + + private List readFile(String filename) throws IOException { + return Files.readLines(new File(filename), Charset.forName("utf-8")); + } + + +} +; \ No 
newline at end of file diff --git a/jackson-2/src/test/resources/expectedCsvFromJson.csv b/jackson-2/src/test/resources/expectedCsvFromJson.csv new file mode 100644 index 0000000000..e15e12f2bf --- /dev/null +++ b/jackson-2/src/test/resources/expectedCsvFromJson.csv @@ -0,0 +1,3 @@ +item,quantity,unitPrice +"No. 9 Sprockets",12,1.23 +"Widget (10mm)",4,3.45 diff --git a/jackson-2/src/test/resources/expectedFormattedCsvFromJson.csv b/jackson-2/src/test/resources/expectedFormattedCsvFromJson.csv new file mode 100644 index 0000000000..5a60ba602a --- /dev/null +++ b/jackson-2/src/test/resources/expectedFormattedCsvFromJson.csv @@ -0,0 +1,3 @@ +count,name +12,"No. 9 Sprockets" +4,"Widget (10mm)" diff --git a/jackson-2/src/test/resources/expectedJsonFromCsv.json b/jackson-2/src/test/resources/expectedJsonFromCsv.json new file mode 100644 index 0000000000..64f18e1673 --- /dev/null +++ b/jackson-2/src/test/resources/expectedJsonFromCsv.json @@ -0,0 +1,9 @@ +[ { + "item" : "No. 9 Sprockets", + "quantity" : 12, + "unitPrice" : 1.23 +}, { + "item" : "Widget (10mm)", + "quantity" : 4, + "unitPrice" : 3.45 +} ] \ No newline at end of file diff --git a/java-collections-conversions/src/test/java/org/baeldung/java/collections/IterableToCollectionUnitTest.java b/java-collections-conversions/src/test/java/org/baeldung/java/collections/IterableToCollectionUnitTest.java new file mode 100644 index 0000000000..f2c80429d1 --- /dev/null +++ b/java-collections-conversions/src/test/java/org/baeldung/java/collections/IterableToCollectionUnitTest.java @@ -0,0 +1,122 @@ +package org.baeldung.java.collections; + +import static org.hamcrest.Matchers.contains; +import static org.junit.Assert.assertThat; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Iterator; +import java.util.List; +import java.util.Spliterator; +import java.util.Spliterators; +import java.util.stream.Collectors; +import java.util.stream.StreamSupport; + +import org.apache.commons.collections4.IterableUtils; +import org.apache.commons.collections4.IteratorUtils; +import org.junit.Test; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Lists; + +public class IterableToCollectionUnitTest { + + Iterable iterable = Arrays.asList("john", "tom", "jane"); + Iterator iterator = iterable.iterator(); + + @Test + public void whenConvertIterableToListUsingJava_thenSuccess() { + List result = new ArrayList(); + for (String str : iterable) { + result.add(str); + } + + assertThat(result, contains("john", "tom", "jane")); + } + + @Test + public void whenConvertIterableToListUsingJava8_thenSuccess() { + List result = new ArrayList(); + iterable.forEach(result::add); + + assertThat(result, contains("john", "tom", "jane")); + } + + @Test + public void whenConvertIterableToListUsingJava8WithSpliterator_thenSuccess() { + List result = StreamSupport.stream(iterable.spliterator(), false) + .collect(Collectors.toList()); + + assertThat(result, contains("john", "tom", "jane")); + } + + @Test + public void whenConvertIterableToListUsingGuava_thenSuccess() { + List result = Lists.newArrayList(iterable); + + assertThat(result, contains("john", "tom", "jane")); + } + + @Test + public void whenConvertIterableToImmutableListUsingGuava_thenSuccess() { + List result = ImmutableList.copyOf(iterable); + + assertThat(result, contains("john", "tom", "jane")); + } + + @Test + public void whenConvertIterableToListUsingApacheCommons_thenSuccess() { + List result = IterableUtils.toList(iterable); + + assertThat(result, 
contains("john", "tom", "jane")); + } + + // ======================== Iterator + + @Test + public void whenConvertIteratorToListUsingJava_thenSuccess() { + List result = new ArrayList(); + while (iterator.hasNext()) { + result.add(iterator.next()); + } + + assertThat(result, contains("john", "tom", "jane")); + } + + @Test + public void whenConvertIteratorToListUsingJava8_thenSuccess() { + List result = new ArrayList(); + iterator.forEachRemaining(result::add); + + assertThat(result, contains("john", "tom", "jane")); + } + + @Test + public void whenConvertIteratorToListUsingJava8WithSpliterator_thenSuccess() { + List result = StreamSupport.stream(Spliterators.spliteratorUnknownSize(iterator, Spliterator.ORDERED), false) + .collect(Collectors.toList()); + + assertThat(result, contains("john", "tom", "jane")); + } + + @Test + public void whenConvertIteratorToListUsingGuava_thenSuccess() { + List result = Lists.newArrayList(iterator); + + assertThat(result, contains("john", "tom", "jane")); + } + + @Test + public void whenConvertIteratorToImmutableListUsingGuava_thenSuccess() { + List result = ImmutableList.copyOf(iterator); + + assertThat(result, contains("john", "tom", "jane")); + } + + @Test + public void whenConvertIteratorToListUsingApacheCommons_thenSuccess() { + List result = IteratorUtils.toList(iterator); + + assertThat(result, contains("john", "tom", "jane")); + } +} diff --git a/jersey/src/test/java/com/baeldung/jersey/server/rest/FruitResourceIntegrationTest.java b/jersey/src/test/java/com/baeldung/jersey/server/rest/FruitResourceIntegrationTest.java index 376c8c1e75..f7bb0df1ed 100644 --- a/jersey/src/test/java/com/baeldung/jersey/server/rest/FruitResourceIntegrationTest.java +++ b/jersey/src/test/java/com/baeldung/jersey/server/rest/FruitResourceIntegrationTest.java @@ -26,6 +26,7 @@ public class FruitResourceIntegrationTest extends JerseyTest { protected Application configure() { enable(TestProperties.LOG_TRAFFIC); enable(TestProperties.DUMP_ENTITY); + forceSet(TestProperties.CONTAINER_PORT, "0"); ViewApplicationConfig config = new ViewApplicationConfig(); config.register(FruitExceptionMapper.class); diff --git a/libraries-2/pom.xml b/libraries-2/pom.xml index a7aa3dc544..53a0233cdd 100644 --- a/libraries-2/pom.xml +++ b/libraries-2/pom.xml @@ -55,6 +55,17 @@ spring-boot-starter ${spring-boot-starter.version} + + net.openhft + chronicle-map + ${chronicle.map.version} + + + com.sun.java + tools + + + @@ -62,19 +73,16 @@ okhttp 3.14.2 - com.fasterxml.jackson.core jackson-databind 2.9.9 - com.google.code.gson gson 2.8.5 - com.squareup.okhttp3 mockwebserver @@ -86,7 +94,6 @@ crawler4j ${crawler4j.version} - com.github.jknack handlebars @@ -99,6 +106,7 @@ 4.8.28 6.0.0.Final 3.9.6 + 3.17.2 4.4.0 2.1.4.RELEASE diff --git a/libraries-2/src/test/java/com/baeldung/chroniclemap/ChronicleMapUnitTest.java b/libraries-2/src/test/java/com/baeldung/chroniclemap/ChronicleMapUnitTest.java new file mode 100644 index 0000000000..7f36a9abdb --- /dev/null +++ b/libraries-2/src/test/java/com/baeldung/chroniclemap/ChronicleMapUnitTest.java @@ -0,0 +1,132 @@ +package com.baeldung.chroniclemap; + +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.is; +import static org.junit.Assert.assertThat; + +import java.io.File; +import java.util.HashSet; +import java.util.Set; +import java.util.stream.Collectors; +import java.util.stream.IntStream; + +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; + +import 
net.openhft.chronicle.core.values.LongValue; +import net.openhft.chronicle.map.ChronicleMap; +import net.openhft.chronicle.map.ExternalMapQueryContext; +import net.openhft.chronicle.map.MapEntry; +import net.openhft.chronicle.values.Values; + +public class ChronicleMapUnitTest { + + static ChronicleMap persistedCountryMap = null; + + static ChronicleMap inMemoryCountryMap = null; + + static ChronicleMap> multiMap = null; + + @SuppressWarnings({ "unchecked", "rawtypes" }) + @BeforeClass + public static void init() { + try { + inMemoryCountryMap = ChronicleMap.of(LongValue.class, CharSequence.class) + .name("country-map") + .entries(50) + .averageValue("America") + .create(); + + persistedCountryMap = ChronicleMap.of(LongValue.class, CharSequence.class) + .name("country-map") + .entries(50) + .averageValue("America") + .createPersistedTo(new File(System.getProperty("user.home") + "/country-details.dat")); + + Set averageValue = IntStream.of(1, 2) + .boxed() + .collect(Collectors.toSet()); + multiMap = ChronicleMap.of(Integer.class, (Class>) (Class) Set.class) + .name("multi-map") + .entries(50) + .averageValue(averageValue) + .create(); + + LongValue qatarKey = Values.newHeapInstance(LongValue.class); + qatarKey.setValue(1); + inMemoryCountryMap.put(qatarKey, "Qatar"); + + LongValue key = Values.newHeapInstance(LongValue.class); + key.setValue(1); + persistedCountryMap.put(key, "Romania"); + key.setValue(2); + persistedCountryMap.put(key, "India"); + + Set set1 = new HashSet<>(); + set1.add(1); + set1.add(2); + multiMap.put(1, set1); + + Set set2 = new HashSet<>(); + set2.add(3); + multiMap.put(2, set2); + } catch (Exception e) { + e.printStackTrace(); + } + } + + @Test + public void givenGetQuery_whenCalled_shouldReturnResult() { + LongValue key = Values.newHeapInstance(LongValue.class); + key.setValue(1); + CharSequence country = inMemoryCountryMap.get(key); + assertThat(country.toString(), is(equalTo("Qatar"))); + } + + @Test + public void givenGetUsingQuery_whenCalled_shouldReturnResult() { + LongValue key = Values.newHeapInstance(LongValue.class); + StringBuilder country = new StringBuilder(); + key.setValue(1); + persistedCountryMap.getUsing(key, country); + assertThat(country.toString(), is(equalTo("Romania"))); + key.setValue(2); + persistedCountryMap.getUsing(key, country); + assertThat(country.toString(), is(equalTo("India"))); + } + + @Test + public void givenMultipleKeyQuery_whenProcessed_shouldChangeTheValue() { + try (ExternalMapQueryContext, ?> firstContext = multiMap.queryContext(1)) { + try (ExternalMapQueryContext, ?> secondContext = multiMap.queryContext(2)) { + firstContext.updateLock() + .lock(); + secondContext.updateLock() + .lock(); + MapEntry> firstEntry = firstContext.entry(); + Set firstSet = firstEntry.value() + .get(); + firstSet.remove(2); + MapEntry> secondEntry = secondContext.entry(); + Set secondSet = secondEntry.value() + .get(); + secondSet.add(4); + firstEntry.doReplaceValue(firstContext.wrapValueAsData(firstSet)); + secondEntry.doReplaceValue(secondContext.wrapValueAsData(secondSet)); + } + } finally { + assertThat(multiMap.get(1) + .size(), is(equalTo(1))); + assertThat(multiMap.get(2) + .size(), is(equalTo(2))); + } + } + + @AfterClass + public static void finish() { + persistedCountryMap.close(); + inMemoryCountryMap.close(); + multiMap.close(); + } +} diff --git a/libraries-data/pom.xml index 3276ebcdbb..31aaaea951 100644 --- a/libraries-data/pom.xml +++ b/libraries-data/pom.xml @@ -263,6 +263,16 @@ org.apache.storm storm-core 
${storm.version} + + + org.slf4j + slf4j-log4j12 + + + org.slf4j + log4j-over-slf4j + + diff --git a/libraries-data/src/test/java/com/baeldung/flink/BackupCreatorIntegrationTest.java b/libraries-data/src/test/java/com/baeldung/flink/BackupCreatorIntegrationTest.java index ab7d119c16..f46fffbb59 100644 --- a/libraries-data/src/test/java/com/baeldung/flink/BackupCreatorIntegrationTest.java +++ b/libraries-data/src/test/java/com/baeldung/flink/BackupCreatorIntegrationTest.java @@ -26,6 +26,7 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.List; +import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; public class BackupCreatorIntegrationTest { @@ -88,7 +89,7 @@ public class BackupCreatorIntegrationTest { SerializationSchema serializationSchema = new BackupSerializationSchema(); byte[] backupProcessed = serializationSchema.serialize(backup); - assertEquals(backupSerialized, backupProcessed); + assertArrayEquals(backupSerialized, backupProcessed); } private static class CollectingSink implements SinkFunction { diff --git a/libraries/src/main/java/com/baeldung/ftp/FtpClient.java b/libraries/src/main/java/com/baeldung/ftp/FtpClient.java index 209bed35f0..f885ff13b3 100644 --- a/libraries/src/main/java/com/baeldung/ftp/FtpClient.java +++ b/libraries/src/main/java/com/baeldung/ftp/FtpClient.java @@ -59,5 +59,6 @@ class FtpClient { void downloadFile(String source, String destination) throws IOException { FileOutputStream out = new FileOutputStream(destination); ftp.retrieveFile(source, out); + out.close(); } } diff --git a/libraries/src/test/java/com/baeldung/serenity/spring/AdderMethodDirtiesContextIntegrationTest.java b/libraries/src/test/java/com/baeldung/serenity/spring/AdderMethodDirtiesContextIntegrationTest.java index fc7067520d..3201908bf7 100644 --- a/libraries/src/test/java/com/baeldung/serenity/spring/AdderMethodDirtiesContextIntegrationTest.java +++ b/libraries/src/test/java/com/baeldung/serenity/spring/AdderMethodDirtiesContextIntegrationTest.java @@ -28,7 +28,7 @@ public class AdderMethodDirtiesContextIntegrationTest { @Test public void _1_givenNumber_whenAdd_thenSumWrong() { adderServiceSteps.whenAdd(); - adderServiceSteps.sumWrong(); + adderServiceSteps.summedUp(); } @Rule diff --git a/persistence-modules/elasticsearch/.gitignore b/persistence-modules/elasticsearch/.gitignore new file mode 100644 index 0000000000..153c9335eb --- /dev/null +++ b/persistence-modules/elasticsearch/.gitignore @@ -0,0 +1,29 @@ +HELP.md +/target/ +!.mvn/wrapper/maven-wrapper.jar + +### STS ### +.apt_generated +.classpath +.factorypath +.project +.settings +.springBeans +.sts4-cache + +### IntelliJ IDEA ### +.idea +*.iws +*.iml +*.ipr + +### NetBeans ### +/nbproject/private/ +/nbbuild/ +/dist/ +/nbdist/ +/.nb-gradle/ +/build/ + +### VS Code ### +.vscode/ diff --git a/persistence-modules/elasticsearch/pom.xml b/persistence-modules/elasticsearch/pom.xml new file mode 100644 index 0000000000..ceed88aa24 --- /dev/null +++ b/persistence-modules/elasticsearch/pom.xml @@ -0,0 +1,31 @@ + + + 4.0.0 + com.baeldung + elasticsearch + 0.0.1-SNAPSHOT + elasticsearch + Demo project for Java Elasticsearch libraries + + + com.baeldung + parent-modules + 1.0.0-SNAPSHOT + ../../ + + + + + + io.searchbox + jest + 6.3.1 + + + com.fasterxml.jackson.core + jackson-databind + 2.9.6 + + + diff --git a/persistence-modules/elasticsearch/src/main/java/com/baeldung/jest/Employee.java b/persistence-modules/elasticsearch/src/main/java/com/baeldung/jest/Employee.java new file 
mode 100644 index 0000000000..6f28a42a9c --- /dev/null +++ b/persistence-modules/elasticsearch/src/main/java/com/baeldung/jest/Employee.java @@ -0,0 +1,42 @@ +package com.baeldung.jest; + +import java.util.List; + +public class Employee { + String name; + String title; + List skills; + int yearsOfService; + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getTitle() { + return title; + } + + public void setTitle(String title) { + this.title = title; + } + + public List getSkills() { + return skills; + } + + public void setSkills(List skills) { + this.skills = skills; + } + + public int getYearsOfService() { + return yearsOfService; + } + + public void setYearsOfService(int yearsOfService) { + this.yearsOfService = yearsOfService; + } +} diff --git a/persistence-modules/elasticsearch/src/main/java/com/baeldung/jest/JestDemoApplication.java b/persistence-modules/elasticsearch/src/main/java/com/baeldung/jest/JestDemoApplication.java new file mode 100644 index 0000000000..91e499da2e --- /dev/null +++ b/persistence-modules/elasticsearch/src/main/java/com/baeldung/jest/JestDemoApplication.java @@ -0,0 +1,174 @@ +package com.baeldung.jest; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import io.searchbox.client.JestClient; +import io.searchbox.client.JestClientFactory; +import io.searchbox.client.JestResult; +import io.searchbox.client.JestResultHandler; +import io.searchbox.client.config.HttpClientConfig; +import io.searchbox.core.*; +import io.searchbox.indices.CreateIndex; +import io.searchbox.indices.IndicesExists; +import io.searchbox.indices.aliases.AddAliasMapping; +import io.searchbox.indices.aliases.ModifyAliases; +import io.searchbox.indices.aliases.RemoveAliasMapping; + +import java.io.IOException; +import java.util.*; + +public class JestDemoApplication { + + public static void main(String[] args) throws IOException { + + // Demo the JestClient + JestClient jestClient = jestClient(); + + // Check an index + JestResult result = jestClient.execute(new IndicesExists.Builder("employees").build()); + if(!result.isSucceeded()) { + System.out.println(result.getErrorMessage()); + } + + // Create an index + jestClient.execute(new CreateIndex.Builder("employees").build()); + + // Create an index with options + Map settings = new HashMap<>(); + settings.put("number_of_shards", 11); + settings.put("number_of_replicas", 2); + jestClient.execute(new CreateIndex.Builder("employees").settings(settings).build()); + + // Create an alias, then remove it + jestClient.execute(new ModifyAliases.Builder( + new AddAliasMapping.Builder( + "employees", + "e") + .build()) + .build()); + JestResult jestResult = jestClient.execute(new ModifyAliases.Builder( + new RemoveAliasMapping.Builder( + "employees", + "e") + .build()) + .build()); + + if(jestResult.isSucceeded()) { + System.out.println("Success!"); + } + else { + System.out.println(jestResult.getErrorMessage()); + } + + // Sample JSON for indexing + + // { + // "name": "Michael Pratt", + // "title": "Java Developer", + // "skills": ["java", "spring", "elasticsearch"], + // "yearsOfService": 2 + // } + + // Index a document from String + ObjectMapper mapper = new ObjectMapper(); + JsonNode employeeJsonNode = mapper.createObjectNode() + .put("name", "Michael Pratt") + .put("title", "Java Developer") + .put("yearsOfService", 2) + .set("skills", mapper.createArrayNode() + .add("java") + .add("spring") + 
.add("elasticsearch")); + jestClient.execute(new Index.Builder(employeeJsonNode.toString()).index("employees").build()); + + // Index a document from Map + Map employeeHashMap = new LinkedHashMap<>(); + employeeHashMap.put("name", "Michael Pratt"); + employeeHashMap.put("title", "Java Developer"); + employeeHashMap.put("yearsOfService", 2); + employeeHashMap.put("skills", Arrays.asList("java", "spring", "elasticsearch")); + jestClient.execute(new Index.Builder(employeeHashMap).index("employees").build()); + + // Index a document from POJO + Employee employee = new Employee(); + employee.setName("Michael Pratt"); + employee.setTitle("Java Developer"); + employee.setYearsOfService(2); + employee.setSkills(Arrays.asList("java", "spring", "elasticsearch")); + jestClient.execute(new Index.Builder(employee).index("employees").build()); + + // Read document by ID + Employee getResult = jestClient.execute(new Get.Builder("employees", "1").build()).getSourceAsObject(Employee.class); + + // Search documents + String search = "{\n" + + " \"query\": {\n" + + " \"bool\": {\n" + + " \"must\": [\n" + + " { \"match\": { \"name\": \"Michael Pratt\" }}\n" + + " ]\n" + + " }\n" + + " }\n" + + "}"; + List> searchResults = + jestClient.execute(new Search.Builder(search).build()) + .getHits(Employee.class); + + searchResults.forEach(hit -> { + System.out.println(String.format("Document %s has score %s", hit.id, hit.score)); + }); + + // Update document + employee.setYearsOfService(3); + jestClient.execute(new Update.Builder(employee).index("employees").id("1").build()); + + // Delete documents + jestClient.execute(new Delete.Builder("2") .index("employees") .build()); + + // Bulk operations + Employee employeeObject1 = new Employee(); + employee.setName("John Smith"); + employee.setTitle("Python Developer"); + employee.setYearsOfService(10); + employee.setSkills(Arrays.asList("python")); + + Employee employeeObject2 = new Employee(); + employee.setName("Kate Smith"); + employee.setTitle("Senior JavaScript Developer"); + employee.setYearsOfService(10); + employee.setSkills(Arrays.asList("javascript", "angular")); + + jestClient.execute(new Bulk.Builder().defaultIndex("employees") + .addAction(new Index.Builder(employeeObject1).build()) + .addAction(new Index.Builder(employeeObject2).build()) + .addAction(new Delete.Builder("3").build()) .build()); + + // Async operations + Employee employeeObject3 = new Employee(); + employee.setName("Jane Doe"); + employee.setTitle("Manager"); + employee.setYearsOfService(20); + employee.setSkills(Arrays.asList("managing")); + + jestClient.executeAsync( new Index.Builder(employeeObject3).build(), new JestResultHandler() { + @Override public void completed(JestResult result) { + // handle result + } + @Override public void failed(Exception ex) { + // handle exception + } + }); + } + + private static JestClient jestClient() + { + JestClientFactory factory = new JestClientFactory(); + factory.setHttpClientConfig( + new HttpClientConfig.Builder("http://localhost:9200") + .multiThreaded(true) + .defaultMaxTotalConnectionPerRoute(2) + .maxTotalConnection(20) + .build()); + return factory.getObject(); + } +} diff --git a/persistence-modules/pom.xml b/persistence-modules/pom.xml index ba3baf6636..e6d883334f 100644 --- a/persistence-modules/pom.xml +++ b/persistence-modules/pom.xml @@ -18,6 +18,7 @@ apache-cayenne core-java-persistence deltaspike + elasticsearch flyway hbase hibernate5 diff --git a/persistence-modules/spring-data-cassandra-reactive/pom.xml 
b/persistence-modules/spring-data-cassandra-reactive/pom.xml index 5a34d90c9f..288f842201 100644 --- a/persistence-modules/spring-data-cassandra-reactive/pom.xml +++ b/persistence-modules/spring-data-cassandra-reactive/pom.xml @@ -46,6 +46,13 @@ reactor-test test + + org.cassandraunit + cassandra-unit-spring + ${cassandra-unit-spring.version} + test + + @@ -53,6 +60,7 @@ UTF-8 2.1.2.RELEASE + 3.11.2.0 diff --git a/persistence-modules/spring-data-cassandra-reactive/src/test/java/com/baeldung/cassandra/reactive/ReactiveEmployeeRepositoryIntegrationTest.java b/persistence-modules/spring-data-cassandra-reactive/src/test/java/com/baeldung/cassandra/reactive/ReactiveEmployeeRepositoryIntegrationTest.java index ad726fe969..ae314db5e7 100644 --- a/persistence-modules/spring-data-cassandra-reactive/src/test/java/com/baeldung/cassandra/reactive/ReactiveEmployeeRepositoryIntegrationTest.java +++ b/persistence-modules/spring-data-cassandra-reactive/src/test/java/com/baeldung/cassandra/reactive/ReactiveEmployeeRepositoryIntegrationTest.java @@ -1,13 +1,23 @@ package com.baeldung.cassandra.reactive; -import com.baeldung.cassandra.reactive.model.Employee; -import com.baeldung.cassandra.reactive.repository.EmployeeRepository; +import org.cassandraunit.spring.CassandraDataSet; +import org.cassandraunit.spring.CassandraUnitDependencyInjectionTestExecutionListener; +import org.cassandraunit.spring.CassandraUnitTestExecutionListener; +import org.cassandraunit.spring.EmbeddedCassandra; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.context.TestExecutionListeners; import org.springframework.test.context.junit4.SpringRunner; +import org.springframework.test.context.support.DependencyInjectionTestExecutionListener; +import org.springframework.test.context.support.DirtiesContextTestExecutionListener; +import org.springframework.test.context.web.ServletTestExecutionListener; + +import com.baeldung.cassandra.reactive.model.Employee; +import com.baeldung.cassandra.reactive.repository.EmployeeRepository; + import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; import reactor.test.StepVerifier; @@ -15,6 +25,15 @@ import reactor.test.StepVerifier; @RunWith(SpringRunner.class) @SpringBootTest +@TestExecutionListeners(listeners = { + CassandraUnitDependencyInjectionTestExecutionListener.class, + CassandraUnitTestExecutionListener.class, + ServletTestExecutionListener.class, + DependencyInjectionTestExecutionListener.class, + DirtiesContextTestExecutionListener.class +}) +@EmbeddedCassandra(timeout = 60000, configuration = "cassandra-server.yaml") +@CassandraDataSet(value = {"cassandra-init.cql"}, keyspace = "practice") public class ReactiveEmployeeRepositoryIntegrationTest { @Autowired diff --git a/persistence-modules/spring-data-cassandra-reactive/src/test/resources/cassandra-init.cql b/persistence-modules/spring-data-cassandra-reactive/src/test/resources/cassandra-init.cql new file mode 100644 index 0000000000..b28fda5320 --- /dev/null +++ b/persistence-modules/spring-data-cassandra-reactive/src/test/resources/cassandra-init.cql @@ -0,0 +1,7 @@ +CREATE TABLE employee( + id int PRIMARY KEY, + name text, + address text, + email text, + age int +); \ No newline at end of file diff --git a/persistence-modules/spring-data-cassandra-reactive/src/test/resources/cassandra-server.yaml 
b/persistence-modules/spring-data-cassandra-reactive/src/test/resources/cassandra-server.yaml new file mode 100644 index 0000000000..af3fb21e54 --- /dev/null +++ b/persistence-modules/spring-data-cassandra-reactive/src/test/resources/cassandra-server.yaml @@ -0,0 +1,590 @@ +# Cassandra storage config YAML + +# NOTE: +# See http://wiki.apache.org/cassandra/StorageConfiguration for +# full explanations of configuration directives +# /NOTE + +# The name of the cluster. This is mainly used to prevent machines in +# one logical cluster from joining another. +cluster_name: 'Test Cluster' + +# You should always specify InitialToken when setting up a production +# cluster for the first time, and often when adding capacity later. +# The principle is that each node should be given an equal slice of +# the token ring; see http://wiki.apache.org/cassandra/Operations +# for more details. +# +# If blank, Cassandra will request a token bisecting the range of +# the heaviest-loaded existing node. If there is no load information +# available, such as is the case with a new cluster, it will pick +# a random token, which will lead to hot spots. +#initial_token: + +# See http://wiki.apache.org/cassandra/HintedHandoff +hinted_handoff_enabled: true +# this defines the maximum amount of time a dead host will have hints +# generated. After it has been dead this long, new hints for it will not be +# created until it has been seen alive and gone down again. +max_hint_window_in_ms: 10800000 # 3 hours +# Maximum throttle in KBs per second, per delivery thread. This will be +# reduced proportionally to the number of nodes in the cluster. (If there +# are two nodes in the cluster, each delivery thread will use the maximum +# rate; if there are three, each will throttle to half of the maximum, +# since we expect two nodes to be delivering hints simultaneously.) +hinted_handoff_throttle_in_kb: 1024 +# Number of threads with which to deliver hints; +# Consider increasing this number when you have multi-dc deployments, since +# cross-dc handoff tends to be slower +max_hints_delivery_threads: 2 + +hints_directory: target/embeddedCassandra/hints + +# The following setting populates the page cache on memtable flush and compaction +# WARNING: Enable this setting only when the whole node's data fits in memory. +# Defaults to: false +# populate_io_cache_on_flush: false + +# Authentication backend, implementing IAuthenticator; used to identify users +# Out of the box, Cassandra provides org.apache.cassandra.auth.{AllowAllAuthenticator, +# PasswordAuthenticator}. +# +# - AllowAllAuthenticator performs no checks - set it to disable authentication. +# - PasswordAuthenticator relies on username/password pairs to authenticate +# users. It keeps usernames and hashed passwords in system_auth.credentials table. +# Please increase system_auth keyspace replication factor if you use this authenticator. +authenticator: AllowAllAuthenticator + +# Authorization backend, implementing IAuthorizer; used to limit access/provide permissions +# Out of the box, Cassandra provides org.apache.cassandra.auth.{AllowAllAuthorizer, +# CassandraAuthorizer}. +# +# - AllowAllAuthorizer allows any action to any user - set it to disable authorization. +# - CassandraAuthorizer stores permissions in system_auth.permissions table. Please +# increase system_auth keyspace replication factor if you use this authorizer. 
+authorizer: AllowAllAuthorizer + +# Validity period for permissions cache (fetching permissions can be an +# expensive operation depending on the authorizer, CassandraAuthorizer is +# one example). Defaults to 2000, set to 0 to disable. +# Will be disabled automatically for AllowAllAuthorizer. +permissions_validity_in_ms: 2000 + + +# The partitioner is responsible for distributing rows (by key) across +# nodes in the cluster. Any IPartitioner may be used, including your/m +# own as long as it is on the classpath. Out of the box, Cassandra +# provides org.apache.cassandra.dht.{Murmur3Partitioner, RandomPartitioner +# ByteOrderedPartitioner, OrderPreservingPartitioner (deprecated)}. +# +# - RandomPartitioner distributes rows across the cluster evenly by md5. +# This is the default prior to 1.2 and is retained for compatibility. +# - Murmur3Partitioner is similar to RandomPartioner but uses Murmur3_128 +# Hash Function instead of md5. When in doubt, this is the best option. +# - ByteOrderedPartitioner orders rows lexically by key bytes. BOP allows +# scanning rows in key order, but the ordering can generate hot spots +# for sequential insertion workloads. +# - OrderPreservingPartitioner is an obsolete form of BOP, that stores +# - keys in a less-efficient format and only works with keys that are +# UTF8-encoded Strings. +# - CollatingOPP collates according to EN,US rules rather than lexical byte +# ordering. Use this as an example if you need custom collation. +# +# See http://wiki.apache.org/cassandra/Operations for more on +# partitioners and token selection. +partitioner: org.apache.cassandra.dht.Murmur3Partitioner + +# directories where Cassandra should store data on disk. +data_file_directories: + - target/embeddedCassandra/data + +# commit log +commitlog_directory: target/embeddedCassandra/commitlog + +cdc_raw_directory: target/embeddedCassandra/cdc + +# policy for data disk failures: +# stop: shut down gossip and Thrift, leaving the node effectively dead, but +# can still be inspected via JMX. +# best_effort: stop using the failed disk and respond to requests based on +# remaining available sstables. This means you WILL see obsolete +# data at CL.ONE! +# ignore: ignore fatal errors and let requests fail, as in pre-1.2 Cassandra +disk_failure_policy: stop + + +# Maximum size of the key cache in memory. +# +# Each key cache hit saves 1 seek and each row cache hit saves 2 seeks at the +# minimum, sometimes more. The key cache is fairly tiny for the amount of +# time it saves, so it's worthwhile to use it at large numbers. +# The row cache saves even more time, but must store the whole values of +# its rows, so it is extremely space-intensive. It's best to only use the +# row cache if you have hot rows or static rows. +# +# NOTE: if you reduce the size, you may not get you hottest keys loaded on startup. +# +# Default value is empty to make it "auto" (min(5% of Heap (in MB), 100MB)). Set to 0 to disable key cache. +key_cache_size_in_mb: + +# Duration in seconds after which Cassandra should +# safe the keys cache. Caches are saved to saved_caches_directory as +# specified in this configuration file. +# +# Saved caches greatly improve cold-start speeds, and is relatively cheap in +# terms of I/O for the key cache. Row cache saving is much more expensive and +# has limited use. +# +# Default is 14400 or 4 hours. 
+key_cache_save_period: 14400 + +# Number of keys from the key cache to save +# Disabled by default, meaning all keys are going to be saved +# key_cache_keys_to_save: 100 + +# Maximum size of the row cache in memory. +# NOTE: if you reduce the size, you may not get you hottest keys loaded on startup. +# +# Default value is 0, to disable row caching. +row_cache_size_in_mb: 0 + +# Duration in seconds after which Cassandra should +# safe the row cache. Caches are saved to saved_caches_directory as specified +# in this configuration file. +# +# Saved caches greatly improve cold-start speeds, and is relatively cheap in +# terms of I/O for the key cache. Row cache saving is much more expensive and +# has limited use. +# +# Default is 0 to disable saving the row cache. +row_cache_save_period: 0 + +# Number of keys from the row cache to save +# Disabled by default, meaning all keys are going to be saved +# row_cache_keys_to_save: 100 + +# saved caches +saved_caches_directory: target/embeddedCassandra/saved_caches + +# commitlog_sync may be either "periodic" or "batch." +# When in batch mode, Cassandra won't ack writes until the commit log +# has been fsynced to disk. It will wait up to +# commitlog_sync_batch_window_in_ms milliseconds for other writes, before +# performing the sync. +# +# commitlog_sync: batch +# commitlog_sync_batch_window_in_ms: 50 +# +# the other option is "periodic" where writes may be acked immediately +# and the CommitLog is simply synced every commitlog_sync_period_in_ms +# milliseconds. +commitlog_sync: periodic +commitlog_sync_period_in_ms: 10000 + +# The size of the individual commitlog file segments. A commitlog +# segment may be archived, deleted, or recycled once all the data +# in it (potentially from each columnfamily in the system) has been +# flushed to sstables. +# +# The default size is 32, which is almost always fine, but if you are +# archiving commitlog segments (see commitlog_archiving.properties), +# then you probably want a finer granularity of archiving; 8 or 16 MB +# is reasonable. +commitlog_segment_size_in_mb: 32 + +# any class that implements the SeedProvider interface and has a +# constructor that takes a Map of parameters will do. +seed_provider: + # Addresses of hosts that are deemed contact points. + # Cassandra nodes use this list of hosts to find each other and learn + # the topology of the ring. You must change this if you are running + # multiple nodes! + - class_name: org.apache.cassandra.locator.SimpleSeedProvider + parameters: + # seeds is actually a comma-delimited list of addresses. + # Ex: ",," + - seeds: "127.0.0.1" + + +# For workloads with more data than can fit in memory, Cassandra's +# bottleneck will be reads that need to fetch data from +# disk. "concurrent_reads" should be set to (16 * number_of_drives) in +# order to allow the operations to enqueue low enough in the stack +# that the OS and drives can reorder them. +# +# On the other hand, since writes are almost never IO bound, the ideal +# number of "concurrent_writes" is dependent on the number of cores in +# your system; (8 * number_of_cores) is a good rule of thumb. +concurrent_reads: 32 +concurrent_writes: 32 + +# Total memory to use for memtables. Cassandra will flush the largest +# memtable when this much memory is used. +# If omitted, Cassandra will set it to 1/3 of the heap. +# memtable_total_space_in_mb: 2048 + +# Total space to use for commitlogs. 
+# If space gets above this value (it will round up to the next nearest +# segment multiple), Cassandra will flush every dirty CF in the oldest +# segment and remove it. +# commitlog_total_space_in_mb: 4096 + +# This sets the amount of memtable flush writer threads. These will +# be blocked by disk io, and each one will hold a memtable in memory +# while blocked. If you have a large heap and many data directories, +# you can increase this value for better flush performance. +# By default this will be set to the amount of data directories defined. +#memtable_flush_writers: 1 + +# the number of full memtables to allow pending flush, that is, +# waiting for a writer thread. At a minimum, this should be set to +# the maximum number of secondary indexes created on a single CF. +#memtable_flush_queue_size: 4 + +# Whether to, when doing sequential writing, fsync() at intervals in +# order to force the operating system to flush the dirty +# buffers. Enable this to avoid sudden dirty buffer flushing from +# impacting read latencies. Almost always a good idea on SSD:s; not +# necessarily on platters. +trickle_fsync: false +trickle_fsync_interval_in_kb: 10240 + +# TCP port, for commands and data +storage_port: 7010 + +# SSL port, for encrypted communication. Unused unless enabled in +# encryption_options +ssl_storage_port: 7011 + +# Address to bind to and tell other Cassandra nodes to connect to. You +# _must_ change this if you want multiple nodes to be able to +# communicate! +# +# Leaving it blank leaves it up to InetAddress.getLocalHost(). This +# will always do the Right Thing *if* the node is properly configured +# (hostname, name resolution, etc), and the Right Thing is to use the +# address associated with the hostname (it might not be). +# +# Setting this to 0.0.0.0 is always wrong. +listen_address: 127.0.0.1 + +start_native_transport: true +# port for the CQL native transport to listen for clients on +native_transport_port: 9042 + +# Whether to start the thrift rpc server. +start_rpc: true + +# Address to broadcast to other Cassandra nodes +# Leaving this blank will set it to the same value as listen_address +# broadcast_address: 1.2.3.4 + +# The address to bind the Thrift RPC service to -- clients connect +# here. Unlike ListenAddress above, you *can* specify 0.0.0.0 here if +# you want Thrift to listen on all interfaces. +# +# Leaving this blank has the same effect it does for ListenAddress, +# (i.e. it will be based on the configured hostname of the node). +rpc_address: localhost +# port for Thrift to listen for clients on +rpc_port: 9171 + +# enable or disable keepalive on rpc connections +rpc_keepalive: true + +# Cassandra provides three options for the RPC Server: +# +# sync -> One connection per thread in the rpc pool (see below). +# For a very large number of clients, memory will be your limiting +# factor; on a 64 bit JVM, 128KB is the minimum stack size per thread. +# Connection pooling is very, very strongly recommended. +# +# async -> Nonblocking server implementation with one thread to serve +# rpc connections. This is not recommended for high throughput use +# cases. Async has been tested to be about 50% slower than sync +# or hsha and is deprecated: it will be removed in the next major release. +# +# hsha -> Stands for "half synchronous, half asynchronous." The rpc thread pool +# (see below) is used to manage requests, but the threads are multiplexed +# across the different clients. +# +# The default is sync because on Windows hsha is about 30% slower. 
On Linux, +# sync/hsha performance is about the same, with hsha of course using less memory. +rpc_server_type: sync + +# Uncomment rpc_min|max|thread to set request pool size. +# You would primarily set max for the sync server to safeguard against +# misbehaved clients; if you do hit the max, Cassandra will block until one +# disconnects before accepting more. The defaults for sync are min of 16 and max +# unlimited. +# +# For the Hsha server, the min and max both default to quadruple the number of +# CPU cores. +# +# This configuration is ignored by the async server. +# +# rpc_min_threads: 16 +# rpc_max_threads: 2048 + +# uncomment to set socket buffer sizes on rpc connections +# rpc_send_buff_size_in_bytes: +# rpc_recv_buff_size_in_bytes: + +# Frame size for thrift (maximum field length). +# 0 disables TFramedTransport in favor of TSocket. This option +# is deprecated; we strongly recommend using Framed mode. +thrift_framed_transport_size_in_mb: 15 + +# The max length of a thrift message, including all fields and +# internal thrift overhead. +thrift_max_message_length_in_mb: 16 + +# Set to true to have Cassandra create a hard link to each sstable +# flushed or streamed locally in a backups/ subdirectory of the +# Keyspace data. Removing these links is the operator's +# responsibility. +incremental_backups: false + +# Whether or not to take a snapshot before each compaction. Be +# careful using this option, since Cassandra won't clean up the +# snapshots for you. Mostly useful if you're paranoid when there +# is a data format change. +snapshot_before_compaction: false + +# Whether or not a snapshot is taken of the data before keyspace truncation +# or dropping of column families. The STRONGLY advised default of true +# should be used to provide data safety. If you set this flag to false, you will +# lose data on truncation or drop. +auto_snapshot: false + +# Add column indexes to a row after its contents reach this size. +# Increase if your column values are large, or if you have a very large +# number of columns. The competing causes are, Cassandra has to +# deserialize this much of the row to read a single column, so you want +# it to be small - at least if you do many partial-row reads - but all +# the index data is read for each access, so you don't want to generate +# that wastefully either. +column_index_size_in_kb: 64 + +# Size limit for rows being compacted in memory. Larger rows will spill +# over to disk and use a slower two-pass compaction process. A message +# will be logged specifying the row key. +#in_memory_compaction_limit_in_mb: 64 + +# Number of simultaneous compactions to allow, NOT including +# validation "compactions" for anti-entropy repair. Simultaneous +# compactions can help preserve read performance in a mixed read/write +# workload, by mitigating the tendency of small sstables to accumulate +# during a single long running compactions. The default is usually +# fine and if you experience problems with compaction running too +# slowly or too fast, you should look at +# compaction_throughput_mb_per_sec first. +# +# This setting has no effect on LeveledCompactionStrategy. +# +# concurrent_compactors defaults to the number of cores. +# Uncomment to make compaction mono-threaded, the pre-0.8 default. +#concurrent_compactors: 1 + +# Multi-threaded compaction. When enabled, each compaction will use +# up to one thread per core, plus one thread per sstable being merged. 
+# This is usually only useful for SSD-based hardware: otherwise, +# your concern is usually to get compaction to do LESS i/o (see: +# compaction_throughput_mb_per_sec), not more. +#multithreaded_compaction: false + +# Throttles compaction to the given total throughput across the entire +# system. The faster you insert data, the faster you need to compact in +# order to keep the sstable count down, but in general, setting this to +# 16 to 32 times the rate you are inserting data is more than sufficient. +# Setting this to 0 disables throttling. Note that this account for all types +# of compaction, including validation compaction. +compaction_throughput_mb_per_sec: 16 + +# Track cached row keys during compaction, and re-cache their new +# positions in the compacted sstable. Disable if you use really large +# key caches. +#compaction_preheat_key_cache: true + +# Throttles all outbound streaming file transfers on this node to the +# given total throughput in Mbps. This is necessary because Cassandra does +# mostly sequential IO when streaming data during bootstrap or repair, which +# can lead to saturating the network connection and degrading rpc performance. +# When unset, the default is 200 Mbps or 25 MB/s. +# stream_throughput_outbound_megabits_per_sec: 200 + +# How long the coordinator should wait for read operations to complete +read_request_timeout_in_ms: 5000 +# How long the coordinator should wait for seq or index scans to complete +range_request_timeout_in_ms: 10000 +# How long the coordinator should wait for writes to complete +write_request_timeout_in_ms: 2000 +# How long a coordinator should continue to retry a CAS operation +# that contends with other proposals for the same row +cas_contention_timeout_in_ms: 1000 +# How long the coordinator should wait for truncates to complete +# (This can be much longer, because unless auto_snapshot is disabled +# we need to flush first so we can snapshot before removing the data.) +truncate_request_timeout_in_ms: 60000 +# The default timeout for other, miscellaneous operations +request_timeout_in_ms: 10000 + +# Enable operation timeout information exchange between nodes to accurately +# measure request timeouts. If disabled, replicas will assume that requests +# were forwarded to them instantly by the coordinator, which means that +# under overload conditions we will waste that much extra time processing +# already-timed-out requests. +# +# Warning: before enabling this property make sure to ntp is installed +# and the times are synchronized between the nodes. +cross_node_timeout: false + +# Enable socket timeout for streaming operation. +# When a timeout occurs during streaming, streaming is retried from the start +# of the current file. This _can_ involve re-streaming an important amount of +# data, so you should avoid setting the value too low. +# Default value is 0, which never timeout streams. +# streaming_socket_timeout_in_ms: 0 + +# phi value that must be reached for a host to be marked down. +# most users should never need to adjust this. +# phi_convict_threshold: 8 + +# endpoint_snitch -- Set this to a class that implements +# IEndpointSnitch. The snitch has two functions: +# - it teaches Cassandra enough about your network topology to route +# requests efficiently +# - it allows Cassandra to spread replicas around your cluster to avoid +# correlated failures. It does this by grouping machines into +# "datacenters" and "racks." 
Cassandra will do its best not to have +# more than one replica on the same "rack" (which may not actually +# be a physical location) +# +# IF YOU CHANGE THE SNITCH AFTER DATA IS INSERTED INTO THE CLUSTER, +# YOU MUST RUN A FULL REPAIR, SINCE THE SNITCH AFFECTS WHERE REPLICAS +# ARE PLACED. +# +# Out of the box, Cassandra provides +# - SimpleSnitch: +# Treats Strategy order as proximity. This improves cache locality +# when disabling read repair, which can further improve throughput. +# Only appropriate for single-datacenter deployments. +# - PropertyFileSnitch: +# Proximity is determined by rack and data center, which are +# explicitly configured in cassandra-topology.properties. +# - RackInferringSnitch: +# Proximity is determined by rack and data center, which are +# assumed to correspond to the 3rd and 2nd octet of each node's +# IP address, respectively. Unless this happens to match your +# deployment conventions (as it did Facebook's), this is best used +# as an example of writing a custom Snitch class. +# - Ec2Snitch: +# Appropriate for EC2 deployments in a single Region. Loads Region +# and Availability Zone information from the EC2 API. The Region is +# treated as the Datacenter, and the Availability Zone as the rack. +# Only private IPs are used, so this will not work across multiple +# Regions. +# - Ec2MultiRegionSnitch: +# Uses public IPs as broadcast_address to allow cross-region +# connectivity. (Thus, you should set seed addresses to the public +# IP as well.) You will need to open the storage_port or +# ssl_storage_port on the public IP firewall. (For intra-Region +# traffic, Cassandra will switch to the private IP after +# establishing a connection.) +# +# You can use a custom Snitch by setting this to the full class name +# of the snitch, which will be assumed to be on your classpath. +endpoint_snitch: SimpleSnitch + +# controls how often to perform the more expensive part of host score +# calculation +dynamic_snitch_update_interval_in_ms: 100 +# controls how often to reset all host scores, allowing a bad host to +# possibly recover +dynamic_snitch_reset_interval_in_ms: 600000 +# if set greater than zero and read_repair_chance is < 1.0, this will allow +# 'pinning' of replicas to hosts in order to increase cache capacity. +# The badness threshold will control how much worse the pinned host has to be +# before the dynamic snitch will prefer other replicas over it. This is +# expressed as a double which represents a percentage. Thus, a value of +# 0.2 means Cassandra would continue to prefer the static snitch values +# until the pinned host was 20% worse than the fastest. +dynamic_snitch_badness_threshold: 0.1 + +# request_scheduler -- Set this to a class that implements +# RequestScheduler, which will schedule incoming client requests +# according to the specific policy. This is useful for multi-tenancy +# with a single Cassandra cluster. +# NOTE: This is specifically for requests from the client and does +# not affect inter node communication. +# org.apache.cassandra.scheduler.NoScheduler - No scheduling takes place +# org.apache.cassandra.scheduler.RoundRobinScheduler - Round robin of +# client requests to a node with a separate queue for each +# request_scheduler_id. The scheduler is further customized by +# request_scheduler_options as described below. 
+request_scheduler: org.apache.cassandra.scheduler.NoScheduler + +# Scheduler Options vary based on the type of scheduler +# NoScheduler - Has no options +# RoundRobin +# - throttle_limit -- The throttle_limit is the number of in-flight +# requests per client. Requests beyond +# that limit are queued up until +# running requests can complete. +# The value of 80 here is twice the number of +# concurrent_reads + concurrent_writes. +# - default_weight -- default_weight is optional and allows for +# overriding the default which is 1. +# - weights -- Weights are optional and will default to 1 or the +# overridden default_weight. The weight translates into how +# many requests are handled during each turn of the +# RoundRobin, based on the scheduler id. +# +# request_scheduler_options: +# throttle_limit: 80 +# default_weight: 5 +# weights: +# Keyspace1: 1 +# Keyspace2: 5 + +# request_scheduler_id -- An identifer based on which to perform +# the request scheduling. Currently the only valid option is keyspace. +# request_scheduler_id: keyspace + +# index_interval controls the sampling of entries from the primrary +# row index in terms of space versus time. The larger the interval, +# the smaller and less effective the sampling will be. In technicial +# terms, the interval coresponds to the number of index entries that +# are skipped between taking each sample. All the sampled entries +# must fit in memory. Generally, a value between 128 and 512 here +# coupled with a large key cache size on CFs results in the best trade +# offs. This value is not often changed, however if you have many +# very small rows (many to an OS page), then increasing this will +# often lower memory usage without a impact on performance. +index_interval: 128 + +# Enable or disable inter-node encryption +# Default settings are TLS v1, RSA 1024-bit keys (it is imperative that +# users generate their own keys) TLS_RSA_WITH_AES_128_CBC_SHA as the cipher +# suite for authentication, key exchange and encryption of the actual data transfers. +# NOTE: No custom encryption options are enabled at the moment +# The available internode options are : all, none, dc, rack +# +# If set to dc cassandra will encrypt the traffic between the DCs +# If set to rack cassandra will encrypt the traffic between the racks +# +# The passwords used in these options must match the passwords used when generating +# the keystore and truststore. 
For instructions on generating these files, see: +# http://download.oracle.com/javase/6/docs/technotes/guides/security/jsse/JSSERefGuide.html#CreateKeystore +# +encryption_options: + internode_encryption: none + keystore: conf/.keystore + keystore_password: cassandra + truststore: conf/.truststore + truststore_password: cassandra + # More advanced defaults below: + # protocol: TLS + # algorithm: SunX509 + # store_type: JKS + # cipher_suites: [TLS_RSA_WITH_AES_128_CBC_SHA,TLS_RSA_WITH_AES_256_CBC_SHA] \ No newline at end of file diff --git a/persistence-modules/spring-data-redis/pom.xml b/persistence-modules/spring-data-redis/pom.xml index fb80b0413f..4ae8ac0a87 100644 --- a/persistence-modules/spring-data-redis/pom.xml +++ b/persistence-modules/spring-data-redis/pom.xml @@ -79,6 +79,21 @@ + + + + org.apache.maven.plugins + maven-surefire-plugin + ${maven-surefire-plugin.version} + + true + true + -Xmx1024m + + + + + 3.2.4 2.9.0 diff --git a/persistence-modules/spring-data-redis/src/test/java/com/baeldung/spring/data/reactive/redis/template/RedisKeyCommandsIntegrationTest.java b/persistence-modules/spring-data-redis/src/test/java/com/baeldung/spring/data/reactive/redis/template/RedisKeyCommandsIntegrationTest.java index e48aa1e06a..1333f94653 100644 --- a/persistence-modules/spring-data-redis/src/test/java/com/baeldung/spring/data/reactive/redis/template/RedisKeyCommandsIntegrationTest.java +++ b/persistence-modules/spring-data-redis/src/test/java/com/baeldung/spring/data/reactive/redis/template/RedisKeyCommandsIntegrationTest.java @@ -2,6 +2,9 @@ package com.baeldung.spring.data.reactive.redis.template; import com.baeldung.spring.data.reactive.redis.SpringRedisReactiveApplication; + +import org.junit.AfterClass; +import org.junit.BeforeClass; import org.junit.Test; import org.junit.runner.RunWith; import org.springframework.beans.factory.annotation.Autowired; @@ -9,22 +12,40 @@ import org.springframework.boot.test.context.SpringBootTest; import org.springframework.data.redis.connection.ReactiveKeyCommands; import org.springframework.data.redis.connection.ReactiveStringCommands; import org.springframework.data.redis.connection.ReactiveStringCommands.SetCommand; +import org.springframework.test.annotation.DirtiesContext; +import org.springframework.test.annotation.DirtiesContext.ClassMode; import org.springframework.test.context.junit4.SpringRunner; import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; import reactor.test.StepVerifier; +import redis.embedded.RedisServerBuilder; +import java.io.IOException; import java.nio.ByteBuffer; @RunWith(SpringRunner.class) @SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT, classes = SpringRedisReactiveApplication.class) +@DirtiesContext(classMode = ClassMode.BEFORE_CLASS) public class RedisKeyCommandsIntegrationTest { + + private static redis.embedded.RedisServer redisServer; @Autowired private ReactiveKeyCommands keyCommands; @Autowired private ReactiveStringCommands stringCommands; + + @BeforeClass + public static void startRedisServer() throws IOException { + redisServer = new RedisServerBuilder().port(6379).setting("maxheap 256M").build(); + redisServer.start(); + } + + @AfterClass + public static void stopRedisServer() throws IOException { + redisServer.stop(); + } @Test public void givenFluxOfKeys_whenPerformOperations_thenPerformOperations() { diff --git a/persistence-modules/spring-data-redis/src/test/java/com/baeldung/spring/data/reactive/redis/template/RedisTemplateListOpsIntegrationTest.java 
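Each of the reactive Redis template tests touched here now boots its own embedded Redis in @BeforeClass and tears it down in @AfterClass, always on port 6379 with a maxheap setting. The recurring lifecycle, shown on its own for clarity (port and heap values mirror the tests above; the class name is only for illustration):

```java
import java.io.IOException;

import redis.embedded.RedisServer;
import redis.embedded.RedisServerBuilder;

public class EmbeddedRedisLifecycle {

    private static RedisServer redisServer;

    // Call from a @BeforeClass method: builds and starts an embedded Redis on 6379
    public static void start() throws IOException {
        redisServer = new RedisServerBuilder().port(6379).setting("maxheap 128M").build();
        redisServer.start();
    }

    // Call from an @AfterClass method: shuts the embedded server down again
    public static void stop() throws IOException {
        redisServer.stop();
    }
}
```

Keeping the port fixed at 6379 assumes nothing else on the build machine is already listening there.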
b/persistence-modules/spring-data-redis/src/test/java/com/baeldung/spring/data/reactive/redis/template/RedisTemplateListOpsIntegrationTest.java index 3ebeff87b1..88c4fa6eed 100644 --- a/persistence-modules/spring-data-redis/src/test/java/com/baeldung/spring/data/reactive/redis/template/RedisTemplateListOpsIntegrationTest.java +++ b/persistence-modules/spring-data-redis/src/test/java/com/baeldung/spring/data/reactive/redis/template/RedisTemplateListOpsIntegrationTest.java @@ -2,27 +2,48 @@ package com.baeldung.spring.data.reactive.redis.template; import com.baeldung.spring.data.reactive.redis.SpringRedisReactiveApplication; + +import java.io.IOException; + +import org.junit.AfterClass; import org.junit.Before; +import org.junit.BeforeClass; import org.junit.Test; import org.junit.runner.RunWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.data.redis.core.ReactiveListOperations; import org.springframework.data.redis.core.ReactiveRedisTemplate; +import org.springframework.test.annotation.DirtiesContext; +import org.springframework.test.annotation.DirtiesContext.ClassMode; import org.springframework.test.context.junit4.SpringRunner; import reactor.core.publisher.Mono; import reactor.test.StepVerifier; +import redis.embedded.RedisServerBuilder; @RunWith(SpringRunner.class) @SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT, classes = SpringRedisReactiveApplication.class) +@DirtiesContext(classMode = ClassMode.BEFORE_CLASS) public class RedisTemplateListOpsIntegrationTest { private static final String LIST_NAME = "demo_list"; + private static redis.embedded.RedisServer redisServer; @Autowired private ReactiveRedisTemplate redisTemplate; private ReactiveListOperations reactiveListOps; + + @BeforeClass + public static void startRedisServer() throws IOException { + redisServer = new RedisServerBuilder().port(6379).setting("maxheap 128M").build(); + redisServer.start(); + } + + @AfterClass + public static void stopRedisServer() throws IOException { + redisServer.stop(); + } @Before public void setup() { diff --git a/persistence-modules/spring-data-redis/src/test/java/com/baeldung/spring/data/reactive/redis/template/RedisTemplateValueOpsIntegrationTest.java b/persistence-modules/spring-data-redis/src/test/java/com/baeldung/spring/data/reactive/redis/template/RedisTemplateValueOpsIntegrationTest.java index 9490568089..afa5267582 100644 --- a/persistence-modules/spring-data-redis/src/test/java/com/baeldung/spring/data/reactive/redis/template/RedisTemplateValueOpsIntegrationTest.java +++ b/persistence-modules/spring-data-redis/src/test/java/com/baeldung/spring/data/reactive/redis/template/RedisTemplateValueOpsIntegrationTest.java @@ -1,29 +1,51 @@ package com.baeldung.spring.data.reactive.redis.template; -import com.baeldung.spring.data.reactive.redis.SpringRedisReactiveApplication; -import com.baeldung.spring.data.reactive.redis.model.Employee; +import java.io.IOException; +import java.time.Duration; + +import org.junit.AfterClass; import org.junit.Before; +import org.junit.BeforeClass; import org.junit.Test; import org.junit.runner.RunWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.data.redis.core.ReactiveRedisTemplate; import org.springframework.data.redis.core.ReactiveValueOperations; +import org.springframework.test.annotation.DirtiesContext; +import 
org.springframework.test.annotation.DirtiesContext.ClassMode; import org.springframework.test.context.junit4.SpringRunner; + +import com.baeldung.spring.data.reactive.redis.SpringRedisReactiveApplication; +import com.baeldung.spring.data.reactive.redis.model.Employee; + import reactor.core.publisher.Mono; import reactor.test.StepVerifier; - -import java.time.Duration; +import redis.embedded.RedisServerBuilder; @RunWith(SpringRunner.class) @SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT, classes = SpringRedisReactiveApplication.class) +@DirtiesContext(classMode = ClassMode.BEFORE_CLASS) public class RedisTemplateValueOpsIntegrationTest { + + private static redis.embedded.RedisServer redisServer; @Autowired private ReactiveRedisTemplate redisTemplate; private ReactiveValueOperations reactiveValueOps; + + @BeforeClass + public static void startRedisServer() throws IOException { + redisServer = new RedisServerBuilder().port(6379).setting("maxheap 256M").build(); + redisServer.start(); + } + + @AfterClass + public static void stopRedisServer() throws IOException { + redisServer.stop(); + } @Before public void setup() { diff --git a/persistence-modules/spring-data-redis/src/test/java/com/baeldung/spring/data/redis/RedisMessageListenerIntegrationTest.java b/persistence-modules/spring-data-redis/src/test/java/com/baeldung/spring/data/redis/RedisMessageListenerIntegrationTest.java index 99febb6430..1c69b63c09 100644 --- a/persistence-modules/spring-data-redis/src/test/java/com/baeldung/spring/data/redis/RedisMessageListenerIntegrationTest.java +++ b/persistence-modules/spring-data-redis/src/test/java/com/baeldung/spring/data/redis/RedisMessageListenerIntegrationTest.java @@ -19,9 +19,11 @@ import com.baeldung.spring.data.redis.config.RedisConfig; import com.baeldung.spring.data.redis.queue.RedisMessagePublisher; import com.baeldung.spring.data.redis.queue.RedisMessageSubscriber; +import redis.embedded.RedisServerBuilder; + @RunWith(SpringJUnit4ClassRunner.class) @ContextConfiguration(classes = RedisConfig.class) -@DirtiesContext(classMode = ClassMode.AFTER_CLASS) +@DirtiesContext(classMode = ClassMode.BEFORE_CLASS) public class RedisMessageListenerIntegrationTest { private static redis.embedded.RedisServer redisServer; @@ -31,7 +33,7 @@ public class RedisMessageListenerIntegrationTest { @BeforeClass public static void startRedisServer() throws IOException { - redisServer = new redis.embedded.RedisServer(6380); + redisServer = new RedisServerBuilder().port(6379).setting("maxheap 256M").build(); redisServer.start(); } diff --git a/persistence-modules/spring-data-redis/src/test/java/com/baeldung/spring/data/redis/repo/StudentRepositoryIntegrationTest.java b/persistence-modules/spring-data-redis/src/test/java/com/baeldung/spring/data/redis/repo/StudentRepositoryIntegrationTest.java index 43aadefc01..b1a36475c3 100644 --- a/persistence-modules/spring-data-redis/src/test/java/com/baeldung/spring/data/redis/repo/StudentRepositoryIntegrationTest.java +++ b/persistence-modules/spring-data-redis/src/test/java/com/baeldung/spring/data/redis/repo/StudentRepositoryIntegrationTest.java @@ -20,9 +20,11 @@ import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; import com.baeldung.spring.data.redis.config.RedisConfig; import com.baeldung.spring.data.redis.model.Student; +import redis.embedded.RedisServerBuilder; + @RunWith(SpringJUnit4ClassRunner.class) @ContextConfiguration(classes = RedisConfig.class) -@DirtiesContext(classMode = ClassMode.AFTER_CLASS) 
+@DirtiesContext(classMode = ClassMode.BEFORE_CLASS) public class StudentRepositoryIntegrationTest { @Autowired @@ -32,7 +34,7 @@ public class StudentRepositoryIntegrationTest { @BeforeClass public static void startRedisServer() throws IOException { - redisServer = new redis.embedded.RedisServer(6380); + redisServer = new RedisServerBuilder().port(6379).setting("maxheap 128M").build(); redisServer.start(); } diff --git a/pom.xml b/pom.xml index a5c506b0c5..9760e06bc1 100644 --- a/pom.xml +++ b/pom.xml @@ -646,6 +646,7 @@ spring-boot-ops-2 spring-boot-rest spring-boot-data + spring-boot-parent spring-boot-property-exp spring-boot-security spring-boot-testing @@ -1311,6 +1312,7 @@ spring-boot-ops-2 spring-boot-rest spring-boot-data + spring-boot-parent spring-boot-property-exp spring-boot-security spring-boot-vue diff --git a/spf4j/pom.xml b/spf4j/pom.xml new file mode 100644 index 0000000000..43a8028dc4 --- /dev/null +++ b/spf4j/pom.xml @@ -0,0 +1,21 @@ + + + 4.0.0 + com.baeldung.spf4j + spf4j + spf4j + pom + + + parent-modules + com.baeldung + 1.0.0-SNAPSHOT + + + + spf4j-core-app + spf4j-aspects-app + + + diff --git a/spf4j/spf4j-aspects-app/pom.xml b/spf4j/spf4j-aspects-app/pom.xml new file mode 100644 index 0000000000..9fccec673a --- /dev/null +++ b/spf4j/spf4j-aspects-app/pom.xml @@ -0,0 +1,83 @@ + + 4.0.0 + spf4j-aspects-app + 0.0.1-SNAPSHOT + jar + spf4j-aspects-app + + parent-modules + com.baeldung + 1.0.0-SNAPSHOT + ../../ + + + + org.spf4j + spf4j-aspects + ${spf4j.version} + + + org.spf4j + spf4j-ui + ${spf4j.version} + + + org.slf4j + slf4j-api + ${org.slf4j.version} + + + + spf4j-aspects-app + + + org.apache.maven.plugins + maven-compiler-plugin + 3.8.0 + + 1.8 + 1.8 + + + + org.apache.maven.plugins + maven-dependency-plugin + 3.1.1 + + + copy-dependencies + package + + copy-dependencies + + + + ${project.build.directory}/dependency-jars/ + + + + + + + org.apache.maven.plugins + maven-jar-plugin + + + + com.baeldung.spf4j.aspects.App + true + dependency-jars/ + + + + + + + + UTF-8 + 8.6.10 + 1.7.21 + + diff --git a/spf4j/spf4j-aspects-app/src/main/java/com/baeldung/spf4j/aspects/App.java b/spf4j/spf4j-aspects-app/src/main/java/com/baeldung/spf4j/aspects/App.java new file mode 100644 index 0000000000..6d74292c75 --- /dev/null +++ b/spf4j/spf4j-aspects-app/src/main/java/com/baeldung/spf4j/aspects/App.java @@ -0,0 +1,28 @@ +package com.baeldung.spf4j.aspects; + +import java.util.Random; + +import org.spf4j.annotations.PerformanceMonitor; + +public class App { + + public static void main(String[] args) throws InterruptedException { + Spf4jConfig.initialize(); + Random random = new Random(); + for (int i = 0; i < 100; i++) { + long numberToCheck = random.nextInt(999_999_999 - 100_000_000 + 1) + 100_000_000; + isPrimeNumber(numberToCheck); + } + System.exit(0); + } + + @PerformanceMonitor(warnThresholdMillis = 1, errorThresholdMillis = 100, recorderSource = Spf4jConfig.RecorderSourceForIsPrimeNumber.class) + public static boolean isPrimeNumber(long number) { + for (long i = 2; i <= number / 2; i++) { + if (number % i == 0) + return false; + } + return true; + } + +} diff --git a/spf4j/spf4j-aspects-app/src/main/java/com/baeldung/spf4j/aspects/Spf4jConfig.java b/spf4j/spf4j-aspects-app/src/main/java/com/baeldung/spf4j/aspects/Spf4jConfig.java new file mode 100644 index 0000000000..a12213f0cd --- /dev/null +++ b/spf4j/spf4j-aspects-app/src/main/java/com/baeldung/spf4j/aspects/Spf4jConfig.java @@ -0,0 +1,37 @@ +package com.baeldung.spf4j.aspects; + +import java.io.File; + +import 
org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.spf4j.annotations.RecorderSourceInstance; +import org.spf4j.perf.MeasurementRecorderSource; +import org.spf4j.perf.impl.RecorderFactory; + +public class Spf4jConfig { + private static final Logger LOGGER = LoggerFactory.getLogger(App.class); + + public static void initialize() { + String tsDbFile = System.getProperty("user.dir") + File.separator + "spf4j-performance-monitoring.tsdb2"; + String tsTextFile = System.getProperty("user.dir") + File.separator + "spf4j-performance-monitoring.txt"; + + LOGGER.info("\nTime Series DB (TSDB) : {}\nTime Series text file : {}", tsDbFile, tsTextFile); + System.setProperty("spf4j.perf.ms.config", "TSDB@" + tsDbFile + "," + "TSDB_TXT@" + tsTextFile); + } + + public static final class RecorderSourceForIsPrimeNumber extends RecorderSourceInstance { + public static final MeasurementRecorderSource INSTANCE; + + static { + Object forWhat = App.class + " isPrimeNumber"; + String unitOfMeasurement = "ms"; + int sampleTimeMillis = 1_000; + int factor = 10; + int lowerMagnitude = 0; + int higherMagnitude = 4; + int quantasPerMagnitude = 10; + INSTANCE = RecorderFactory.createScalableQuantizedRecorderSource(forWhat, unitOfMeasurement, + sampleTimeMillis, factor, lowerMagnitude, higherMagnitude, quantasPerMagnitude); + } + } +} diff --git a/spf4j/spf4j-aspects-app/src/main/resources/META-INF/aop.xml b/spf4j/spf4j-aspects-app/src/main/resources/META-INF/aop.xml new file mode 100644 index 0000000000..1643f5fba9 --- /dev/null +++ b/spf4j/spf4j-aspects-app/src/main/resources/META-INF/aop.xml @@ -0,0 +1,12 @@ + + + + + + + + + + \ No newline at end of file diff --git a/spf4j/spf4j-aspects-app/src/main/resources/logback.xml b/spf4j/spf4j-aspects-app/src/main/resources/logback.xml new file mode 100644 index 0000000000..4677fac9bf --- /dev/null +++ b/spf4j/spf4j-aspects-app/src/main/resources/logback.xml @@ -0,0 +1,11 @@ + + + + [%level] %msg%n + + + + + + \ No newline at end of file diff --git a/spf4j/spf4j-core-app/pom.xml b/spf4j/spf4j-core-app/pom.xml new file mode 100644 index 0000000000..ae346065ef --- /dev/null +++ b/spf4j/spf4j-core-app/pom.xml @@ -0,0 +1,83 @@ + + 4.0.0 + spf4j-core-app + 0.0.1-SNAPSHOT + jar + spf4j-core-app + + parent-modules + com.baeldung + 1.0.0-SNAPSHOT + ../../ + + + + org.spf4j + spf4j-core + ${spf4j.version} + + + org.spf4j + spf4j-ui + ${spf4j.version} + + + org.slf4j + slf4j-api + ${org.slf4j.version} + + + + spf4j-core-app + + + org.apache.maven.plugins + maven-compiler-plugin + 3.8.0 + + 1.8 + 1.8 + + + + org.apache.maven.plugins + maven-dependency-plugin + 3.1.1 + + + copy-dependencies + package + + copy-dependencies + + + + ${project.build.directory}/dependency-jars/ + + + + + + + org.apache.maven.plugins + maven-jar-plugin + + + + com.baeldung.spf4j.core.App + true + dependency-jars/ + + + + + + + + UTF-8 + 8.6.10 + 1.7.21 + + diff --git a/spf4j/spf4j-core-app/src/main/java/com/baeldung/spf4j/core/App.java b/spf4j/spf4j-core-app/src/main/java/com/baeldung/spf4j/core/App.java new file mode 100644 index 0000000000..fa107d8e4f --- /dev/null +++ b/spf4j/spf4j-core-app/src/main/java/com/baeldung/spf4j/core/App.java @@ -0,0 +1,34 @@ +package com.baeldung.spf4j.core; + +import java.util.Random; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.spf4j.perf.MeasurementRecorder; + +public class App { + private static final Logger LOGGER = LoggerFactory.getLogger(App.class); + + public static void main(String[] args) throws InterruptedException { + 
Spf4jConfig.initialize(); + MeasurementRecorder measurementRecorder = Spf4jConfig.getMeasurementRecorder(App.class + " isPrimeNumber"); + Random random = new Random(); + for (int i = 0; i < 100; i++) { + long numberToCheck = random.nextInt(999_999_999 - 100_000_000 + 1) + 100_000_000; + long startTime = System.currentTimeMillis(); + boolean isPrime = isPrimeNumber(numberToCheck); + measurementRecorder.record(System.currentTimeMillis() - startTime); + LOGGER.info("{}. {} is prime? {}", i + 1, numberToCheck, isPrime); + } + System.exit(0); + } + + private static boolean isPrimeNumber(long number) { + for (long i = 2; i <= number / 2; i++) { + if (number % i == 0) + return false; + } + return true; + } + +} diff --git a/spf4j/spf4j-core-app/src/main/java/com/baeldung/spf4j/core/Spf4jConfig.java b/spf4j/spf4j-core-app/src/main/java/com/baeldung/spf4j/core/Spf4jConfig.java new file mode 100644 index 0000000000..0f806e1576 --- /dev/null +++ b/spf4j/spf4j-core-app/src/main/java/com/baeldung/spf4j/core/Spf4jConfig.java @@ -0,0 +1,31 @@ +package com.baeldung.spf4j.core; + +import java.io.File; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.spf4j.perf.MeasurementRecorder; +import org.spf4j.perf.impl.RecorderFactory; + +public class Spf4jConfig { + private static final Logger LOGGER = LoggerFactory.getLogger(App.class); + + public static void initialize() { + String tsDbFile = System.getProperty("user.dir") + File.separator + "spf4j-performance-monitoring.tsdb2"; + String tsTextFile = System.getProperty("user.dir") + File.separator + "spf4j-performance-monitoring.txt"; + + LOGGER.info("\nTime Series DB (TSDB) : {}\nTime Series text file : {}", tsDbFile, tsTextFile); + System.setProperty("spf4j.perf.ms.config", "TSDB@" + tsDbFile + "," + "TSDB_TXT@" + tsTextFile); + } + + public static MeasurementRecorder getMeasurementRecorder(Object forWhat) { + String unitOfMeasurement = "ms"; + int sampleTimeMillis = 1_000; + int factor = 10; + int lowerMagnitude = 0; + int higherMagnitude = 4; + int quantasPerMagnitude = 10; + + return RecorderFactory.createScalableQuantizedRecorder(forWhat, unitOfMeasurement, sampleTimeMillis, factor, lowerMagnitude, higherMagnitude, quantasPerMagnitude); + } +} diff --git a/spf4j/spf4j-core-app/src/main/resources/logback.xml b/spf4j/spf4j-core-app/src/main/resources/logback.xml new file mode 100644 index 0000000000..4677fac9bf --- /dev/null +++ b/spf4j/spf4j-core-app/src/main/resources/logback.xml @@ -0,0 +1,11 @@ + + + + [%level] %msg%n + + + + + + \ No newline at end of file diff --git a/spring-5-reactive/src/test/java/com/baeldung/reactive/errorhandling/ErrorHandlingIntegrationTest.java b/spring-5-reactive/src/test/java/com/baeldung/reactive/errorhandling/ErrorHandlingIntegrationTest.java index 10cfaffce4..42da90ecd5 100644 --- a/spring-5-reactive/src/test/java/com/baeldung/reactive/errorhandling/ErrorHandlingIntegrationTest.java +++ b/spring-5-reactive/src/test/java/com/baeldung/reactive/errorhandling/ErrorHandlingIntegrationTest.java @@ -1,10 +1,13 @@ package com.baeldung.reactive.errorhandling; -import java.io.IOException; import static org.junit.Assert.assertEquals; + +import java.io.IOException; + import org.junit.Test; import org.junit.runner.RunWith; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.autoconfigure.web.reactive.AutoConfigureWebTestClient; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.boot.test.context.SpringBootTest.WebEnvironment; 
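The spf4j-core application above always measures the same isPrimeNumber call, but the recorder it configures is general purpose: RecorderFactory hands back a MeasurementRecorder that can time any block and feeds the TSDB files set through spf4j.perf.ms.config. A small sketch reusing the exact factory parameters from Spf4jConfig (the "example-work" label and the Supplier-based wrapper are made up for illustration):

```java
import java.util.function.Supplier;

import org.spf4j.perf.MeasurementRecorder;
import org.spf4j.perf.impl.RecorderFactory;

public class TimedBlock {

    // Same quantized-recorder parameters as Spf4jConfig.getMeasurementRecorder above
    private static final MeasurementRecorder RECORDER = RecorderFactory
            .createScalableQuantizedRecorder("example-work", "ms", 1_000, 10, 0, 4, 10);

    // Runs the supplied work and records its wall-clock duration in milliseconds
    public static <T> T timed(Supplier<T> work) {
        long start = System.currentTimeMillis();
        try {
            return work.get();
        } finally {
            RECORDER.record(System.currentTimeMillis() - start);
        }
    }
}
```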
import org.springframework.http.MediaType; @@ -15,6 +18,7 @@ import org.springframework.test.web.reactive.server.WebTestClient; @RunWith(SpringRunner.class) @SpringBootTest(webEnvironment = WebEnvironment.DEFINED_PORT) @WithMockUser +@AutoConfigureWebTestClient(timeout = "10000") public class ErrorHandlingIntegrationTest { @Autowired diff --git a/spring-5-reactive/src/test/java/com/baeldung/reactive/filters/PlayerHandlerIntegrationTest.java b/spring-5-reactive/src/test/java/com/baeldung/reactive/filters/PlayerHandlerIntegrationTest.java index fbf46a93cc..c1523cb5ee 100644 --- a/spring-5-reactive/src/test/java/com/baeldung/reactive/filters/PlayerHandlerIntegrationTest.java +++ b/spring-5-reactive/src/test/java/com/baeldung/reactive/filters/PlayerHandlerIntegrationTest.java @@ -3,6 +3,7 @@ package com.baeldung.reactive.filters; import org.junit.Test; import org.junit.runner.RunWith; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.autoconfigure.web.reactive.AutoConfigureWebTestClient; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.security.test.context.support.WithMockUser; import org.springframework.test.context.junit4.SpringRunner; @@ -14,6 +15,7 @@ import static org.junit.Assert.assertEquals; @RunWith(SpringRunner.class) @SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT) @WithMockUser +@AutoConfigureWebTestClient(timeout = "10000") public class PlayerHandlerIntegrationTest { @Autowired diff --git a/spring-5-reactive/src/test/java/com/baeldung/stepverifier/PostExecutionUnitTest.java b/spring-5-reactive/src/test/java/com/baeldung/stepverifier/PostExecutionUnitTest.java index 17fea6b50b..4395e0b048 100644 --- a/spring-5-reactive/src/test/java/com/baeldung/stepverifier/PostExecutionUnitTest.java +++ b/spring-5-reactive/src/test/java/com/baeldung/stepverifier/PostExecutionUnitTest.java @@ -28,7 +28,7 @@ public class PostExecutionUnitTest { .expectComplete() .verifyThenAssertThat() .hasDropped(4) - .tookLessThan(Duration.ofMillis(1050)); + .tookLessThan(Duration.ofMillis(1500)); } } diff --git a/spring-activiti/src/test/java/com/baeldung/SpringContextIntegrationTest.java b/spring-activiti/src/test/java/com/baeldung/SpringContextIntegrationTest.java index ce48080753..89411df976 100644 --- a/spring-activiti/src/test/java/com/baeldung/SpringContextIntegrationTest.java +++ b/spring-activiti/src/test/java/com/baeldung/SpringContextIntegrationTest.java @@ -2,6 +2,7 @@ package com.baeldung; import org.junit.Test; import org.junit.runner.RunWith; +import org.springframework.boot.test.autoconfigure.jdbc.AutoConfigureTestDatabase; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.test.context.junit4.SpringRunner; @@ -9,6 +10,7 @@ import com.baeldung.activitiwithspring.ActivitiWithSpringApplication; @RunWith(SpringRunner.class) @SpringBootTest(classes = ActivitiWithSpringApplication.class) +@AutoConfigureTestDatabase public class SpringContextIntegrationTest { @Test diff --git a/spring-activiti/src/test/java/com/baeldung/activitiwithspring/ActivitiSpringSecurityIntegrationTest.java b/spring-activiti/src/test/java/com/baeldung/activitiwithspring/ActivitiSpringSecurityIntegrationTest.java index 53bdcee888..7f99483fcf 100644 --- a/spring-activiti/src/test/java/com/baeldung/activitiwithspring/ActivitiSpringSecurityIntegrationTest.java +++ b/spring-activiti/src/test/java/com/baeldung/activitiwithspring/ActivitiSpringSecurityIntegrationTest.java @@ -4,6 +4,7 
@@ import org.activiti.engine.IdentityService; import org.junit.Test; import org.junit.runner.RunWith; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.autoconfigure.jdbc.AutoConfigureTestDatabase; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.security.core.userdetails.UsernameNotFoundException; import org.springframework.test.context.junit4.SpringRunner; @@ -14,6 +15,7 @@ import com.baeldung.activiti.security.withspring.ActivitiSpringSecurityApplicati @RunWith(SpringRunner.class) @SpringBootTest(classes = ActivitiSpringSecurityApplication.class) @WebAppConfiguration +@AutoConfigureTestDatabase public class ActivitiSpringSecurityIntegrationTest { @Autowired private IdentityService identityService; diff --git a/spring-activiti/src/test/java/com/baeldung/activitiwithspring/ActivitiWithSpringApplicationIntegrationTest.java b/spring-activiti/src/test/java/com/baeldung/activitiwithspring/ActivitiWithSpringApplicationIntegrationTest.java index 8c1e400215..d289693a73 100644 --- a/spring-activiti/src/test/java/com/baeldung/activitiwithspring/ActivitiWithSpringApplicationIntegrationTest.java +++ b/spring-activiti/src/test/java/com/baeldung/activitiwithspring/ActivitiWithSpringApplicationIntegrationTest.java @@ -2,11 +2,13 @@ package com.baeldung.activitiwithspring; import org.junit.Test; import org.junit.runner.RunWith; +import org.springframework.boot.test.autoconfigure.jdbc.AutoConfigureTestDatabase; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.test.context.junit4.SpringRunner; @RunWith(SpringRunner.class) -@SpringBootTest +@SpringBootTest(classes = ActivitiWithSpringApplication.class) +@AutoConfigureTestDatabase public class ActivitiWithSpringApplicationIntegrationTest { @Test diff --git a/spring-activiti/src/test/java/com/baeldung/activitiwithspring/ProcessEngineCreationIntegrationTest.java b/spring-activiti/src/test/java/com/baeldung/activitiwithspring/ProcessEngineCreationIntegrationTest.java index 00538f8e6e..5052f84d6a 100644 --- a/spring-activiti/src/test/java/com/baeldung/activitiwithspring/ProcessEngineCreationIntegrationTest.java +++ b/spring-activiti/src/test/java/com/baeldung/activitiwithspring/ProcessEngineCreationIntegrationTest.java @@ -4,7 +4,7 @@ import org.activiti.engine.ProcessEngine; import org.activiti.engine.ProcessEngineConfiguration; import org.activiti.engine.ProcessEngines; import org.junit.Test; - +import org.junit.After; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; @@ -62,4 +62,9 @@ public class ProcessEngineCreationIntegrationTest { assertNotNull(processEngine); assertEquals("sa", processEngine.getProcessEngineConfiguration().getJdbcUsername()); } + + @After + public void cleanup() { + ProcessEngines.destroy(); + } } diff --git a/spring-boot-parent/pom.xml b/spring-boot-parent/pom.xml new file mode 100644 index 0000000000..0924917505 --- /dev/null +++ b/spring-boot-parent/pom.xml @@ -0,0 +1,26 @@ + + + + 4.0.0 + com.baeldung + spring-boot-parent + 1.0.0-SNAPSHOT + spring-boot-parent + spring-boot-parent + pom + + + com.baeldung + parent-modules + 1.0.0-SNAPSHOT + .. 
+ + + + spring-boot-with-starter-parent + spring-boot-with-custom-parent + + + diff --git a/spring-boot-parent/spring-boot-with-custom-parent/pom.xml b/spring-boot-parent/spring-boot-with-custom-parent/pom.xml new file mode 100644 index 0000000000..de2946fbb2 --- /dev/null +++ b/spring-boot-parent/spring-boot-with-custom-parent/pom.xml @@ -0,0 +1,41 @@ + + + + 4.0.0 + spring-boot-with-custom-parent + 1.0.0-SNAPSHOT + spring-boot-with-custom-parent + + + com.baeldung + spring-boot-parent + 1.0.0-SNAPSHOT + + + + + + org.springframework.boot + spring-boot-dependencies + ${spring-boot.version} + pom + import + + + + + + + org.springframework.boot + spring-boot-starter-web + + + + + 1.8 + 2.1.5.RELEASE + + + diff --git a/spring-boot-parent/spring-boot-with-custom-parent/src/main/java/com/baeldung/customparent/SpringBootStarterCustomParentApplication.java b/spring-boot-parent/spring-boot-with-custom-parent/src/main/java/com/baeldung/customparent/SpringBootStarterCustomParentApplication.java new file mode 100644 index 0000000000..169717d7bb --- /dev/null +++ b/spring-boot-parent/spring-boot-with-custom-parent/src/main/java/com/baeldung/customparent/SpringBootStarterCustomParentApplication.java @@ -0,0 +1,13 @@ +package com.baeldung.customparent; + +import org.springframework.boot.SpringApplication; +import org.springframework.boot.autoconfigure.SpringBootApplication; + +@SpringBootApplication +public class SpringBootStarterCustomParentApplication { + + public static void main(String[] args) { + SpringApplication.run(SpringBootStarterCustomParentApplication.class, args); + System.out.println("Spring boot application running without starter parent"); + } +} diff --git a/spring-boot-parent/spring-boot-with-starter-parent/pom.xml b/spring-boot-parent/spring-boot-with-starter-parent/pom.xml new file mode 100644 index 0000000000..1c6479ca60 --- /dev/null +++ b/spring-boot-parent/spring-boot-with-starter-parent/pom.xml @@ -0,0 +1,45 @@ + + + + 4.0.0 + com.baeldung + spring-boot-with-starter-parent + 1.0.0-SNAPSHOT + spring-boot-with-starter-parent + + + org.springframework.boot + spring-boot-starter-parent + 2.1.5.RELEASE + + + + + + + org.springframework.boot + spring-boot-starter-data-jpa + 2.1.1.RELEASE + + + + + + + org.springframework.boot + spring-boot-starter-web + + + junit + junit + + + + + 1.8 + 4.11 + + + diff --git a/spring-boot-parent/spring-boot-with-starter-parent/src/main/java/com/baeldung/starterparent/SpringBootStarterParentApplication.java b/spring-boot-parent/spring-boot-with-starter-parent/src/main/java/com/baeldung/starterparent/SpringBootStarterParentApplication.java new file mode 100644 index 0000000000..f987165ce0 --- /dev/null +++ b/spring-boot-parent/spring-boot-with-starter-parent/src/main/java/com/baeldung/starterparent/SpringBootStarterParentApplication.java @@ -0,0 +1,14 @@ +package com.baeldung.starterparent; + +import org.springframework.boot.SpringApplication; +import org.springframework.boot.autoconfigure.SpringBootApplication; + +@SpringBootApplication +public class SpringBootStarterParentApplication { + + public static void main(String[] args) { + SpringApplication.run(SpringBootStarterParentApplication.class, args); + System.out.println("Spring boot application running with starter parent"); + } + +} diff --git a/spring-core/pom.xml b/spring-core/pom.xml index 814addecdd..75e9fd7131 100644 --- a/spring-core/pom.xml +++ b/spring-core/pom.xml @@ -56,6 +56,12 @@ spring-boot-test ${mockito.spring.boot.version} test + + + org.assertj + assertj-core + ${assertj.version} 
+ test commons-io @@ -85,6 +91,7 @@ 2.5 1.5.2.RELEASE 1.10.19 + 3.12.2 \ No newline at end of file diff --git a/spring-core/src/main/resources/constructordi.xml b/spring-core/src/main/resources/constructordi.xml index 231e72adcb..983a00a80f 100644 --- a/spring-core/src/main/resources/constructordi.xml +++ b/spring-core/src/main/resources/constructordi.xml @@ -1,19 +1,21 @@ + https://www.springframework.org/schema/beans/spring-beans.xsd"> - + - + diff --git a/spring-core/src/test/java/com/baeldung/constructordi/ConstructorDependencyInjectionIntegrationTest.java b/spring-core/src/test/java/com/baeldung/constructordi/ConstructorDependencyInjectionIntegrationTest.java new file mode 100644 index 0000000000..7bd0ad0c86 --- /dev/null +++ b/spring-core/src/test/java/com/baeldung/constructordi/ConstructorDependencyInjectionIntegrationTest.java @@ -0,0 +1,33 @@ +package com.baeldung.constructordi; + +import static org.assertj.core.api.Assertions.assertThat; + +import org.junit.Test; +import org.junit.runner.RunWith; +import org.springframework.context.ApplicationContext; +import org.springframework.context.annotation.AnnotationConfigApplicationContext; +import org.springframework.context.support.ClassPathXmlApplicationContext; +import org.springframework.test.context.ContextConfiguration; +import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.support.AnnotationConfigContextLoader; + +import com.baeldung.constructordi.domain.Car; + +@RunWith(SpringJUnit4ClassRunner.class) +@ContextConfiguration(loader = AnnotationConfigContextLoader.class, classes = Config.class) +public class ConstructorDependencyInjectionIntegrationTest { + + @Test + public void givenPrototypeInjection_WhenObjectFactory_ThenNewInstanceReturn() { + ApplicationContext context = new AnnotationConfigApplicationContext(Config.class); + Car firstContextCar = context.getBean(Car.class); + + ApplicationContext xmlContext = new ClassPathXmlApplicationContext("constructordi.xml"); + Car secondContextCar = xmlContext.getBean(Car.class); + + assertThat(firstContextCar).isNotSameAs(secondContextCar); + assertThat(firstContextCar).hasToString("Engine: v8 5 Transmission: sliding"); + assertThat(secondContextCar).hasToString("Engine: v4 2 Transmission: sliding"); + } + +} diff --git a/spring-resttemplate/src/test/java/org/baeldung/resttemplate/LargeFileDownloadIntegrationTest.java b/spring-resttemplate/src/test/java/org/baeldung/resttemplate/LargeFileDownloadIntegrationTest.java new file mode 100644 index 0000000000..21639818db --- /dev/null +++ b/spring-resttemplate/src/test/java/org/baeldung/resttemplate/LargeFileDownloadIntegrationTest.java @@ -0,0 +1,109 @@ +package org.baeldung.resttemplate; + +import org.assertj.core.api.Assertions; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; +import org.springframework.core.io.FileSystemResource; +import org.springframework.http.HttpHeaders; +import org.springframework.http.HttpMethod; +import org.springframework.http.converter.HttpMessageConverter; +import org.springframework.util.StreamUtils; +import org.springframework.web.client.RestTemplate; + +import java.io.File; +import java.io.FileOutputStream; +import java.util.ArrayList; +import java.util.List; + +public class LargeFileDownloadIntegrationTest { + + static String FILE_URL = "http://ovh.net/files/1Mio.dat"; + + RestTemplate restTemplate; + + @Before + public void setUp() { + restTemplate = new RestTemplate(); + } + + @Test + public void 
givenResumableUrl_whenUrlCalledByHeadOption_thenExpectHeadersAvailable() { + HttpHeaders headers = restTemplate.headForHeaders(FILE_URL); + Assertions + .assertThat(headers.get("Accept-Ranges")) + .contains("bytes"); + Assertions + .assertThat(headers.getContentLength()) + .isGreaterThan(0); + } + + @Test + public void givenResumableUrl_whenDownloadCompletely_thenExpectCorrectFileSize() { + HttpHeaders headers = restTemplate.headForHeaders(FILE_URL); + long contentLength = headers.getContentLength(); + File file = restTemplate.execute(FILE_URL, HttpMethod.GET, null, clientHttpResponse -> { + File ret = File.createTempFile("download", "tmp"); + StreamUtils.copy(clientHttpResponse.getBody(), new FileOutputStream(ret)); + return ret; + }); + + Assert.assertNotNull(file); + Assertions + .assertThat(file.length()) + .isEqualTo(contentLength); + } + + @Test + public void givenResumableUrl_whenDownloadRange_thenExpectFileSizeEqualOrLessThanRange() { + int range = 10; + File file = restTemplate.execute(FILE_URL, HttpMethod.GET, clientHttpRequest -> clientHttpRequest + .getHeaders() + .set("Range", String.format("bytes=0-%d", range - 1)), clientHttpResponse -> { + File ret = File.createTempFile("download", "tmp"); + StreamUtils.copy(clientHttpResponse.getBody(), new FileOutputStream(ret)); + return ret; + }); + + Assert.assertNotNull(file); + Assertions + .assertThat(file.length()) + .isLessThanOrEqualTo(range); + } + + @Test + public void givenResumableUrl_whenPauseDownloadAndResume_thenExpectCorrectFileSize() { + + int range = 10; + + HttpHeaders headers = restTemplate.headForHeaders(FILE_URL); + long contentLength = headers.getContentLength(); + + File file = restTemplate.execute(FILE_URL, HttpMethod.GET, clientHttpRequest -> clientHttpRequest + .getHeaders() + .set("Range", String.format("bytes=0-%d", range - 1)), clientHttpResponse -> { + File ret = File.createTempFile("download", "tmp"); + StreamUtils.copy(clientHttpResponse.getBody(), new FileOutputStream(ret)); + return ret; + }); + + Assert.assertNotNull(file); + + Assertions + .assertThat(file.length()) + .isLessThanOrEqualTo(range); + + restTemplate.execute(FILE_URL, HttpMethod.GET, clientHttpRequest -> clientHttpRequest + .getHeaders() + .set("Range", String.format("bytes=%d-%d", file.length(), contentLength)), clientHttpResponse -> { + StreamUtils.copy(clientHttpResponse.getBody(), new FileOutputStream(file, true)); + return file; + }); + + Assertions + .assertThat(file.length()) + .isEqualTo(contentLength); + + } + +} diff --git a/spring-security-rest-custom/src/main/java/org/baeldung/config/child/MethodSecurityConfig.java b/spring-security-rest-custom/src/main/java/org/baeldung/config/child/MethodSecurityConfig.java new file mode 100644 index 0000000000..bc9a9f161b --- /dev/null +++ b/spring-security-rest-custom/src/main/java/org/baeldung/config/child/MethodSecurityConfig.java @@ -0,0 +1,37 @@ +package org.baeldung.config.child; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.security.access.intercept.RunAsImplAuthenticationProvider; +import org.springframework.security.access.intercept.RunAsManager; +import org.springframework.security.access.intercept.RunAsManagerImpl; +import org.springframework.security.authentication.AuthenticationProvider; +import org.springframework.security.config.annotation.authentication.builders.AuthenticationManagerBuilder; +import 
org.springframework.security.config.annotation.method.configuration.EnableGlobalMethodSecurity; +import org.springframework.security.config.annotation.method.configuration.GlobalMethodSecurityConfiguration; + + +@Configuration +@EnableGlobalMethodSecurity(securedEnabled = true) +public class MethodSecurityConfig extends GlobalMethodSecurityConfiguration { + + @Override + protected RunAsManager runAsManager() { + RunAsManagerImpl runAsManager = new RunAsManagerImpl(); + runAsManager.setKey("MyRunAsKey"); + return runAsManager; + } + + @Autowired + public void configureGlobal(AuthenticationManagerBuilder auth) throws Exception { + auth.authenticationProvider(runAsAuthenticationProvider()); + } + + @Bean + public AuthenticationProvider runAsAuthenticationProvider() { + RunAsImplAuthenticationProvider authProvider = new RunAsImplAuthenticationProvider(); + authProvider.setKey("MyRunAsKey"); + return authProvider; + } +} \ No newline at end of file diff --git a/spring-security-rest-custom/src/main/java/org/baeldung/service/RunAsService.java b/spring-security-rest-custom/src/main/java/org/baeldung/service/RunAsService.java new file mode 100644 index 0000000000..a6320f8b81 --- /dev/null +++ b/spring-security-rest-custom/src/main/java/org/baeldung/service/RunAsService.java @@ -0,0 +1,17 @@ +package org.baeldung.service; + +import org.springframework.security.access.annotation.Secured; +import org.springframework.security.core.Authentication; +import org.springframework.security.core.context.SecurityContextHolder; +import org.springframework.stereotype.Service; + +@Service +public class RunAsService { + + @Secured({ "ROLE_RUN_AS_REPORTER" }) + public Authentication getCurrentUser() { + Authentication authentication = + SecurityContextHolder.getContext().getAuthentication(); + return authentication; + } +} \ No newline at end of file diff --git a/spring-security-rest-custom/src/main/java/org/baeldung/web/controller/RunAsController.java b/spring-security-rest-custom/src/main/java/org/baeldung/web/controller/RunAsController.java new file mode 100644 index 0000000000..08f39aa5f2 --- /dev/null +++ b/spring-security-rest-custom/src/main/java/org/baeldung/web/controller/RunAsController.java @@ -0,0 +1,23 @@ +package org.baeldung.web.controller; + +import org.springframework.security.access.annotation.Secured; +import org.springframework.security.core.Authentication; +import org.springframework.security.core.context.SecurityContextHolder; +import org.springframework.stereotype.Controller; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.ResponseBody; + + +@Controller +@RequestMapping("/runas") +public class RunAsController { + + @Secured({ "ROLE_USER", "RUN_AS_REPORTER" }) + @RequestMapping + @ResponseBody + public String tryRunAs() { + Authentication auth = SecurityContextHolder.getContext().getAuthentication(); + return "Current User Authorities inside this RunAS method only " + + auth.getAuthorities().toString(); + } +} diff --git a/spring-security-rest-custom/src/main/java/org/baeldung/web/controller/ViewController.java b/spring-security-rest-custom/src/main/java/org/baeldung/web/controller/ViewController.java index 83c0292d98..fbcb9b979e 100644 --- a/spring-security-rest-custom/src/main/java/org/baeldung/web/controller/ViewController.java +++ b/spring-security-rest-custom/src/main/java/org/baeldung/web/controller/ViewController.java @@ -10,4 +10,9 @@ public class ViewController { public String index() { return "index"; } + + 
@RequestMapping({ "/runashome" }) + public String run() { + return "runas"; + } } diff --git a/spring-security-rest-custom/src/main/webapp/WEB-INF/templates/runas.html b/spring-security-rest-custom/src/main/webapp/WEB-INF/templates/runas.html new file mode 100644 index 0000000000..c7c3b2e0e4 --- /dev/null +++ b/spring-security-rest-custom/src/main/webapp/WEB-INF/templates/runas.html @@ -0,0 +1,23 @@ + + + + Current user authorities: + user +
+ + Generate Report As Super User + + + + + \ No newline at end of file diff --git a/testing-modules/junit-5-advanced/pom.xml b/testing-modules/junit-5-advanced/pom.xml new file mode 100644 index 0000000000..f65f7e2a2f --- /dev/null +++ b/testing-modules/junit-5-advanced/pom.xml @@ -0,0 +1,32 @@ + + + 4.0.0 + junit-5-advanced + 1.0-SNAPSHOT + junit-5-advanced + Advanced JUnit 5 Topics + + + com.baeldung + parent-modules + 1.0.0-SNAPSHOT + ../../ + + + + + org.junit.jupiter + junit-jupiter + ${junit-jupiter.version} + test + + + + + 5.4.2 + 2.21.0 + + + diff --git a/testing-modules/junit-5-advanced/src/test/java/com/baeldung/displayname/DisplayNameGeneratorUnitTest.java b/testing-modules/junit-5-advanced/src/test/java/com/baeldung/displayname/DisplayNameGeneratorUnitTest.java new file mode 100644 index 0000000000..311539f760 --- /dev/null +++ b/testing-modules/junit-5-advanced/src/test/java/com/baeldung/displayname/DisplayNameGeneratorUnitTest.java @@ -0,0 +1,87 @@ +package com.baeldung.displayname; + +import org.junit.jupiter.api.DisplayNameGeneration; +import org.junit.jupiter.api.DisplayNameGenerator; +import org.junit.jupiter.api.Nested; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ValueSource; + +import java.lang.reflect.Method; + +@DisplayNameGeneration(DisplayNameGeneratorUnitTest.ReplaceCamelCase.class) +class DisplayNameGeneratorUnitTest { + + @Test + void camelCaseName() { + } + + @Nested + @DisplayNameGeneration(DisplayNameGeneratorUnitTest.IndicativeSentences.class) + class ANumberIsFizz { + @Test + void ifItIsDivisibleByThree() { + } + + @ParameterizedTest(name = "Number {0} is fizz.") + @ValueSource(ints = { 3, 12, 18 }) + void ifItIsOneOfTheFollowingNumbers(int number) { + } + } + + @Nested + @DisplayNameGeneration(DisplayNameGeneratorUnitTest.IndicativeSentences.class) + class ANumberIsBuzz { + @Test + void ifItIsDivisibleByFive() { + } + + @ParameterizedTest(name = "Number {0} is buzz.") + @ValueSource(ints = { 5, 10, 20 }) + void ifItIsOneOfTheFollowingNumbers(int number) { + } + } + + static class IndicativeSentences extends ReplaceCamelCase { + @Override + public String generateDisplayNameForNestedClass(Class nestedClass) { + return super.generateDisplayNameForNestedClass(nestedClass) + "..."; + } + + @Override + public String generateDisplayNameForMethod(Class testClass, Method testMethod) { + return replaceCamelCase(testClass.getSimpleName() + " " + testMethod.getName()) + "."; + } + } + + static class ReplaceCamelCase extends DisplayNameGenerator.Standard { + @Override + public String generateDisplayNameForClass(Class testClass) { + return replaceCamelCase(super.generateDisplayNameForClass(testClass)); + } + + @Override + public String generateDisplayNameForNestedClass(Class nestedClass) { + return replaceCamelCase(super.generateDisplayNameForNestedClass(nestedClass)); + } + + @Override + public String generateDisplayNameForMethod(Class testClass, Method testMethod) { + return this.replaceCamelCase(testMethod.getName()) + DisplayNameGenerator.parameterTypesAsString(testMethod); + } + + String replaceCamelCase(String camelCase) { + StringBuilder result = new StringBuilder(); + result.append(camelCase.charAt(0)); + for (int i=1; i - - org.junit.platform - junit-platform-engine - ${junit.platform.version} - - - org.junit.jupiter - junit-jupiter-params - ${junit.jupiter.version} - org.junit.platform junit-platform-runner - ${junit.platform.version} + ${junit-platform.version} test @@ 
-44,6 +34,24 @@ ${junit.vintage.version} test + + org.junit.jupiter + junit-jupiter-engine + ${junit-jupiter.version} + test + + + org.junit.jupiter + junit-jupiter-params + ${junit-jupiter.version} + test + + + org.junit.jupiter + junit-jupiter-api + ${junit-jupiter.version} + test + com.h2database h2 @@ -92,13 +100,7 @@ maven-surefire-plugin ${maven-surefire-plugin.version} - - - org.junit.platform - junit-platform-surefire-provider - ${junit.platform.version} - - + **/*IntegrationTest.java @@ -148,9 +150,9 @@ - 5.4.2 - 1.2.0 - 5.2.0 + 5.4.2 + 1.2.0 + 5.4.2 1.4.196 5.0.6.RELEASE 2.21.0 diff --git a/testing-modules/junit-5-basics/src/test/java/com/baeldung/extensions/tempdir/SharedTemporaryDirectoryUnitTest.java b/testing-modules/junit-5-basics/src/test/java/com/baeldung/extensions/tempdir/SharedTemporaryDirectoryUnitTest.java new file mode 100644 index 0000000000..9483a33143 --- /dev/null +++ b/testing-modules/junit-5-basics/src/test/java/com/baeldung/extensions/tempdir/SharedTemporaryDirectoryUnitTest.java @@ -0,0 +1,49 @@ +package com.baeldung.extensions.tempdir; + +import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertAll; +import static org.junit.jupiter.api.Assertions.assertLinesMatch; + +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.Arrays; +import java.util.List; + +import org.junit.jupiter.api.Order; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestMethodOrder; +import org.junit.jupiter.api.io.TempDir; + +import org.junit.jupiter.api.MethodOrderer.OrderAnnotation; + +@TestMethodOrder(OrderAnnotation.class) +class SharedTemporaryDirectoryUnitTest { + + @TempDir + static Path sharedTempDir; + + @Test + @Order(1) + void givenFieldWithSharedTempDirectoryPath_whenWriteToFile_thenContentIsCorrect() throws IOException { + Path numbers = sharedTempDir.resolve("numbers.txt"); + + List lines = Arrays.asList("1", "2", "3"); + Files.write(numbers, lines); + + assertAll( + () -> assertTrue("File should exist", Files.exists(numbers)), + () -> assertLinesMatch(lines, Files.readAllLines(numbers))); + + Files.createTempDirectory("bpb"); + } + + @Test + @Order(2) + void givenAlreadyWrittenToSharedFile_whenCheckContents_thenContentIsCorrect() throws IOException { + Path numbers = sharedTempDir.resolve("numbers.txt"); + + assertLinesMatch(Arrays.asList("1", "2", "3"), Files.readAllLines(numbers)); + } + +} diff --git a/testing-modules/junit-5-basics/src/test/java/com/baeldung/extensions/tempdir/TemporaryDirectoryUnitTest.java b/testing-modules/junit-5-basics/src/test/java/com/baeldung/extensions/tempdir/TemporaryDirectoryUnitTest.java new file mode 100644 index 0000000000..c6f1a7cd77 --- /dev/null +++ b/testing-modules/junit-5-basics/src/test/java/com/baeldung/extensions/tempdir/TemporaryDirectoryUnitTest.java @@ -0,0 +1,48 @@ +package com.baeldung.extensions.tempdir; + +import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertAll; +import static org.junit.jupiter.api.Assertions.assertLinesMatch; + +import java.io.File; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.Arrays; +import java.util.List; + +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; + +class TemporaryDirectoryUnitTest { + + @Test + void givenTestMethodWithTempDirectoryPath_whenWriteToFile_thenContentIsCorrect(@TempDir Path tempDir) throws IOException { + Path numbers = 
tempDir.resolve("numbers.txt"); + + List lines = Arrays.asList("1", "2", "3"); + Files.write(numbers, lines); + + assertAll( + () -> assertTrue("File should exist", Files.exists(numbers)), + () -> assertLinesMatch(lines, Files.readAllLines(numbers))); + } + + @TempDir + File anotherTempDir; + + @Test + void givenFieldWithTempDirectoryFile_whenWriteToFile_thenContentIsCorrect() throws IOException { + assertTrue("Should be a directory ", this.anotherTempDir.isDirectory()); + + File letters = new File(anotherTempDir, "letters.txt"); + List lines = Arrays.asList("x", "y", "z"); + + Files.write(letters.toPath(), lines); + + assertAll( + () -> assertTrue("File should exist", Files.exists(letters.toPath())), + () -> assertLinesMatch(lines, Files.readAllLines(letters.toPath()))); + } + +} diff --git a/testing-modules/pom.xml b/testing-modules/pom.xml index 90ec857b7f..95a19c2557 100644 --- a/testing-modules/pom.xml +++ b/testing-modules/pom.xml @@ -33,5 +33,6 @@ testing testng junit-5-basics + junit-5-advanced