Merge branch 'eugenp:master' into master

Commit a9d69a050b by edizor, 2023-01-06 07:26:13 +08:00 (committed by GitHub)
81 changed files with 2515 additions and 194 deletions

@@ -0,0 +1,29 @@
plugins {
id 'java'
}
group = "com.baeldung.gradle"
version = "1.0.0-SNAPSHOT"
sourceCompatibility = JavaVersion.VERSION_17
repositories {
mavenLocal()
mavenCentral()
maven {
name = "GitHubPackages"
url = "https://maven.pkg.github.com/eugenp/tutorials"
credentials {
username = project.USERNAME
password = project.GITHUB_TOKEN
}
}
}
dependencies {
implementation('com.baeldung.gradle:publish-package:1.0.0-SNAPSHOT')
testImplementation("org.junit.jupiter:junit-jupiter-engine:5.9.0")
}
tasks.named('test') {
useJUnitPlatform()
}
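
Note: project.USERNAME and project.GITHUB_TOKEN above are resolved as Gradle project properties. One common way to supply them, shown here only as an illustration with placeholder values, is a local gradle.properties file (or equivalent -P command-line flags):

USERNAME=your-github-username
GITHUB_TOKEN=your-personal-access-token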

@@ -0,0 +1,5 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-7.6-bin.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists

@@ -0,0 +1,234 @@
#!/bin/sh
#
# Copyright © 2015-2021 the original authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
##############################################################################
#
# Gradle start up script for POSIX generated by Gradle.
#
# Important for running:
#
# (1) You need a POSIX-compliant shell to run this script. If your /bin/sh is
# noncompliant, but you have some other compliant shell such as ksh or
# bash, then to run this script, type that shell name before the whole
# command line, like:
#
# ksh Gradle
#
# Busybox and similar reduced shells will NOT work, because this script
# requires all of these POSIX shell features:
# * functions;
# * expansions «$var», «${var}», «${var:-default}», «${var+SET}»,
# «${var#prefix}», «${var%suffix}», and «$( cmd )»;
# * compound commands having a testable exit status, especially «case»;
# * various built-in commands including «command», «set», and «ulimit».
#
# Important for patching:
#
# (2) This script targets any POSIX shell, so it avoids extensions provided
# by Bash, Ksh, etc; in particular arrays are avoided.
#
# The "traditional" practice of packing multiple parameters into a
# space-separated string is a well documented source of bugs and security
# problems, so this is (mostly) avoided, by progressively accumulating
# options in "$@", and eventually passing that to Java.
#
# Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS,
# and GRADLE_OPTS) rely on word-splitting, this is performed explicitly;
# see the in-line comments for details.
#
# There are tweaks for specific operating systems such as AIX, CygWin,
# Darwin, MinGW, and NonStop.
#
# (3) This script is generated from the Groovy template
# https://github.com/gradle/gradle/blob/master/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt
# within the Gradle project.
#
# You can find Gradle at https://github.com/gradle/gradle/.
#
##############################################################################
# Attempt to set APP_HOME
# Resolve links: $0 may be a link
app_path=$0
# Need this for daisy-chained symlinks.
while
APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path
[ -h "$app_path" ]
do
ls=$( ls -ld "$app_path" )
link=${ls#*' -> '}
case $link in #(
/*) app_path=$link ;; #(
*) app_path=$APP_HOME$link ;;
esac
done
APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit
APP_NAME="Gradle"
APP_BASE_NAME=${0##*/}
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD=maximum
warn () {
echo "$*"
} >&2
die () {
echo
echo "$*"
echo
exit 1
} >&2
# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
nonstop=false
case "$( uname )" in #(
CYGWIN* ) cygwin=true ;; #(
Darwin* ) darwin=true ;; #(
MSYS* | MINGW* ) msys=true ;; #(
NONSTOP* ) nonstop=true ;;
esac
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
# IBM's JDK on AIX uses strange locations for the executables
JAVACMD=$JAVA_HOME/jre/sh/java
else
JAVACMD=$JAVA_HOME/bin/java
fi
if [ ! -x "$JAVACMD" ] ; then
die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
else
JAVACMD=java
which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
# Increase the maximum file descriptors if we can.
if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then
case $MAX_FD in #(
max*)
MAX_FD=$( ulimit -H -n ) ||
warn "Could not query maximum file descriptor limit"
esac
case $MAX_FD in #(
'' | soft) :;; #(
*)
ulimit -n "$MAX_FD" ||
warn "Could not set maximum file descriptor limit to $MAX_FD"
esac
fi
# Collect all arguments for the java command, stacking in reverse order:
# * args from the command line
# * the main class name
# * -classpath
# * -D...appname settings
# * --module-path (only if needed)
# * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables.
# For Cygwin or MSYS, switch paths to Windows format before running java
if "$cygwin" || "$msys" ; then
APP_HOME=$( cygpath --path --mixed "$APP_HOME" )
CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" )
JAVACMD=$( cygpath --unix "$JAVACMD" )
# Now convert the arguments - kludge to limit ourselves to /bin/sh
for arg do
if
case $arg in #(
-*) false ;; # don't mess with options #(
/?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath
[ -e "$t" ] ;; #(
*) false ;;
esac
then
arg=$( cygpath --path --ignore --mixed "$arg" )
fi
# Roll the args list around exactly as many times as the number of
# args, so each arg winds up back in the position where it started, but
# possibly modified.
#
# NB: a `for` loop captures its iteration list before it begins, so
# changing the positional parameters here affects neither the number of
# iterations, nor the values presented in `arg`.
shift # remove old arg
set -- "$@" "$arg" # push replacement arg
done
fi
# Collect all arguments for the java command;
# * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of
# shell script including quotes and variable substitutions, so put them in
# double quotes to make sure that they get re-expanded; and
# * put everything else in single quotes, so that it's not re-expanded.
set -- \
"-Dorg.gradle.appname=$APP_BASE_NAME" \
-classpath "$CLASSPATH" \
org.gradle.wrapper.GradleWrapperMain \
"$@"
# Use "xargs" to parse quoted args.
#
# With -n1 it outputs one arg per line, with the quotes and backslashes removed.
#
# In Bash we could simply go:
#
# readarray ARGS < <( xargs -n1 <<<"$var" ) &&
# set -- "${ARGS[@]}" "$@"
#
# but POSIX shell has neither arrays nor command substitution, so instead we
# post-process each arg (as a line of input to sed) to backslash-escape any
# character that might be a shell metacharacter, then use eval to reverse
# that process (while maintaining the separation between arguments), and wrap
# the whole thing up as a single "set" statement.
#
# This will of course break if any of these variables contains a newline or
# an unmatched quote.
#
eval "set -- $(
printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" |
xargs -n1 |
sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' |
tr '\n' ' '
)" '"$@"'
exec "$JAVACMD" "$@"

@@ -0,0 +1,89 @@
@rem
@rem Copyright 2015 the original author or authors.
@rem
@rem Licensed under the Apache License, Version 2.0 (the "License");
@rem you may not use this file except in compliance with the License.
@rem You may obtain a copy of the License at
@rem
@rem https://www.apache.org/licenses/LICENSE-2.0
@rem
@rem Unless required by applicable law or agreed to in writing, software
@rem distributed under the License is distributed on an "AS IS" BASIS,
@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@rem See the License for the specific language governing permissions and
@rem limitations under the License.
@rem
@if "%DEBUG%" == "" @echo off
@rem ##########################################################################
@rem
@rem Gradle startup script for Windows
@rem
@rem ##########################################################################
@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal
set DIRNAME=%~dp0
if "%DIRNAME%" == "" set DIRNAME=.
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%
@rem Resolve any "." and ".." in APP_HOME to make it shorter.
for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi
@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m"
@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome
set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if "%ERRORLEVEL%" == "0" goto execute
echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:findJavaFromJavaHome
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe
if exist "%JAVA_EXE%" goto execute
echo.
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:execute
@rem Setup the command line
set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %*
:end
@rem End local scope for the variables with windows NT shell
if "%ERRORLEVEL%"=="0" goto mainEnd
:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
exit /b 1
:mainEnd
if "%OS%"=="Windows_NT" endlocal
:omega

@@ -0,0 +1 @@
rootProject.name = "multiple-repositories"

@@ -0,0 +1,26 @@
package com.baeldung.gradle.multiplerepositories;
import com.baeldung.gradle.publish_package.User;
public class Student extends User {
private String studentCode;
private String lastInstitution;
public String getStudentCode() {
return studentCode;
}
public void setStudentCode(String studentCode) {
this.studentCode = studentCode;
}
public String getLastInstitution() {
return lastInstitution;
}
public void setLastInstitution(String lastInstitution) {
this.lastInstitution = lastInstitution;
}
}

@@ -0,0 +1,19 @@
package com.baeldung.gradle.multiplerepositories;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertEquals;
public class MultipleRepositoryTest {
@Test
public void testPublishedPackage() {
Student student = new Student();
student.setId(1);
student.setStudentCode("CD-875");
student.setName("John Doe");
student.setLastInstitution("Institute of Technology");
assertEquals("John Doe", student.getName());
}
}

@@ -0,0 +1,33 @@
plugins {
id "maven-publish"
id "java"
}
group = "com.baeldung.gradle"
version = "1.0.0-SNAPSHOT"
repositories {
mavenLocal()
mavenCentral()
}
publishing {
publications {
register("jar", MavenPublication) {
from(components["java"])
pom {
url.set("https://github.com/eugenp/tutorials.git")
}
}
}
repositories {
maven {
name = "GitHubPackages"
url = "https://maven.pkg.github.com/eugenp/tutorials"
credentials {
username = project.USERNAME
password = project.GITHUB_TOKEN
}
}
}
}
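
With the maven-publish plugin applied, this configuration exposes a publish task that pushes the publication to the GitHubPackages repository defined above. A typical invocation (property names taken from the credentials block; values are placeholders) might be:

./gradlew publish -PUSERNAME=your-github-username -PGITHUB_TOKEN=your-token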

@@ -0,0 +1,5 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-7.6-bin.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists

@@ -0,0 +1,234 @@
#!/bin/sh
#
# Copyright © 2015-2021 the original authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
##############################################################################
#
# Gradle start up script for POSIX generated by Gradle.
#
# Important for running:
#
# (1) You need a POSIX-compliant shell to run this script. If your /bin/sh is
# noncompliant, but you have some other compliant shell such as ksh or
# bash, then to run this script, type that shell name before the whole
# command line, like:
#
# ksh Gradle
#
# Busybox and similar reduced shells will NOT work, because this script
# requires all of these POSIX shell features:
# * functions;
# * expansions «$var», «${var}», «${var:-default}», «${var+SET}»,
# «${var#prefix}», «${var%suffix}», and «$( cmd )»;
# * compound commands having a testable exit status, especially «case»;
# * various built-in commands including «command», «set», and «ulimit».
#
# Important for patching:
#
# (2) This script targets any POSIX shell, so it avoids extensions provided
# by Bash, Ksh, etc; in particular arrays are avoided.
#
# The "traditional" practice of packing multiple parameters into a
# space-separated string is a well documented source of bugs and security
# problems, so this is (mostly) avoided, by progressively accumulating
# options in "$@", and eventually passing that to Java.
#
# Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS,
# and GRADLE_OPTS) rely on word-splitting, this is performed explicitly;
# see the in-line comments for details.
#
# There are tweaks for specific operating systems such as AIX, CygWin,
# Darwin, MinGW, and NonStop.
#
# (3) This script is generated from the Groovy template
# https://github.com/gradle/gradle/blob/master/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt
# within the Gradle project.
#
# You can find Gradle at https://github.com/gradle/gradle/.
#
##############################################################################
# Attempt to set APP_HOME
# Resolve links: $0 may be a link
app_path=$0
# Need this for daisy-chained symlinks.
while
APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path
[ -h "$app_path" ]
do
ls=$( ls -ld "$app_path" )
link=${ls#*' -> '}
case $link in #(
/*) app_path=$link ;; #(
*) app_path=$APP_HOME$link ;;
esac
done
APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit
APP_NAME="Gradle"
APP_BASE_NAME=${0##*/}
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD=maximum
warn () {
echo "$*"
} >&2
die () {
echo
echo "$*"
echo
exit 1
} >&2
# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
nonstop=false
case "$( uname )" in #(
CYGWIN* ) cygwin=true ;; #(
Darwin* ) darwin=true ;; #(
MSYS* | MINGW* ) msys=true ;; #(
NONSTOP* ) nonstop=true ;;
esac
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
# IBM's JDK on AIX uses strange locations for the executables
JAVACMD=$JAVA_HOME/jre/sh/java
else
JAVACMD=$JAVA_HOME/bin/java
fi
if [ ! -x "$JAVACMD" ] ; then
die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
else
JAVACMD=java
which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
# Increase the maximum file descriptors if we can.
if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then
case $MAX_FD in #(
max*)
MAX_FD=$( ulimit -H -n ) ||
warn "Could not query maximum file descriptor limit"
esac
case $MAX_FD in #(
'' | soft) :;; #(
*)
ulimit -n "$MAX_FD" ||
warn "Could not set maximum file descriptor limit to $MAX_FD"
esac
fi
# Collect all arguments for the java command, stacking in reverse order:
# * args from the command line
# * the main class name
# * -classpath
# * -D...appname settings
# * --module-path (only if needed)
# * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables.
# For Cygwin or MSYS, switch paths to Windows format before running java
if "$cygwin" || "$msys" ; then
APP_HOME=$( cygpath --path --mixed "$APP_HOME" )
CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" )
JAVACMD=$( cygpath --unix "$JAVACMD" )
# Now convert the arguments - kludge to limit ourselves to /bin/sh
for arg do
if
case $arg in #(
-*) false ;; # don't mess with options #(
/?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath
[ -e "$t" ] ;; #(
*) false ;;
esac
then
arg=$( cygpath --path --ignore --mixed "$arg" )
fi
# Roll the args list around exactly as many times as the number of
# args, so each arg winds up back in the position where it started, but
# possibly modified.
#
# NB: a `for` loop captures its iteration list before it begins, so
# changing the positional parameters here affects neither the number of
# iterations, nor the values presented in `arg`.
shift # remove old arg
set -- "$@" "$arg" # push replacement arg
done
fi
# Collect all arguments for the java command;
# * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of
# shell script including quotes and variable substitutions, so put them in
# double quotes to make sure that they get re-expanded; and
# * put everything else in single quotes, so that it's not re-expanded.
set -- \
"-Dorg.gradle.appname=$APP_BASE_NAME" \
-classpath "$CLASSPATH" \
org.gradle.wrapper.GradleWrapperMain \
"$@"
# Use "xargs" to parse quoted args.
#
# With -n1 it outputs one arg per line, with the quotes and backslashes removed.
#
# In Bash we could simply go:
#
# readarray ARGS < <( xargs -n1 <<<"$var" ) &&
# set -- "${ARGS[@]}" "$@"
#
# but POSIX shell has neither arrays nor command substitution, so instead we
# post-process each arg (as a line of input to sed) to backslash-escape any
# character that might be a shell metacharacter, then use eval to reverse
# that process (while maintaining the separation between arguments), and wrap
# the whole thing up as a single "set" statement.
#
# This will of course break if any of these variables contains a newline or
# an unmatched quote.
#
eval "set -- $(
printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" |
xargs -n1 |
sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' |
tr '\n' ' '
)" '"$@"'
exec "$JAVACMD" "$@"

@@ -0,0 +1,89 @@
@rem
@rem Copyright 2015 the original author or authors.
@rem
@rem Licensed under the Apache License, Version 2.0 (the "License");
@rem you may not use this file except in compliance with the License.
@rem You may obtain a copy of the License at
@rem
@rem https://www.apache.org/licenses/LICENSE-2.0
@rem
@rem Unless required by applicable law or agreed to in writing, software
@rem distributed under the License is distributed on an "AS IS" BASIS,
@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@rem See the License for the specific language governing permissions and
@rem limitations under the License.
@rem
@if "%DEBUG%" == "" @echo off
@rem ##########################################################################
@rem
@rem Gradle startup script for Windows
@rem
@rem ##########################################################################
@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal
set DIRNAME=%~dp0
if "%DIRNAME%" == "" set DIRNAME=.
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%
@rem Resolve any "." and ".." in APP_HOME to make it shorter.
for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi
@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m"
@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome
set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if "%ERRORLEVEL%" == "0" goto execute
echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:findJavaFromJavaHome
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe
if exist "%JAVA_EXE%" goto execute
echo.
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:execute
@rem Setup the command line
set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %*
:end
@rem End local scope for the variables with windows NT shell
if "%ERRORLEVEL%"=="0" goto mainEnd
:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
exit /b 1
:mainEnd
if "%OS%"=="Windows_NT" endlocal
:omega

@@ -0,0 +1 @@
rootProject.name = "publish-package"

@@ -0,0 +1,35 @@
package com.baeldung.gradle.publish_package;
import java.util.Date;
public class User {
private Integer id;
private String name;
private Date dob;
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public Date getDob() {
return dob;
}
public void setDob(Date dob) {
this.dob = dob;
}
}

@@ -0,0 +1,11 @@
pipeline {
agent any
stages {
stage('Build') {
steps {
dir('/var/jenkins_home/workspace/SamplePipeline/scripts') {
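// steps nested in this dir block run with the given absolute workspace path as their working directory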
}
}
}
}
}

@@ -0,0 +1,11 @@
pipeline {
agent any
stages {
stage('Build') {
steps {
dir('scripts') {
}
}
}
}
}

@@ -0,0 +1,10 @@
pipeline {
agent any
stages {
stage('Build') {
steps {
sh 'cd scripts'
}
}
}
}
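
Note: each sh step starts its own shell, so a bare cd like the one above only applies within that single step and does not change the working directory for later steps. To act inside the directory within one step, the commands can be chained (the script name below is purely illustrative), or dir() can be used as in the previous examples:

sh 'cd scripts && ./build.sh'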

@@ -0,0 +1,10 @@
pipeline {
agent any
stages {
stage('Build') {
steps {
mvn 'clean install'
}
}
}
}

@@ -72,6 +72,11 @@
<version>${jersey.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.glassfish.jersey.connectors</groupId>
<artifactId>jersey-apache-connector</artifactId>
<version>${jersey.version}</version>
</dependency>
</dependencies>
<build>
@@ -95,7 +100,7 @@
</build>
<properties>
<jersey.version>2.26</jersey.version>
<jersey.version>2.38</jersey.version>
</properties>
</project>

@@ -1,6 +1,8 @@
package com.baeldung.jersey.client;
import com.baeldung.jersey.client.filter.AddHeaderOnRequestFilter;
import org.glassfish.jersey.apache.connector.ApacheConnectorProvider;
import org.glassfish.jersey.client.ClientConfig;
import org.glassfish.jersey.client.authentication.HttpAuthenticationFeature;
import org.glassfish.jersey.client.oauth1.AccessToken;
import org.glassfish.jersey.client.oauth1.ConsumerCredentials;
@@ -155,7 +157,8 @@ public class JerseyClientHeaders {
}
public static Response sendRestrictedHeaderThroughDefaultTransportConnector(String headerKey, String headerValue) {
Client client = ClientBuilder.newClient();
ClientConfig clientConfig = new ClientConfig().connectorProvider(new ApacheConnectorProvider());
Client client = ClientBuilder.newClient(clientConfig);
System.setProperty("sun.net.http.allowRestrictedHeaders", "true");
return client.target(TARGET)

@@ -2,10 +2,8 @@ package com.baeldung.jersey.server;
import com.baeldung.jersey.client.JerseyClientHeaders;
import com.baeldung.jersey.client.filter.AddHeaderOnRequestFilter;
import org.glassfish.jersey.media.sse.SseFeature;
import org.glassfish.jersey.server.ResourceConfig;
import org.glassfish.jersey.test.JerseyTest;
import org.junit.Ignore;
import org.junit.Test;
import javax.ws.rs.core.Application;
@@ -17,7 +15,6 @@ import java.util.Map;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
@Ignore
public class EchoHeadersUnitTest extends JerseyTest {
private static final String SIMPLE_HEADER_KEY = "my-header-key";

@@ -24,6 +24,10 @@
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-artemis</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-cache</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-data-jpa</artifactId>

@@ -0,0 +1,16 @@
package com.baeldung.tasksservice;
import java.util.List;
import org.springframework.boot.autoconfigure.cache.CacheManagerCustomizer;
import org.springframework.cache.concurrent.ConcurrentMapCacheManager;
import org.springframework.stereotype.Component;
@Component
public class SimpleCacheCustomizer implements CacheManagerCustomizer<ConcurrentMapCacheManager> {
@Override
public void customize(ConcurrentMapCacheManager cacheManager) {
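// pre-creates the "tasks" cache; with a fixed name list, the manager no longer creates caches on demand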
cacheManager.setCacheNames(List.of("tasks"));
}
}

@@ -2,8 +2,10 @@ package com.baeldung.tasksservice;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.cache.annotation.EnableCaching;
@SpringBootApplication
@EnableCaching
public class TasksServiceApplication {
public static void main(String[] args) {

@@ -19,6 +19,7 @@ import java.util.UUID;
import javax.transaction.Transactional;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.cache.annotation.Cacheable;
import org.springframework.stereotype.Service;
import com.baeldung.tasksservice.adapters.repository.TaskRecord;
@@ -29,6 +30,7 @@
@Autowired
private TasksRepository tasksRepository;
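// results are cached in the "tasks" cache, so repeated lookups for the same id skip the repository call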
@Cacheable("tasks")
public TaskRecord getTaskById(String id) {
return tasksRepository.findById(id)
.orElseThrow(() -> new UnknownTaskException(id));

@@ -24,6 +24,11 @@
<artifactId>logback-classic</artifactId>
<version>${logback.version}</version>
</dependency>
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-core</artifactId>
<version>${logback.version}</version>
</dependency>
<dependency>
<groupId>ch.qos.logback.contrib</groupId>
<artifactId>logback-json-classic</artifactId>
@@ -34,6 +39,11 @@
<artifactId>logback-jackson</artifactId>
<version>${logback.contrib.version}</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>${slf4j.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
@@ -55,13 +65,13 @@
</exclusions>
</dependency>
<dependency>
<groupId>javax.mail</groupId>
<artifactId>mail</artifactId>
<version>${javax.mail.version}</version>
<groupId>com.sun.mail</groupId>
<artifactId>javax.mail</artifactId>
<version>${javax.mail.version}</version>
</dependency>
<dependency>
<groupId>javax.activation</groupId>
<artifactId>activation</artifactId>
<artifactId>javax.activation-api</artifactId>
<version>${javax.activation.version}</version>
<scope>runtime</scope>
</dependency>
@@ -106,8 +116,10 @@
<json.version>20180130</json.version>
<logback.contrib.version>0.1.5</logback.contrib.version>
<docx4j.version>3.3.5</docx4j.version>
<javax.mail.version>1.4.7</javax.mail.version>
<javax.activation.version>1.1.1</javax.activation.version>
<javax.mail.version>1.6.2</javax.mail.version>
<javax.activation.version>1.2.0</javax.activation.version>
<logback.version>1.3.5</logback.version>
<slf4j.version>2.0.4</slf4j.version>
</properties>
</project>

@@ -14,6 +14,9 @@ public interface ArticleRepository extends JpaRepository<Article, Integer> {
List<Article> findAllByPublicationTimeBetween(Date publicationTimeStart,
Date publicationTimeEnd);
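// single-result variant: Spring Data throws IncorrectResultSizeDataAccessException (caused by NonUniqueResultException) when more than one article matches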
Article findByPublicationTimeBetween(Date publicationTimeStart,
Date publicationTimeEnd);
@Query("select a from Article a where a.creationDateTime <= :creationDateTime")
List<Article> findAllWithCreationDateTimeBefore(
@Param("creationDateTime") Date creationDateTime);

@@ -0,0 +1,35 @@
package com.baeldung.spring.data.jpa.query.nonuniqueresultexception;
import com.baeldung.spring.data.jpa.query.datetime.ArticleRepository;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.orm.jpa.DataJpaTest;
import org.springframework.dao.IncorrectResultSizeDataAccessException;
import org.springframework.test.context.junit4.SpringRunner;
import javax.persistence.NonUniqueResultException;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import static org.assertj.core.api.AssertionsForClassTypes.assertThatThrownBy;
@RunWith(SpringRunner.class)
@DataJpaTest(properties = "spring.sql.init.data-locations=classpath:import_entities.sql", showSql = false)
public class NonUniqueResultExceptionIntegrationTest {
@Autowired
private ArticleRepository repository;
@Test
public void givenImportedArticles_whenFindByPublicationTimeBetween_thenIncorrectResultSizeDataAccessExceptionThrown() {
assertThatThrownBy(() -> repository.findByPublicationTimeBetween(new SimpleDateFormat("HH:mm").parse("15:15"), new SimpleDateFormat("HH:mm").parse("16:30")))
.isInstanceOf(IncorrectResultSizeDataAccessException.class)
.hasCauseInstanceOf(NonUniqueResultException.class);
}
@Test
public void givenImportedArticles_whenFindAllByPublicationTimeBetween_thenSuccess() throws ParseException {
repository.findAllByPublicationTimeBetween(new SimpleDateFormat("HH:mm").parse("15:15"), new SimpleDateFormat("HH:mm").parse("16:30"));
}
}

@@ -1,7 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<artifactId>spring-data-jpa-repo-2</artifactId>
<name>spring-data-jpa-repo-2</name>
@ -34,6 +34,14 @@
<groupId>com.h2database</groupId>
<artifactId>h2</artifactId>
</dependency>
<dependency>
<groupId>com.querydsl</groupId>
<artifactId>querydsl-apt</artifactId>
</dependency>
<dependency>
<groupId>com.querydsl</groupId>
<artifactId>querydsl-jpa</artifactId>
</dependency>
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
@@ -41,4 +49,53 @@
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>com.mysema.maven</groupId>
<artifactId>apt-maven-plugin</artifactId>
<version>1.1.3</version>
<executions>
<execution>
<phase>generate-sources</phase>
<goals>
<goal>process</goal>
</goals>
<configuration>
<outputDirectory>${project.build.directory}/generated-sources</outputDirectory>
<processor>com.querydsl.apt.jpa.JPAAnnotationProcessor</processor>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.bsc.maven</groupId>
<artifactId>maven-processor-plugin</artifactId>
<version>3.3.3</version>
<executions>
<execution>
<id>process</id>
<goals>
<goal>process</goal>
</goals>
<phase>generate-sources</phase>
<configuration>
<outputDirectory>${project.build.directory}/generated-sources</outputDirectory>
<processors>
<processor>org.hibernate.jpamodelgen.JPAMetaModelEntityProcessor</processor>
</processors>
</configuration>
</execution>
</executions>
<dependencies>
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-jpamodelgen</artifactId>
<version>5.6.11.Final</version>
</dependency>
</dependencies>
</plugin>
</plugins>
</build>
</project>

@@ -0,0 +1,76 @@
package com.baeldung.spring.data.persistence.springdatajpadifference.model;
import java.io.Serializable;
import java.util.Objects;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.NamedQuery;
import javax.persistence.Table;
@Entity
@Table(name = "employee")
@NamedQuery(name = "Employee.findById", query = "SELECT e FROM Employee e WHERE e.id = :id")
public class Employee implements Serializable {
@Id
@GeneratedValue(strategy = GenerationType.AUTO)
private Long id;
@Column(nullable = false)
private String firstName;
@Column(nullable = false)
private String lastName;
@Column(nullable = false)
private String email;
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public String getFirstName() {
return firstName;
}
public void setFirstName(String firstName) {
this.firstName = firstName;
}
public String getLastName() {
return lastName;
}
public void setLastName(String lastName) {
this.lastName = lastName;
}
public String getEmail() {
return email;
}
public void setEmail(String email) {
this.email = email;
}
@Override
public boolean equals(Object o) {
if (this == o)
return true;
if (o == null || getClass() != o.getClass())
return false;
Employee employee = (Employee) o;
return Objects.equals(id, employee.id) && Objects.equals(firstName, employee.firstName) && Objects.equals(lastName, employee.lastName) && Objects.equals(email, employee.email);
}
@Override
public int hashCode() {
return Objects.hash(id, firstName, lastName, email);
}
}

@@ -0,0 +1,66 @@
package com.baeldung.spring.data.persistence.springdatajpadifference.springdata.config;
import java.util.Properties;
import javax.persistence.EntityManager;
import javax.sql.DataSource;
import org.springframework.boot.jdbc.DataSourceBuilder;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.jpa.repository.config.EnableJpaRepositories;
import org.springframework.orm.jpa.JpaTransactionManager;
import org.springframework.orm.jpa.JpaVendorAdapter;
import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean;
import org.springframework.orm.jpa.vendor.HibernateJpaVendorAdapter;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.annotation.EnableTransactionManagement;
import com.baeldung.spring.data.persistence.springdatajpadifference.springdata.repository.EmployeeRepository;
import com.querydsl.jpa.impl.JPAQueryFactory;
@Configuration
@EnableTransactionManagement
@EnableJpaRepositories(basePackageClasses = EmployeeRepository.class)
public class SpringDataJpaConfig {
@Bean
public LocalContainerEntityManagerFactoryBean entityManagerFactory(DataSource dataSource) {
LocalContainerEntityManagerFactoryBean em = new LocalContainerEntityManagerFactoryBean();
em.setDataSource(dataSource);
em.setPackagesToScan("com.baeldung.spring.data.persistence.springdatajpadifference.model");
JpaVendorAdapter vendorAdapter = new HibernateJpaVendorAdapter();
em.setJpaVendorAdapter(vendorAdapter);
Properties properties = new Properties();
properties.setProperty("hibernate.hbm2ddl.auto", "create-drop");
properties.setProperty("hibernate.dialect", "org.hibernate.dialect.H2Dialect");
em.setJpaProperties(properties);
return em;
}
@Bean
public PlatformTransactionManager transactionManager(LocalContainerEntityManagerFactoryBean entityManagerFactoryBean) {
JpaTransactionManager transactionManager = new JpaTransactionManager();
transactionManager.setEntityManagerFactory(entityManagerFactoryBean.getObject());
return transactionManager;
}
@Bean
public DataSource dataSource() {
return DataSourceBuilder.create()
.url("jdbc:h2:mem:db;DB_CLOSE_DELAY=-1")
.driverClassName("org.h2.Driver")
.username("sa")
.password("sa")
.build();
}
@Bean
public JPAQueryFactory jpaQueryFactory(EntityManager entityManager) {
return new JPAQueryFactory(entityManager);
}
}

@@ -0,0 +1,19 @@
package com.baeldung.spring.data.persistence.springdatajpadifference.springdata.repository;
import java.util.List;
import org.springframework.data.domain.Sort;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;
import org.springframework.stereotype.Repository;
import com.baeldung.spring.data.persistence.springdatajpadifference.model.Employee;
@Repository
public interface EmployeeRepository extends JpaRepository<Employee, Long> {
List<Employee> findByFirstName(String firstName);
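// explicit JPQL query; the Sort argument passed by the caller is applied on top of it at execution time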
@Query(value = "SELECT e FROM Employee e")
List<Employee> findAllEmployee(Sort sort);
}

@@ -0,0 +1,11 @@
package com.baeldung.spring.data.persistence.springdatajpadifference.springdata.repository;
import org.springframework.data.repository.PagingAndSortingRepository;
import org.springframework.stereotype.Repository;
import com.baeldung.spring.data.persistence.springdatajpadifference.model.Employee;
@Repository
public interface EmployeeRepositoryPagingAndSort extends PagingAndSortingRepository<Employee, Long> {
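// inherits findAll(Pageable) and findAll(Sort) from PagingAndSortingRepository, used by the paging test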
}

@@ -0,0 +1,201 @@
package com.baeldung.spring.data.persistence.springdatajpadifference;
import static com.baeldung.spring.data.persistence.springdatajpadifference.TestUtils.employee;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import java.util.Arrays;
import java.util.List;
import javax.persistence.EntityManager;
import javax.persistence.EntityManagerFactory;
import javax.persistence.Persistence;
import javax.persistence.Query;
import javax.persistence.TypedQuery;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.CriteriaUpdate;
import javax.persistence.criteria.Root;
import org.junit.Before;
import org.junit.Test;
import com.baeldung.spring.data.persistence.springdatajpadifference.model.Employee;
import com.baeldung.spring.data.persistence.springdatajpadifference.model.Employee_;
public class JpaDaoIntegrationTest {
private final EntityManagerFactory emf = Persistence.createEntityManagerFactory("pu-test");
private final EntityManager entityManager = emf.createEntityManager();
@Before
public void setup() {
deleteAllEmployees();
}
@Test
public void givenPersistedEmployee_whenFindById_thenEmployeeIsFound() {
Employee employee = employee("John", "Doe");
save(employee);
assertEquals(employee, entityManager.find(Employee.class, employee.getId()));
}
@Test
public void givenPersistedEmployee_whenFindByIdCriteriaQuery_thenEmployeeIsFound() {
Employee employee = employee("John", "Doe");
save(employee);
CriteriaBuilder criteriaBuilder = entityManager.getCriteriaBuilder();
CriteriaQuery<Employee> criteriaQuery = criteriaBuilder.createQuery(Employee.class);
Root<Employee> root = criteriaQuery.from(Employee.class);
criteriaQuery.select(root);
criteriaQuery.where(criteriaBuilder.equal(root.get(Employee_.ID), employee.getId()));
assertEquals(employee, entityManager.createQuery(criteriaQuery)
.getSingleResult());
}
@Test
public void givenPersistedEmployee_whenFindByIdJpql_thenEmployeeIsFound() {
Employee employee = employee("John", "Doe");
save(employee);
Query jpqlQuery = entityManager.createQuery("SELECT e from Employee e WHERE e.id=:id");
jpqlQuery.setParameter("id", employee.getId());
assertEquals(employee, jpqlQuery.getSingleResult());
}
@Test
public void givenPersistedEmployee_whenFindByIdNamedQuery_thenEmployeeIsFound() {
Employee employee = employee("John", "Doe");
save(employee);
Query query = entityManager.createNamedQuery("Employee.findById");
query.setParameter(Employee_.ID, employee.getId());
assertEquals(employee, query.getSingleResult());
}
@Test
public void givenPersistedEmployee_whenFindWithPaginationAndSort_thenEmployeesAreFound() {
Employee john = employee("John", "Doe");
Employee bob = employee("Bob", "Smith");
Employee frank = employee("Frank", "Brown");
Employee james = employee("James", "Smith");
save(john);
save(bob);
save(frank);
save(james);
CriteriaBuilder criteriaBuilder = entityManager.getCriteriaBuilder();
CriteriaQuery<Employee> criteriaQuery = criteriaBuilder.createQuery(Employee.class);
Root<Employee> root = criteriaQuery.from(Employee.class);
criteriaQuery.select(root);
criteriaQuery.orderBy(criteriaBuilder.asc(root.get(Employee_.FIRST_NAME)));
TypedQuery<Employee> query = entityManager.createQuery(criteriaQuery);
query.setFirstResult(0);
query.setMaxResults(3);
List<Employee> employeeList = query.getResultList();
assertEquals(Arrays.asList(bob, frank, james), employeeList);
}
@Test
public void givenPersistedEmployee_whenUpdateEmployeeEmail_thenEmployeeHasUpdatedEmail() {
Employee employee = employee("John", "Doe");
save(employee);
Employee employeeToUpdate = entityManager.find(Employee.class, employee.getId());
String updatedEmail = "email@gmail.com";
employeeToUpdate.setEmail(updatedEmail);
update(employeeToUpdate);
assertEquals(updatedEmail, entityManager.find(Employee.class, employee.getId())
.getEmail());
}
@Test
public void givenPersistedEmployee_whenUpdateEmployeeEmailWithCriteria_thenEmployeeHasUpdatedEmail() {
Employee employee = employee("John", "Doe");
save(employee);
String updatedEmail = "email@gmail.com";
CriteriaBuilder criteriaBuilder = entityManager.getCriteriaBuilder();
CriteriaUpdate<Employee> criteriaUpdate = criteriaBuilder.createCriteriaUpdate(Employee.class);
Root<Employee> root = criteriaUpdate.from(Employee.class);
criteriaUpdate.set(Employee_.EMAIL, updatedEmail);
criteriaUpdate.where(criteriaBuilder.equal(root.get(Employee_.ID), employee.getId()));
assertEquals(1, update(criteriaUpdate));
assertEquals(updatedEmail, entityManager.find(Employee.class, employee.getId())
.getEmail());
}
@Test
public void givenPersistedEmployee_whenRemoveEmployee_thenNoEmployeeIsFound() {
Employee employee = employee("John", "Doe");
save(employee);
delete(employee.getId());
assertNull(entityManager.find(Employee.class, employee.getId()));
}
private void deleteAllEmployees() {
entityManager.getTransaction()
.begin();
entityManager.createNativeQuery("DELETE from Employee")
.executeUpdate();
entityManager.getTransaction()
.commit();
}
public void save(Employee entity) {
entityManager.getTransaction()
.begin();
entityManager.persist(entity);
entityManager.getTransaction()
.commit();
}
public void update(Employee entity) {
entityManager.getTransaction()
.begin();
entityManager.merge(entity);
entityManager.getTransaction()
.commit();
}
public void delete(Long employee) {
entityManager.getTransaction()
.begin();
entityManager.remove(entityManager.find(Employee.class, employee));
entityManager.getTransaction()
.commit();
}
public int update(CriteriaUpdate<Employee> criteriaUpdate) {
entityManager.getTransaction()
.begin();
int result = entityManager.createQuery(criteriaUpdate)
.executeUpdate();
entityManager.getTransaction()
.commit();
entityManager.clear();
return result;
}
}

@@ -0,0 +1,153 @@
package com.baeldung.spring.data.persistence.springdatajpadifference;
import static com.baeldung.spring.data.persistence.springdatajpadifference.TestUtils.employee;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertFalse;
import java.util.Arrays;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;
import org.springframework.test.annotation.Rollback;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import org.springframework.transaction.annotation.Transactional;
import com.baeldung.spring.data.persistence.springdatajpadifference.model.Employee;
import com.baeldung.spring.data.persistence.springdatajpadifference.model.QEmployee;
import com.baeldung.spring.data.persistence.springdatajpadifference.springdata.config.SpringDataJpaConfig;
import com.baeldung.spring.data.persistence.springdatajpadifference.springdata.repository.EmployeeRepository;
import com.baeldung.spring.data.persistence.springdatajpadifference.springdata.repository.EmployeeRepositoryPagingAndSort;
import com.querydsl.jpa.impl.JPAQueryFactory;
@ContextConfiguration(classes = SpringDataJpaConfig.class)
@RunWith(SpringJUnit4ClassRunner.class)
@Transactional
@Rollback
public class SpringDataJpaIntegrationTest {
@Autowired
private EmployeeRepository employeeRepository;
@Autowired
private EmployeeRepositoryPagingAndSort employeeRepositoryPagingAndSort;
@Autowired
private JPAQueryFactory jpaQueryFactory;
@Test
public void givenPersistedEmployee_whenFindById_thenEmployeeIsFound() {
Employee employee = employee("John", "Doe");
employeeRepository.save(employee);
assertEquals(Optional.of(employee), employeeRepository.findById(employee.getId()));
}
@Test
public void givenPersistedEmployee_whenFindByFirstName_thenEmployeeIsFound() {
Employee employee = employee("John", "Doe");
employeeRepository.save(employee);
assertEquals(employee, employeeRepository.findByFirstName(employee.getFirstName())
.get(0));
}
@Test
public void givenPersistedEmployee_whenUpdateEmployeeEmail_thenEmployeeHasUpdatedEmail() {
Employee employee = employee("John", "Doe");
employeeRepository.save(employee);
Employee employeeToUpdate = employeeRepository.findById(employee.getId())
.orElse(null);
assertNotNull(employeeToUpdate);
assertEquals(employee, employeeToUpdate);
String updatedEmail = "email@gmail.com";
employeeToUpdate.setEmail(updatedEmail);
employeeRepository.save(employeeToUpdate);
assertEquals(Optional.of(employeeToUpdate), employeeRepository.findById(employee.getId()));
}
@Test
public void givenPersistedEmployee_whenRemoveEmployee_thenNoEmployeeIsFound() {
Employee employee = employee("John", "Doe");
employeeRepository.save(employee);
Employee persistedEmployee = employeeRepository.findById(employee.getId())
.orElse(null);
assertNotNull(persistedEmployee);
employeeRepository.delete(persistedEmployee);
assertFalse(employeeRepository.findById(employee.getId())
.isPresent());
}
@Test
public void givenPersistedEmployees_whenFindSortedByFirstName_thenEmployeeAreFoundInOrder() {
Employee john = employee("John", "Doe");
Employee bob = employee("Bob", "Smith");
Employee frank = employee("Frank", "Brown");
employeeRepository.saveAll(Arrays.asList(john, bob, frank));
List<Employee> employees = employeeRepository.findAllEmployee(Sort.by("firstName"));
assertEquals(3, employees.size());
assertEquals(bob, employees.get(0));
assertEquals(frank, employees.get(1));
assertEquals(john, employees.get(2));
}
@Test
public void givenPersistedEmployee_whenFindByQueryDsl_thenEmployeeIsFound() {
Employee john = employee("John", "Doe");
Employee frank = employee("Frank", "Doe");
employeeRepository.saveAll(Arrays.asList(john, frank));
QEmployee employeePath = QEmployee.employee;
List<Employee> employees = jpaQueryFactory.selectFrom(employeePath)
.where(employeePath.firstName.eq("John"), employeePath.lastName.eq("Doe"))
.fetch();
assertEquals(1, employees.size());
assertEquals(john, employees.get(0));
}
@Test
public void givenPersistedEmployee_whenFindBySortAndPagingRepository_thenEmployeeAreFound() {
Employee john = employee("John", "Doe");
Employee bob = employee("Bob", "Smith");
Employee frank = employee("Frank", "Brown");
Employee jimmy = employee("Jimmy", "Armstrong");
employeeRepositoryPagingAndSort.saveAll(Arrays.asList(john, bob, frank, jimmy));
Pageable pageable = PageRequest.of(0, 2, Sort.by("firstName"));
Page<Employee> employees = employeeRepositoryPagingAndSort.findAll(pageable);
assertEquals(Arrays.asList(bob, frank), employees.get()
.collect(Collectors.toList()));
}
}

@@ -0,0 +1,15 @@
package com.baeldung.spring.data.persistence.springdatajpadifference;
import com.baeldung.spring.data.persistence.springdatajpadifference.model.Employee;
public class TestUtils {
public static Employee employee(String firstName, String lastname) {
Employee employee = new Employee();
employee.setFirstName(firstName);
employee.setLastName(lastname);
employee.setEmail(firstName + lastname + "@baeldung.com");
return employee;
}
}

@@ -0,0 +1,21 @@
<?xml version="1.0" encoding="UTF-8"?>
<persistence xmlns="http://xmlns.jcp.org/xml/ns/persistence"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://xmlns.jcp.org/xml/ns/persistence
http://xmlns.jcp.org/xml/ns/persistence/persistence_2_1.xsd"
version="2.1">
<persistence-unit name="pu-test">
<provider>org.hibernate.jpa.HibernatePersistenceProvider</provider>
<class>com.baeldung.spring.data.persistence.springdatajpadifference.model.Employee</class>
<properties>
<property name="javax.persistence.jdbc.driver" value="org.h2.Driver"/>
<property name="javax.persistence.jdbc.url" value="jdbc:h2:mem:test;DB_CLOSE_DELAY=-1"/>
<property name="javax.persistence.jdbc.user" value="sa"/>
<property name="javax.persistence.jdbc.password" value=""/>
<property name="hibernate.dialect" value="org.hibernate.dialect.H2Dialect"/>
<property name="hibernate.hbm2ddl.auto" value="create-drop"/>
</properties>
</persistence-unit>
</persistence>

@@ -1168,6 +1168,7 @@
<module>spring-boot-modules/spring-boot-camel</module>
<module>spring-boot-modules/spring-boot-3</module>
<module>spring-boot-modules/spring-boot-3-native</module>
<module>spring-boot-modules/spring-boot-3-observation</module>
<module>spring-swagger-codegen/custom-validations-opeanpi-codegen</module>
<module>testing-modules/testing-assertions</module>
<module>persistence-modules/fauna</module>
@@ -1251,6 +1252,7 @@
<module>spring-boot-modules/spring-boot-camel</module>
<module>spring-boot-modules/spring-boot-3</module>
<module>spring-boot-modules/spring-boot-3-native</module>
<module>spring-boot-modules/spring-boot-3-observation</module>
<module>spring-swagger-codegen/custom-validations-opeanpi-codegen</module>
<module>testing-modules/testing-assertions</module>
<module>persistence-modules/fauna</module>

@@ -0,0 +1,66 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<artifactId>spring-boot-3-observation</artifactId>
<version>0.0.1-SNAPSHOT</version>
<name>spring-boot-3-observation</name>
<description>Demo project for Spring Boot 3 Observation</description>
<parent>
<groupId>com.baeldung</groupId>
<artifactId>parent-boot-3</artifactId>
<version>0.0.1-SNAPSHOT</version>
<relativePath>../../parent-boot-3</relativePath>
</parent>
<dependencies>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-web</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-test</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>io.micrometer</groupId>
<artifactId>micrometer-tracing</artifactId>
</dependency>
<dependency>
<groupId>io.micrometer</groupId>
<artifactId>micrometer-tracing-bridge-brave</artifactId>
<!--artifactId>micrometer-tracing-bridge-otel</artifactId-->
</dependency>
<dependency>
<groupId>io.micrometer</groupId>
<artifactId>micrometer-observation-test</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>io.micrometer</groupId>
<artifactId>micrometer-tracing-test</artifactId>
<scope>test</scope>
</dependency>
<!-- get ObservationRegistry configured -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-actuator</artifactId>
</dependency>
<!-- @Observed -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-aop</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-devtools</artifactId>
<scope>runtime</scope>
<optional>true</optional>
</dependency>
</dependencies>
</project>

@@ -0,0 +1,13 @@
package com.baeldung.samples;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
@SpringBootApplication
public class GreetingApplication {
public static void main(String[] args) {
SpringApplication.run(GreetingApplication.class, args);
}
}

@@ -0,0 +1,66 @@
package com.baeldung.samples;
import io.micrometer.core.instrument.Measurement;
import io.micrometer.core.instrument.Statistic;
import io.micrometer.core.instrument.observation.DefaultMeterObservationHandler;
import io.micrometer.core.instrument.simple.SimpleMeterRegistry;
import io.micrometer.observation.Observation;
import io.micrometer.observation.ObservationRegistry;
import io.micrometer.observation.ObservationTextPublisher;
import java.util.Optional;
import java.util.stream.StreamSupport;
public class SimpleObservationApplication {
// we can run this as a simple command line application
public static void main(String[] args) {
// create registry
final var observationRegistry = ObservationRegistry.create();
// create meter registry and observation handler
final var meterRegistry = new SimpleMeterRegistry();
final var meterObservationHandler = new DefaultMeterObservationHandler(meterRegistry);
// create simple logging observation handler
final var loggingObservationHandler = new ObservationTextPublisher(System.out::println);
// register observation handlers
observationRegistry
.observationConfig()
.observationHandler(meterObservationHandler)
.observationHandler(loggingObservationHandler);
// make an observation
Observation.Context context = new Observation.Context();
String observationName = "obs1";
Observation observation = Observation
.createNotStarted(observationName, () -> context, observationRegistry)
.lowCardinalityKeyValue("gender", "male")
.highCardinalityKeyValue("age", "41");
for (int i = 0; i < 10; i++) {
observation.observe(SimpleObservationApplication::doSomeAction);
}
meterRegistry.getMeters().forEach(m -> {
System.out.println(m.getId() + "\n============");
m.measure().forEach(ms -> System.out.println(ms.getValue() + " [" + ms.getStatistic() + "]"));
System.out.println("----------------------------");
});
Optional<Double> maximumDuration = meterRegistry.getMeters().stream()
.filter(m -> "obs1".equals(m.getId().getName()))
.flatMap(m -> StreamSupport.stream(m.measure().spliterator(), false))
.filter(ms -> ms.getStatistic() == Statistic.MAX)
.findFirst()
.map(Measurement::getValue);
System.out.println(maximumDuration);
}
private static void doSomeAction() {
try {
Thread.sleep(Math.round(Math.random() * 1000));
System.out.println("Hello World!");
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
}
}

@@ -0,0 +1,26 @@
package com.baeldung.samples.boundary;
import com.baeldung.samples.domain.GreetingService;
import org.springframework.http.MediaType;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseBody;
@Controller
@RequestMapping("/greet")
public class GreetingController {
private final GreetingService service;
public GreetingController(GreetingService service) {
this.service = service;
}
@GetMapping(produces = MediaType.TEXT_PLAIN_VALUE)
@ResponseBody
public String sayHello() {
return this.service.sayHello();
}
}

View File

@ -0,0 +1,22 @@
package com.baeldung.samples.boundary;
import io.micrometer.observation.ObservationRegistry;
import org.springframework.boot.autoconfigure.condition.ConditionalOnBean;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.filter.ServerHttpObservationFilter;
@Configuration
public class ObservationFilterConfiguration {
// if an ObservationRegistry is already configured
@ConditionalOnBean(ObservationRegistry.class)
// only if Actuator has not already registered a ServerHttpObservationFilter
@ConditionalOnMissingBean(ServerHttpObservationFilter.class)
@Bean
public ServerHttpObservationFilter observationFilter(ObservationRegistry registry) {
return new ServerHttpObservationFilter(registry);
}
}

View File

@ -0,0 +1,28 @@
package com.baeldung.samples.config;
import io.micrometer.observation.ObservationHandler;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.event.ContextRefreshedEvent;
import org.springframework.context.event.EventListener;
import org.springframework.stereotype.Component;
@Component
public class ObservationHandlerLogger {
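// logs every ObservationHandler bean once the application context has been refreshed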
private static final Logger log = LoggerFactory.getLogger(ObservationHandlerLogger.class);
private static String toString(ObservationHandler<?> handler) {
return handler.getClass().getName() + " [ " + handler + "]";
}
@EventListener(ContextRefreshedEvent.class)
public void logObservationHandlers(ContextRefreshedEvent evt) {
evt.getApplicationContext().getBeansOfType(ObservationHandler.class)
.values()
.stream()
.map(ObservationHandlerLogger::toString)
.forEach(log::info);
}
}

View File

@ -0,0 +1,21 @@
package com.baeldung.samples.config;
import io.micrometer.observation.Observation;
import io.micrometer.observation.ObservationHandler;
import io.micrometer.observation.ObservationTextPublisher;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
@Configuration
public class ObservationTextPublisherConfiguration {
private static final Logger log = LoggerFactory.getLogger(ObservationTextPublisherConfiguration.class);
@Bean
public ObservationHandler<Observation.Context> observationTextPublisher() {
return new ObservationTextPublisher(log::info);
}
}

View File

@ -0,0 +1,20 @@
package com.baeldung.samples.config;
import io.micrometer.observation.ObservationRegistry;
import io.micrometer.observation.aop.ObservedAspect;
import org.springframework.boot.autoconfigure.AutoConfiguration;
import org.springframework.boot.autoconfigure.condition.ConditionalOnClass;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.context.annotation.Bean;
@AutoConfiguration
@ConditionalOnClass(ObservedAspect.class)
public class ObservedAspectConfiguration {
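// provides the aspect behind @Observed; only created when no other ObservedAspect bean is present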
@Bean
@ConditionalOnMissingBean
public ObservedAspect observedAspect(ObservationRegistry observationRegistry) {
return new ObservedAspect(observationRegistry);
}
}

View File

@ -0,0 +1,59 @@
package com.baeldung.samples.config;
import io.micrometer.observation.Observation;
import io.micrometer.observation.ObservationHandler;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
@Component
public class SimpleLoggingHandler implements ObservationHandler<Observation.Context> {
private static final Logger log = LoggerFactory.getLogger(SimpleLoggingHandler.class);
private static String toString(Observation.Context context) {
return null == context ? "(no context)" : context.getName()
+ " (" + context.getClass().getName() + "@" + System.identityHashCode(context) + ")";
}
private static String toString(Observation.Event event) {
return null == event ? "(no event)" : event.getName();
}
@Override
public boolean supportsContext(Observation.Context context) {
return true;
}
@Override
public void onStart(Observation.Context context) {
log.info("Starting context " + toString(context));
}
@Override
public void onError(Observation.Context context) {
log.info("Error for context " + toString(context));
}
@Override
public void onEvent(Observation.Event event, Observation.Context context) {
log.info("Event for context " + toString(context) + " [" + toString(event) + "]");
}
@Override
public void onScopeOpened(Observation.Context context) {
log.info("Scope opened for context " + toString(context));
}
@Override
public void onScopeClosed(Observation.Context context) {
log.info("Scope closed for context " + toString(context));
}
@Override
public void onStop(Observation.Context context) {
log.info("Stopping context " + toString(context));
}
}

View File

@ -0,0 +1,14 @@
package com.baeldung.samples.domain;
import io.micrometer.observation.annotation.Observed;
import org.springframework.stereotype.Service;
@Observed(name = "greetingService")
@Service
public class GreetingService {
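// ObservedAspect wraps calls to this service in an observation named "greetingService"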
public String sayHello() {
return "Hello World!";
}
}

View File

@ -0,0 +1,6 @@
management:
endpoints:
web:
exposure:
include: '*'
#health,info,beans,metrics,startup

View File

@ -0,0 +1,17 @@
package com.baeldung.samples.config;
import io.micrometer.observation.Observation;
import io.micrometer.observation.ObservationHandler;
import io.micrometer.observation.tck.AnyContextObservationHandlerCompatibilityKit;
class SimpleLoggingHandlerUnitTest
extends AnyContextObservationHandlerCompatibilityKit {
SimpleLoggingHandler handler = new SimpleLoggingHandler();
@Override
public ObservationHandler<Observation.Context> handler() {
return handler;
}
}

View File

@ -0,0 +1,44 @@
package com.baeldung.samples.domain;
import com.baeldung.samples.config.ObservedAspectConfiguration;
import io.micrometer.observation.tck.TestObservationRegistry;
import io.micrometer.tracing.test.simple.SimpleTracer;
import org.springframework.boot.test.autoconfigure.actuate.observability.AutoConfigureObservability;
import org.springframework.boot.test.context.TestConfiguration;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Import;
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Inherited;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
@Documented
@Inherited
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
@AutoConfigureObservability
@Import({
ObservedAspectConfiguration.class,
EnableTestObservation.ObservationTestConfiguration.class
})
public @interface EnableTestObservation {
@TestConfiguration
class ObservationTestConfiguration {
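// supplies a TestObservationRegistry and a SimpleTracer so tests can assert on recorded observations and spans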
@Bean
TestObservationRegistry observationRegistry() {
return TestObservationRegistry.create();
}
@Bean
SimpleTracer simpleTracer() {
return new SimpleTracer();
}
}
}

View File

@ -0,0 +1,35 @@
package com.baeldung.samples.domain;
import io.micrometer.observation.tck.TestObservationRegistry;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.test.context.junit.jupiter.SpringExtension;
import static io.micrometer.observation.tck.TestObservationRegistryAssert.assertThat;
@ExtendWith(SpringExtension.class)
@ComponentScan(basePackageClasses = GreetingService.class)
@EnableAutoConfiguration
@EnableTestObservation
class GreetingServiceObservationIntegrationTest {
@Autowired
GreetingService service;
@Autowired
TestObservationRegistry registry;
@Test
void testObservation() {
// invoke service
service.sayHello();
assertThat(registry)
.hasObservationWithNameEqualTo("greetingService")
.that()
.hasBeenStarted()
.hasBeenStopped();
}
}

View File

@ -0,0 +1,42 @@
package com.baeldung.samples.domain;
import io.micrometer.tracing.test.simple.SimpleTracer;
import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.test.context.junit.jupiter.SpringExtension;
import static io.micrometer.tracing.test.simple.TracerAssert.assertThat;
@ExtendWith(SpringExtension.class)
@ComponentScan(basePackageClasses = GreetingService.class)
@EnableAutoConfiguration
@EnableTestObservation
class GreetingServiceTracingIntegrationTest {
@Autowired
GreetingService service;
@Value("${management.tracing.enabled:true}")
boolean tracingEnabled;
@Autowired
SimpleTracer tracer;
@Test
void testEnabledTracing() {
Assertions.assertThat(tracingEnabled).isTrue();
}
@Test
void testTracingForGreeting() {
service.sayHello();
assertThat(tracer)
.onlySpan()
.hasNameEqualTo("greeting-service#say-hello")
.isEnded();
}
}

View File

@ -1,7 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<artifactId>spring-boot-graphql</artifactId>
<name>spring-boot-graphql</name>
@ -13,6 +13,47 @@
<version>1.0.0-SNAPSHOT</version>
</parent>
<build>
<extensions>
<extension>
<groupId>kr.motd.maven</groupId>
<artifactId>os-maven-plugin</artifactId>
<version>1.7.0</version>
</extension>
</extensions>
<plugins>
<plugin>
<groupId>org.xolstice.maven.plugins</groupId>
<artifactId>protobuf-maven-plugin</artifactId>
<version>${protobuf-plugin.version}</version>
<configuration>
<protocArtifact>com.google.protobuf:protoc:${protobuf.version}:exe:${os.detected.classifier}</protocArtifact>
<pluginId>grpc-java</pluginId>
<pluginArtifact>io.grpc:protoc-gen-grpc-java:${grpc.version}:exe:${os.detected.classifier}</pluginArtifact>
</configuration>
<executions>
<execution>
<goals>
<goal>compile</goal>
<goal>compile-custom</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
<properties>
<protobuf.version>3.19.2</protobuf.version>
<protobuf-plugin.version>0.6.1</protobuf-plugin.version>
<grpc.version>1.43.2</grpc.version>
<grpc.spring.version>2.13.1.RELEASE</grpc.spring.version>
<jsonassert.version>1.5.1</jsonassert.version>
<jakarta.annotation.version>1.3.5</jakarta.annotation.version>
<os-maven-plugin.version>1.6.2</os-maven-plugin.version>
<maven-war-plugin.version>3.3.2</maven-war-plugin.version>
</properties>
<dependencies>
<dependency>
<groupId>org.springframework.boot</groupId>
@ -78,45 +119,4 @@
</dependency>
</dependencies>
<build>
<extensions>
<extension>
<groupId>kr.motd.maven</groupId>
<artifactId>os-maven-plugin</artifactId>
<version>1.7.0</version>
</extension>
</extensions>
<plugins>
<plugin>
<groupId>org.xolstice.maven.plugins</groupId>
<artifactId>protobuf-maven-plugin</artifactId>
<version>${protobuf-plugin.version}</version>
<configuration>
<protocArtifact>com.google.protobuf:protoc:${protobuf.version}:exe:${os.detected.classifier}</protocArtifact>
<pluginId>grpc-java</pluginId>
<pluginArtifact>io.grpc:protoc-gen-grpc-java:${grpc.version}:exe:${os.detected.classifier}</pluginArtifact>
</configuration>
<executions>
<execution>
<goals>
<goal>compile</goal>
<goal>compile-custom</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
<properties>
<protobuf.version>3.19.2</protobuf.version>
<protobuf-plugin.version>0.6.1</protobuf-plugin.version>
<grpc.version>1.43.2</grpc.version>
<grpc.spring.version>2.13.1.RELEASE</grpc.spring.version>
<jsonassert.version>1.5.1</jsonassert.version>
<jakarta.annotation.version>1.3.5</jakarta.annotation.version>
<os-maven-plugin.version>1.6.2</os-maven-plugin.version>
<maven-war-plugin.version>3.3.2</maven-war-plugin.version>
</properties>
</project>

View File

@ -2,8 +2,10 @@ server:
port: 8082
spring:
main:
allow-bean-definition-overriding: true
graphql:
graphiql:
enabled: true
schema:
locations: classpath:chooseapi/
locations: classpath:chooseapi/

View File

@ -11,7 +11,9 @@ import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
@SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT, classes = ChooseApiApp.class)
@SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT,
properties = { "grpc.server.port=-1" }, // Disable gRPC external server
classes = ChooseApiApp.class)
@ActiveProfiles("chooseapi")
class BooksControllerGraphQLIntegrationTest {

View File

@ -9,13 +9,15 @@ import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.ActiveProfiles;
import org.springframework.test.web.servlet.MockMvc;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
@SpringBootTest
@SpringBootTest(properties = { "grpc.server.port=-1" }) // Disable gRPC external server
@ActiveProfiles("chooseapi")
@AutoConfigureMockMvc
class BooksControllerRestIntegrationTest {

View File

@ -1,44 +1,54 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<artifactId>spring-cloud-stream-kinesis</artifactId>
<name>spring-cloud-stream-kinesis</name>
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<artifactId>spring-cloud-stream-kinesis</artifactId>
<name>spring-cloud-stream-kinesis</name>
<parent>
<groupId>com.baeldung</groupId>
<artifactId>spring-cloud-stream</artifactId>
<version>1.0.0-SNAPSHOT</version>
</parent>
<parent>
<groupId>com.baeldung</groupId>
<artifactId>spring-cloud-stream</artifactId>
<version>1.0.0-SNAPSHOT</version>
</parent>
<dependencies>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-web</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-stream-binder-kinesis</artifactId>
<version>${spring-cloud-stream-kinesis-binder.version}</version>
</dependency>
<dependency>
<groupId>com.amazonaws</groupId>
<artifactId>aws-java-sdk-kinesis</artifactId>
<version>${aws-sdk.version}</version>
</dependency>
<dependency>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-stream-test-support</artifactId>
<version>${spring-cloud-stream-test.version}</version>
<scope>test</scope>
</dependency>
</dependencies>
<dependencies>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-web</artifactId>
</dependency>
<dependency>
<groupId>com.amazonaws</groupId>
<artifactId>aws-java-sdk-kinesis</artifactId>
<version>${aws-sdk.version}</version>
</dependency>
<dependency>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-stream-test-support</artifactId>
<version>${spring-cloud-stream-test.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.amazonaws</groupId>
<artifactId>amazon-kinesis-producer</artifactId>
<version>0.13.1</version>
</dependency>
<dependency>
<groupId>com.amazonaws</groupId>
<artifactId>amazon-kinesis-client</artifactId>
<version>1.11.2</version>
</dependency>
<dependency>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-stream-binder-kinesis</artifactId>
<version>${spring-cloud-stream-kinesis-binder.version}</version>
</dependency>
</dependencies>
<properties>
<aws-sdk.version>1.11.632</aws-sdk.version>
<spring-cloud-stream-kinesis-binder.version>2.0.2.RELEASE</spring-cloud-stream-kinesis-binder.version>
<spring-cloud-stream-test.version>2.2.1.RELEASE</spring-cloud-stream-test.version>
</properties>
<properties>
<aws-sdk.version>1.11.632</aws-sdk.version>
<spring-cloud-stream-kinesis-binder.version>2.0.2.RELEASE</spring-cloud-stream-kinesis-binder.version>
<spring-cloud-stream-test.version>2.2.1.RELEASE</spring-cloud-stream-test.version>
</properties>
</project>

View File

@ -1,53 +0,0 @@
package com.baeldung;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.cloud.stream.annotation.EnableBinding;
import org.springframework.cloud.stream.annotation.StreamListener;
import org.springframework.cloud.stream.messaging.Processor;
import org.springframework.context.annotation.Bean;
import org.springframework.messaging.support.MessageBuilder;
import com.amazonaws.auth.AWSStaticCredentialsProvider;
import com.amazonaws.auth.BasicAWSCredentials;
import com.amazonaws.regions.Regions;
import com.amazonaws.services.kinesis.AmazonKinesis;
import com.amazonaws.services.kinesis.AmazonKinesisClientBuilder;
@SpringBootApplication
@EnableBinding(Processor.class)
public class KinesisApplication {
@Value("${aws.access.key}")
private String accessKey;
@Value("${aws.secret.key}")
private String secretKey;
@Autowired
private Processor processor;
public static void main(String[] args) {
SpringApplication.run(KinesisApplication.class, args);
}
@Bean
public AmazonKinesis buildAmazonKinesis() {
BasicAWSCredentials awsCredentials = new BasicAWSCredentials(accessKey, secretKey);
return AmazonKinesisClientBuilder.standard()
.withCredentials(new AWSStaticCredentialsProvider(awsCredentials))
.withRegion(Regions.EU_CENTRAL_1)
.build();
}
@StreamListener(Processor.INPUT)
public void consume(String val) {
System.out.println(val);
}
public void produce(String val) {
processor.output().send(MessageBuilder.withPayload(val).build());
}
}

View File

@ -0,0 +1,16 @@
package com.baeldung.binder;
import org.springframework.cloud.stream.annotation.EnableBinding;
import org.springframework.cloud.stream.annotation.StreamListener;
import org.springframework.cloud.stream.messaging.Sink;
import org.springframework.stereotype.Component;
@Component
@EnableBinding(Sink.class)
public class ConsumerBinder {
@StreamListener(Sink.INPUT)
public void consume(String ip) {
System.out.println(ip);
}
}

View File

@ -0,0 +1,12 @@
package com.baeldung.binder;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
@SpringBootApplication
public class KinesisBinderApplication {
public static void main(String[] args) {
SpringApplication.run(KinesisBinderApplication.class, args);
}
}

View File

@ -0,0 +1,24 @@
package com.baeldung.binder;
import java.util.stream.IntStream;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.cloud.stream.annotation.EnableBinding;
import org.springframework.cloud.stream.messaging.Source;
import org.springframework.messaging.support.MessageBuilder;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;
@Component
@EnableBinding(Source.class)
public class ProducerBinder {
@Autowired
private Source source;
@Scheduled(fixedDelay = 3000L)
private void produce() {
IntStream.range(1, 200).mapToObj(ipSuffix -> "192.168.0." + ipSuffix)
.forEach(entry -> source.output().send(MessageBuilder.withPayload(entry).build()));
}
}

View File

@ -1,4 +1,4 @@
package com.baeldung;
package com.baeldung.kclkpl;
import com.amazonaws.services.kinesis.clientlibrary.interfaces.v2.IRecordProcessor;
import com.amazonaws.services.kinesis.clientlibrary.types.InitializationInput;

View File

@ -1,4 +1,4 @@
package com.baeldung;
package com.baeldung.kclkpl;
import com.amazonaws.services.kinesis.clientlibrary.interfaces.v2.IRecordProcessor;
import com.amazonaws.services.kinesis.clientlibrary.interfaces.v2.IRecordProcessorFactory;

View File

@ -0,0 +1,30 @@
package com.baeldung.kclkpl;
import java.nio.ByteBuffer;
import java.util.stream.IntStream;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;
import com.amazonaws.services.kinesis.producer.KinesisProducer;
@Component
public class IpProducer {
@Value("${ips.stream}")
private String IPS_STREAM;
@Value("${ips.partition.key}")
private String IPS_PARTITION_KEY;
@Autowired
private KinesisProducer kinesisProducer;
@Scheduled(fixedDelay = 3000L)
private void produce() {
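// addUserRecord only buffers the record; the KPL batches and sends it to the stream asynchronously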
IntStream.range(1, 200).mapToObj(ipSuffix -> ByteBuffer.wrap(("192.168.0." + ipSuffix).getBytes()))
.forEach(entry -> kinesisProducer.addUserRecord(IPS_STREAM, IPS_PARTITION_KEY, entry));
}
}

View File

@ -0,0 +1,48 @@
package com.baeldung.kclkpl;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.ApplicationArguments;
import org.springframework.boot.ApplicationRunner;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import com.amazonaws.auth.AWSStaticCredentialsProvider;
import com.amazonaws.auth.BasicAWSCredentials;
import com.amazonaws.regions.Regions;
import com.amazonaws.services.kinesis.clientlibrary.lib.worker.KinesisClientLibConfiguration;
import com.amazonaws.services.kinesis.clientlibrary.lib.worker.Worker;
@SpringBootApplication
public class KinesisKCLApplication implements ApplicationRunner {
@Value("${aws.access.key}")
private String accessKey;
@Value("${aws.secret.key}")
private String secretKey;
@Value("${ips.stream}")
private String IPS_STREAM;
public static void main(String[] args) {
SpringApplication.run(KinesisKCLApplication.class, args);
}
@Override
public void run(ApplicationArguments args) throws Exception {
BasicAWSCredentials awsCredentials = new BasicAWSCredentials(accessKey, secretKey);
KinesisClientLibConfiguration consumerConfig = new KinesisClientLibConfiguration(
"KinesisKCLConsumer",
IPS_STREAM,
new AWSStaticCredentialsProvider(awsCredentials),
"KinesisKCLConsumer")
.withRegionName(Regions.EU_CENTRAL_1.getName());
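// the KCL Worker leases the stream's shards and feeds incoming records to IpProcessor instances created by the factory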
new Worker.Builder()
.recordProcessorFactory(new IpProcessorFactory())
.config(consumerConfig)
.build()
.run();
}
}

View File

@ -0,0 +1,38 @@
package com.baeldung.kclkpl;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.annotation.Bean;
import com.amazonaws.auth.AWSStaticCredentialsProvider;
import com.amazonaws.auth.BasicAWSCredentials;
import com.amazonaws.regions.Regions;
import com.amazonaws.services.kinesis.producer.KinesisProducer;
import com.amazonaws.services.kinesis.producer.KinesisProducerConfiguration;
@SpringBootApplication
public class KinesisKPLApplication {
@Value("${aws.access.key}")
private String accessKey;
@Value("${aws.secret.key}")
private String secretKey;
public static void main(String[] args) {
SpringApplication.run(KinesisKPLApplication.class, args);
}
@Bean
public KinesisProducer kinesisProducer() {
BasicAWSCredentials awsCredentials = new BasicAWSCredentials(accessKey, secretKey);
KinesisProducerConfiguration producerConfig = new KinesisProducerConfiguration()
.setCredentialsProvider(new AWSStaticCredentialsProvider(awsCredentials))
.setVerifyCertificate(false)
.setRegion(Regions.EU_CENTRAL_1.getName());
return new KinesisProducer(producerConfig);
}
}

View File

@ -1,12 +1,9 @@
package com.baeldung;
package com.baeldung.sdk;
import javax.annotation.PostConstruct;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.cloud.stream.annotation.EnableBinding;
import org.springframework.cloud.stream.annotation.StreamListener;
import org.springframework.cloud.stream.messaging.Sink;
import org.springframework.stereotype.Component;
import com.amazonaws.services.kinesis.AmazonKinesis;
@ -17,8 +14,7 @@ import com.amazonaws.services.kinesis.model.GetShardIteratorResult;
import com.amazonaws.services.kinesis.model.ShardIteratorType;
@Component
@EnableBinding(Sink.class)
public class IpConsumer {
public class ConsumerSDK {
@Value("${ips.stream}")
private String IPS_STREAM;
@ -31,12 +27,7 @@ public class IpConsumer {
private GetShardIteratorResult shardIterator;
@StreamListener(Sink.INPUT)
public void consume(String ip) {
System.out.println(ip);
}
private void consumeWithKinesis() {
public void consumeWithKinesis() {
GetRecordsRequest recordsRequest = new GetRecordsRequest();
recordsRequest.setShardIterator(shardIterator.getShardIterator());
recordsRequest.setLimit(25);

View File

@ -0,0 +1,35 @@
package com.baeldung.sdk;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.annotation.Bean;
import com.amazonaws.auth.AWSStaticCredentialsProvider;
import com.amazonaws.auth.BasicAWSCredentials;
import com.amazonaws.regions.Regions;
import com.amazonaws.services.kinesis.AmazonKinesis;
import com.amazonaws.services.kinesis.AmazonKinesisClientBuilder;
@SpringBootApplication
public class KinesisSDKApplication {
@Value("${aws.access.key}")
private String accessKey;
@Value("${aws.secret.key}")
private String secretKey;
public static void main(String[] args) {
SpringApplication.run(KinesisSDKApplication.class, args);
}
@Bean
public AmazonKinesis buildAmazonKinesis() {
BasicAWSCredentials awsCredentials = new BasicAWSCredentials(accessKey, secretKey);
return AmazonKinesisClientBuilder.standard()
.withCredentials(new AWSStaticCredentialsProvider(awsCredentials))
.withRegion(Regions.EU_CENTRAL_1)
.build();
}
}

View File

@ -1,4 +1,4 @@
package com.baeldung;
package com.baeldung.sdk;
import java.nio.ByteBuffer;
import java.util.List;
@ -7,9 +7,6 @@ import java.util.stream.IntStream;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.cloud.stream.annotation.EnableBinding;
import org.springframework.cloud.stream.messaging.Source;
import org.springframework.messaging.support.MessageBuilder;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;
@ -18,8 +15,7 @@ import com.amazonaws.services.kinesis.model.PutRecordsRequest;
import com.amazonaws.services.kinesis.model.PutRecordsRequestEntry;
@Component
@EnableBinding(Source.class)
public class IpProducer {
public class ProducerSDK {
@Value("${ips.partition.key}")
private String IPS_PARTITION_KEY;
@ -27,17 +23,9 @@ public class IpProducer {
@Value("${ips.stream}")
private String IPS_STREAM;
@Autowired
private Source source;
@Autowired
private AmazonKinesis kinesis;
@Scheduled(fixedDelay = 3000L)
private void produce() {
IntStream.range(1, 200).mapToObj(ipSuffix -> "192.168.0." + ipSuffix)
.forEach(entry -> source.output().send(MessageBuilder.withPayload(entry).build()));
}
@Scheduled(fixedDelay = 3000L)
private void produceWithKinesis() {
List<PutRecordsRequestEntry> entries = IntStream.range(1, 200).mapToObj(ipSuffix -> {

View File

@ -1,6 +1,12 @@
# configurations for AWS SDK consumer and producer
aws.access.key=my-aws-access-key-goes-here
aws.secret.key=my-aws-secret-key-goes-here
ips.partition.key=ips-partition-key
ips.stream=ips-stream
ips.shard.id=1
# configurations for Spring Cloud Stream Kinesis Binder consumer and producer
cloud.aws.credentials.access-key=my-aws-access-key
cloud.aws.credentials.secret-key=my-aws-secret-key
cloud.aws.region.static=eu-central-1
@ -11,8 +17,4 @@ spring.cloud.stream.bindings.input.group=live-ips-group
spring.cloud.stream.bindings.input.content-type=text/plain
spring.cloud.stream.bindings.output.destination=myStream
spring.cloud.stream.bindings.output.content-type=text/plain
ips.partition.key=ips-partition-key
ips.stream=ips-stream
ips.shard.id=1
spring.cloud.stream.bindings.output.content-type=text/plain

View File

@ -5,11 +5,13 @@ import org.junit.runner.RunWith;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringRunner;
import com.baeldung.kclkpl.KinesisKPLApplication;
/**
* Manual Test - this test needs a valid AWS Access Key and Secret to build the Amazon Kinesis client and complete successfully
*/
@RunWith(SpringRunner.class)
@SpringBootTest(classes = KinesisApplication.class)
@SpringBootTest(classes = KinesisKPLApplication.class)
public class KinesisApplicationManualTest {
@Test
public void whenSpringContextIsBootstrapped_thenNoExceptions() {

View File

@ -1,6 +1,12 @@
# configurations for AWS SDK consumer and producer
aws.access.key=my-aws-access-key-goes-here
aws.secret.key=my-aws-secret-key-goes-here
ips.partition.key=ips-partition-key
ips.stream=ips-stream
ips.shard.id=1
# configurations for Spring Cloud Stream Kinesis Binder consumer and producer
cloud.aws.credentials.access-key=my-aws-access-key
cloud.aws.credentials.secret-key=my-aws-secret-key
cloud.aws.region.static=eu-central-1
@ -11,8 +17,4 @@ spring.cloud.stream.bindings.input.group=live-ips-group
spring.cloud.stream.bindings.input.content-type=text/plain
spring.cloud.stream.bindings.output.destination=myStream
spring.cloud.stream.bindings.output.content-type=text/plain
ips.partition.key=ips-partition-key
ips.stream=ips-stream
ips.shard.id=1
spring.cloud.stream.bindings.output.content-type=text/plain

View File

@ -1,5 +1,6 @@
package com.baeldung.spring.kafka;
import java.net.SocketTimeoutException;
import java.util.HashMap;
import java.util.Map;
@ -8,15 +9,20 @@ import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import org.springframework.kafka.annotation.EnableKafka;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.listener.ContainerProperties;
import org.springframework.kafka.listener.DefaultErrorHandler;
import org.springframework.kafka.support.converter.RecordMessageConverter;
import org.springframework.kafka.support.converter.StringJsonMessageConverter;
import org.springframework.kafka.support.mapping.DefaultJackson2JavaTypeMapper;
import org.springframework.kafka.support.mapping.Jackson2JavaTypeMapper;
import org.springframework.kafka.support.serializer.JsonDeserializer;
import org.springframework.util.backoff.BackOff;
import org.springframework.util.backoff.FixedBackOff;
@EnableKafka
@Configuration
@ -25,6 +31,12 @@ public class KafkaConsumerConfig {
@Value(value = "${spring.kafka.bootstrap-servers}")
private String bootstrapAddress;
@Value(value = "${kafka.backoff.interval}")
private Long interval;
@Value(value = "${kafka.backoff.max_failure}")
private Long maxAttempts;
public ConsumerFactory<String, String> consumerFactory(String groupId) {
Map<String, Object> props = new HashMap<>();
props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapAddress);
@ -71,7 +83,7 @@ public class KafkaConsumerConfig {
public ConcurrentKafkaListenerContainerFactory<String, String> filterKafkaListenerContainerFactory() {
ConcurrentKafkaListenerContainerFactory<String, String> factory = kafkaListenerContainerFactory("filter");
factory.setRecordFilterStrategy(record -> record.value()
.contains("World"));
.contains("World"));
return factory;
}
@ -83,7 +95,7 @@ public class KafkaConsumerConfig {
}
@Bean
public ConcurrentKafkaListenerContainerFactory<String, Greeting> greetingKafkaListenerContainerFactory() {
public ConcurrentKafkaListenerContainerFactory<String, Greeting> kafkaListenerContainerFactory() {
ConcurrentKafkaListenerContainerFactory<String, Greeting> factory = new ConcurrentKafkaListenerContainerFactory<>();
factory.setConsumerFactory(greetingConsumerFactory());
return factory;
@ -109,15 +121,32 @@ public class KafkaConsumerConfig {
props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapAddress);
props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, JsonDeserializer.class);
props.put(ConsumerConfig.GROUP_ID_CONFIG, "group_id_test");
return new DefaultKafkaConsumerFactory<>(props);
}
@Bean
public ConcurrentKafkaListenerContainerFactory<String, Object> multiTypeKafkaListenerContainerFactory() {
@Primary
public ConcurrentKafkaListenerContainerFactory<String, Object> greetingKafkaListenerContainerFactory() {
ConcurrentKafkaListenerContainerFactory<String, Object> factory = new ConcurrentKafkaListenerContainerFactory<>();
factory.setConsumerFactory(multiTypeConsumerFactory());
factory.setMessageConverter(multiTypeConverter());
factory.setCommonErrorHandler(errorHandler());
factory.getContainerProperties()
.setAckMode(ContainerProperties.AckMode.RECORD);
return factory;
}
@Bean
public DefaultErrorHandler errorHandler() {
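// retry failed deliveries with a fixed delay (kafka.backoff.interval ms) up to kafka.backoff.max_failure attempts; afterwards the recoverer below is invoked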
BackOff fixedBackOff = new FixedBackOff(interval, maxAttempts);
DefaultErrorHandler errorHandler = new DefaultErrorHandler((consumerRecord, e) -> {
System.out.println(String.format("consumed record %s because this exception was thrown",consumerRecord.toString(),e.getClass().getName()));
}, fixedBackOff);
// Commented out so the tests exercise the default behaviour; retryable exceptions could be registered like this:
//errorHandler.addRetryableExceptions(SocketTimeoutException.class,RuntimeException.class);
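// NullPointerException is treated as fatal: it skips the retries and goes straight to the recoverer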
errorHandler.addNotRetryableExceptions(NullPointerException.class);
return errorHandler;
}
}

View File

@ -2,6 +2,7 @@ package com.baeldung.spring.kafka;
import org.springframework.kafka.annotation.KafkaHandler;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.messaging.MessagingException;
import org.springframework.stereotype.Component;
@Component
@ -9,7 +10,12 @@ import org.springframework.stereotype.Component;
public class MultiTypeKafkaListener {
@KafkaHandler
//@RetryableTopic(backoff = @Backoff(value = 3000L), attempts = "5", autoCreateTopics = "false",include = SocketTimeoutException.class, exclude = NullPointerException.class)
public void handleGreeting(Greeting greeting) {
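// a Greeting named "test" simulates a processing failure so the error handler's retry/back-off can be exercised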
if (greeting.getName()
.equalsIgnoreCase("test")) {
throw new MessagingException("test not allowed");
}
System.out.println("Greeting received: " + greeting);
}

View File

@ -0,0 +1,14 @@
package com.baeldung.spring.kafka;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.annotation.Import;
@SpringBootApplication
@Import(value = { KafkaTopicConfig.class, KafkaConsumerConfig.class, KafkaProducerConfig.class })
public class RetryableApplicationKafkaApp {
public static void main(String[] args) {
SpringApplication.run(RetryableApplicationKafkaApp.class, args);
}
}

View File

@ -14,4 +14,7 @@ monitor.producer.simulate=true
monitor.consumer.simulate=true
monitor.kafka.consumer.groupid.simulate=baeldungGrpSimulate
test.topic=testtopic1
kafka.backoff.interval=9000
kafka.backoff.max_failure=5

View File

@ -1,7 +1,6 @@
package com.baeldung.kafka.embedded;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.jupiter.api.Assertions.assertTrue;
@ -16,6 +15,8 @@ import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.test.context.EmbeddedKafka;
import org.springframework.test.annotation.DirtiesContext;
import com.fasterxml.jackson.databind.ObjectMapper;
@SpringBootTest
@DirtiesContext
@EmbeddedKafka(partitions = 1, brokerProperties = { "listeners=PLAINTEXT://localhost:9092", "port=9092" })
@ -33,6 +34,8 @@ class EmbeddedKafkaIntegrationTest {
@Value("${test.topic}")
private String topic;
private ObjectMapper objectMapper = new ObjectMapper();
@BeforeEach
void setup() {
consumer.resetLatch();
@ -44,7 +47,8 @@ class EmbeddedKafkaIntegrationTest {
template.send(topic, data);
boolean messageConsumed = consumer.getLatch().await(10, TimeUnit.SECONDS);
boolean messageConsumed = consumer.getLatch()
.await(10, TimeUnit.SECONDS);
assertTrue(messageConsumed);
assertThat(consumer.getPayload(), containsString(data));
}
@ -55,7 +59,8 @@ class EmbeddedKafkaIntegrationTest {
producer.send(topic, data);
boolean messageConsumed = consumer.getLatch().await(10, TimeUnit.SECONDS);
boolean messageConsumed = consumer.getLatch()
.await(10, TimeUnit.SECONDS);
assertTrue(messageConsumed);
assertThat(consumer.getPayload(), containsString(data));
}

View File

@ -0,0 +1,84 @@
package com.baeldung.spring.kafka;
import static org.assertj.core.api.AssertionsForClassTypes.assertThat;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.kafka.config.KafkaListenerEndpointRegistry;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.listener.AcknowledgingConsumerAwareMessageListener;
import org.springframework.kafka.listener.ConcurrentMessageListenerContainer;
import org.springframework.kafka.test.EmbeddedKafkaBroker;
import org.springframework.kafka.test.context.EmbeddedKafka;
import com.fasterxml.jackson.databind.ObjectMapper;
@SpringBootTest(classes = RetryableApplicationKafkaApp.class)
@EmbeddedKafka(partitions = 1, brokerProperties = { "listeners=PLAINTEXT://localhost:9092", "port=9092" })
public class KafkaRetryableIntegrationTest {
@Autowired
private EmbeddedKafkaBroker embeddedKafka;
@Autowired
private KafkaListenerEndpointRegistry registry;
@Autowired
private KafkaTemplate<String, String> template;
private ObjectMapper objectMapper = new ObjectMapper();
private static final String CONTAINER_GROUP = "multiGroup";
private static final String TOPIC = "topic";
@BeforeEach
public void setup() {
System.setProperty("spring.kafka.bootstrap-servers", embeddedKafka.getBrokersAsString());
}
@Test
public void givenEmbeddedKafkaBroker_whenSendingAWellFormedMessage_thenMessageIsConsumed() throws Exception {
ConcurrentMessageListenerContainer<?, ?> container = (ConcurrentMessageListenerContainer<?, ?>) registry.getListenerContainer(CONTAINER_GROUP);
container.stop();
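// wrap the registered listener so the test can detect, via the latch, when the record has been processed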
@SuppressWarnings("unchecked") AcknowledgingConsumerAwareMessageListener<String, String> messageListener = (AcknowledgingConsumerAwareMessageListener<String, String>) container.getContainerProperties()
.getMessageListener();
CountDownLatch latch = new CountDownLatch(1);
container.getContainerProperties()
.setMessageListener((AcknowledgingConsumerAwareMessageListener<String, String>) (data, acknowledgment, consumer) -> {
messageListener.onMessage(data, acknowledgment, consumer);
latch.countDown();
});
Greeting greeting = new Greeting("test1", "test2");
container.start();
template.send(TOPIC, objectMapper.writeValueAsString(greeting));
assertThat(latch.await(10, TimeUnit.SECONDS)).isTrue();
}
@Test
public void givenEmbeddedKafkaBroker_whenSendingAMalFormedMessage_thenMessageIsConsumedAfterRetry() throws Exception {
ConcurrentMessageListenerContainer<?, ?> container = (ConcurrentMessageListenerContainer<?, ?>) registry.getListenerContainer(CONTAINER_GROUP);
container.stop();
@SuppressWarnings("unchecked") AcknowledgingConsumerAwareMessageListener<String, String> messageListener = (AcknowledgingConsumerAwareMessageListener<String, String>) container.getContainerProperties()
.getMessageListener();
CountDownLatch latch = new CountDownLatch(1);
container.getContainerProperties()
.setMessageListener((AcknowledgingConsumerAwareMessageListener<String, String>) (data, acknowledgment, consumer) -> {
messageListener.onMessage(data, acknowledgment, consumer);
latch.countDown();
});
container.start();
Greeting greeting = new Greeting("test", "test");
template.send(TOPIC, objectMapper.writeValueAsString(greeting));
//this message will go on error
Greeting greeting2 = new Greeting("test2", "test2");
template.send(TOPIC, objectMapper.writeValueAsString(greeting2));
assertThat(latch.await(10, TimeUnit.SECONDS)).isTrue();
}
}

View File

@ -51,4 +51,12 @@ public class OrderAgnosticListComparisonUnitTest {
assertThat(a).hasSameElementsAs(b);
}
@Test
void whenTestingForOrderAgnosticEqualityWithDuplicateElementsInBothLists_ShouldBeEqual() {
List<String> a = Arrays.asList("a", "a", "b", "c");
List<String> b = Arrays.asList("a", "b", "a", "c");
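// containsExactlyInAnyOrderElementsOf also checks duplicates: both lists must contain the same elements the same number of times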
assertThat(a).containsExactlyInAnyOrderElementsOf(b);
}
}