Hibernate SQL log filtering feature (#5594)
* Hibernate SQL log filtering feature
* changelog
* Add exception msg codes
* Improve test which failed in pipeline
* Join tests to avoid threading issues
* Fix comment
* Add review suggestions
* Add extra documentation including example.
* Update hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/appendix/logging.md

Co-authored-by: James Agnew <jamesagnew@gmail.com>

---------

Co-authored-by: juan.marchionatto <juan.marchionatto@smilecdr.com>
Co-authored-by: James Agnew <jamesagnew@gmail.com>
This commit is contained in:
parent
28a1a4d4df
commit
2c2afd4a83
@ -0,0 +1,7 @@
---
type: add
issue: 5593
title: "Hibernate SQL log filtering capability was added.
  See [Hibernate SQL Log Filtering](/hapi-fhir/docs/appendix/logging.html#hibernate-sql-log-filtering)."

@ -64,3 +64,48 @@ To enable detailed logging of client requests and responses (what URL is being r

To enable detailed logging of server requests and responses, an interceptor may be added to the server which logs each transaction. See [Logging Interceptor](/docs/interceptors/built_in_server_interceptors.html#logging_interceptor) for more information.

# Hibernate SQL Log Filtering

<div class="helpWarningCalloutBox">
Hibernate SQL debug logging can potentially affect your system performance.
This filtering feature reduces the amount of output generated by the Hibernate SQL logging function `after` the logging code has executed, so the performance degradation caused by Hibernate SQL logging still applies while the filters are in use.
</div>

Hibernate logs all SQL statements from a single class, which makes it hard to obtain SQL logs for only a specific feature, as the logging includes all background processes.

The Hibernate SQL log filtering feature allows you to filter out Hibernate SQL log entries by adding blacklist filters to a `hibernate-sql-log-filters.txt` classpath file.

Note that the feature works by `filtering out` SQL log entries, meaning that each filter you add reduces the SQL debug logging generated by any process running in the system, such as background jobs or other user interactions.

Hibernate SQL log filtering and its filter-refreshing task activate when the `org.hibernate.SQL` logger is set to `debug` level or more detailed, and deactivate when it is set to `info` level or less detailed.

The feature does not affect performance in any way while inactive, and its background filter-refreshing process is also shut down. It can affect performance while active, so it is intended only as a debugging tool.

The filter lines must start with one of:

1. `stack: ` to filter out log entries produced by code whose stack trace has a line starting with the filter string,
2. `sw: ` to filter out log entries which start with the filter string, or
3. `frag: ` to filter out log entries which contain the filter string

A sample file is provided with its filter lines commented out. Once uncommented, these filter lines suppress the logging produced by most background processes.
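
For illustration, a filter file could contain entries like the following. The `stack:` entry is taken from the provided sample file, while the `sw:` and `frag:` values are hypothetical statement fragments that you would replace with text from your own SQL statements:

```txt
# Lines starting with '#' are treated as comments and ignored
stack: ca.uhn.fhir.jpa.search.reindex.ResourceReindexingSvcImpl
sw: select t0.RES_ID
frag: HFJ_RES_LINK
```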

## Hibernate SQL Log Filtering example

The sample use case is to identify the SQL queries which are run for a specific operation.
The steps would be:

1. Change your logging configuration entry for `org.hibernate.SQL` to `DEBUG`

As soon as your logging system refreshes its configuration, you will see in the console a vast amount of SQL logging produced by your system's background tasks.
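
If you use Logback, for example, this configuration change could look like the following (a minimal sketch; adapt it to the logging framework and configuration file your deployment actually uses):

```xml
<!-- Enables Hibernate SQL statement logging; set back to INFO to deactivate the filtering feature -->
<logger name="org.hibernate.SQL" level="DEBUG"/>
```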

2. Uncomment the filters defined in your classpath file `hibernate-sql-log-filters.txt`.

As soon as the filtering feature refreshes its filters (there is a delay of up to 5 seconds), the console SQL logging should stop.

3. Run the operation whose SQL queries you need to debug.

Your operation's SQL statements should now be the only ones logged.

Note: This example was laid out so you can observe the effect of the filters on the log output; however, swapping steps 1 and 2 works better in practice, as it avoids the initial flood of logging output.

@ -0,0 +1,38 @@
|
|||
package ca.uhn.fhir.jpa.logging;
|
||||
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
|
||||
import java.util.LinkedList;
|
||||
import java.util.List;
|
||||
|
||||
public abstract class BaseSqlLoggerFilterImpl implements ISqlLoggerFilter {
|
||||
protected final List<String> myFilterDefinitions = new LinkedList<>();
|
||||
|
||||
@Override
|
||||
public boolean evaluateFilterLine(String theFilterLine) {
|
||||
boolean matched = theFilterLine.startsWith(getPrefix());
|
||||
if (matched) {
|
||||
myFilterDefinitions.add(
|
||||
theFilterLine.substring(getPrefix().length()).trim());
|
||||
}
|
||||
return matched;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void clearDefinitions() {
|
||||
myFilterDefinitions.clear();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object getLockingObject() {
|
||||
return myFilterDefinitions;
|
||||
}
|
||||
|
||||
@VisibleForTesting
|
||||
public void setFilterDefinitions(List<String> theFilterDefinitions) {
|
||||
synchronized (myFilterDefinitions) {
|
||||
myFilterDefinitions.clear();
|
||||
myFilterDefinitions.addAll(theFilterDefinitions);
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,19 @@
|
|||
package ca.uhn.fhir.jpa.logging;
|
||||
|
||||
import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
|
||||
import org.hibernate.engine.jdbc.spi.SqlStatementLogger;
|
||||
import org.hibernate.service.spi.ServiceContributor;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
public class FilteringSqlLoggerImplContributor implements ServiceContributor {
|
||||
private static final Logger logger = LoggerFactory.getLogger(FilteringSqlLoggerImplContributor.class);
|
||||
|
||||
@Override
|
||||
public void contribute(StandardServiceRegistryBuilder serviceRegistryBuilder) {
|
||||
logger.info("Adding service: SqlStatementFilteringLogger");
|
||||
|
||||
serviceRegistryBuilder.addService(
|
||||
SqlStatementLogger.class, new SqlStatementFilteringLogger(SqlLoggerFilteringUtil.getInstance()));
|
||||
}
|
||||
}
|
|
@ -0,0 +1,17 @@
|
|||
package ca.uhn.fhir.jpa.logging;
|
||||
|
||||
/**
|
||||
* Contract for Filters used by the utility class SqlStatementFilteringLogger
|
||||
*/
|
||||
public interface ISqlLoggerFilter {
|
||||
|
||||
boolean match(String theStatement);
|
||||
|
||||
boolean evaluateFilterLine(String theFilterLine);
|
||||
|
||||
String getPrefix();
|
||||
|
||||
void clearDefinitions();
|
||||
|
||||
Object getLockingObject();
|
||||
}
|
|
@ -0,0 +1,175 @@
|
|||
package ca.uhn.fhir.jpa.logging;
|
||||
|
||||
import ca.uhn.fhir.i18n.Msg;
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.core.io.ClassPathResource;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Files;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.CountDownLatch;
|
||||
import java.util.concurrent.ScheduledThreadPoolExecutor;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.concurrent.atomic.AtomicInteger;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
/**
|
||||
* Utility for the Hibernate SQL log filtering feature
|
||||
*/
|
||||
public class SqlLoggerFilteringUtil {
|
||||
private static final Logger ourLog = LoggerFactory.getLogger(SqlLoggerFilteringUtil.class);
|
||||
|
||||
@SuppressWarnings("FieldMayBeFinal") // so test can inject mocks
|
||||
public static int FILTER_UPDATE_INTERVAL_SECS = 5;
|
||||
|
||||
public static final String FILTER_FILE_PATH = "hibernate-sql-log-filters.txt";
|
||||
private static final AtomicInteger ourRefreshCount = new AtomicInteger();
|
||||
|
||||
private final Logger hibernateLogger = LoggerFactory.getLogger("org.hibernate.SQL");
|
||||
|
||||
private final List<ISqlLoggerFilter> mySqlLoggerFilters;
|
||||
|
||||
private CountDownLatch myRefreshDoneLatch;
|
||||
private volatile ScheduledThreadPoolExecutor myRefreshExecutor;
|
||||
|
||||
private static final SqlLoggerFilteringUtil myInstance = new SqlLoggerFilteringUtil();
|
||||
// singleton
|
||||
public static SqlLoggerFilteringUtil getInstance() {
|
||||
return myInstance;
|
||||
}
|
||||
|
||||
private SqlLoggerFilteringUtil() {
|
||||
ourRefreshCount.set(0);
|
||||
mySqlLoggerFilters = List.of(
|
||||
new SqlLoggerStartsWithFilter(), new SqlLoggerFragmentFilter(), new SqlLoggerStackTraceFilter());
|
||||
}
|
||||
|
||||
public boolean allowLog(String theStatement) {
|
||||
// Invoked when org.hibernate.SQL logger is DEBUG enabled
|
||||
// Only initialize if method ever invoked, as it is usually not the case.
|
||||
if (myRefreshExecutor == null || myRefreshExecutor.isShutdown()) {
|
||||
startFilterRefreshExecutor();
|
||||
}
|
||||
|
||||
boolean allowLog = isStatementAllowed(theStatement);
|
||||
ourLog.trace("SQL log {}: {}", allowLog ? "allowed" : "filtered out", theStatement);
|
||||
return allowLog;
|
||||
}
|
||||
|
||||
private boolean isStatementAllowed(String theStatement) {
|
||||
return mySqlLoggerFilters.stream().noneMatch(f -> f.match(theStatement));
|
||||
}
|
||||
|
||||
private synchronized void startFilterRefreshExecutor() {
|
||||
if (myRefreshExecutor != null && !myRefreshExecutor.isShutdown()) {
|
||||
ourLog.debug(
|
||||
"myRefreshExecutor terminated state: {}, terminating state: {}",
|
||||
myRefreshExecutor.isTerminated(),
|
||||
myRefreshExecutor.isTerminating());
|
||||
return;
|
||||
}
|
||||
|
||||
myRefreshDoneLatch = new CountDownLatch(1);
|
||||
myRefreshExecutor = new ScheduledThreadPoolExecutor(1);
|
||||
myRefreshExecutor.scheduleAtFixedRate(
|
||||
new UpdateFiltersTask(), 0, FILTER_UPDATE_INTERVAL_SECS, TimeUnit.SECONDS);
|
||||
ourLog.info("Starting SQL log filters refresh executor");
|
||||
|
||||
// wait for first refresh cycle to complete
|
||||
try {
|
||||
// reset to use in case executor is restarted
|
||||
myRefreshDoneLatch.await();
|
||||
|
||||
} catch (InterruptedException ignored) {
|
||||
ourLog.warn("Interrupted from sleep");
|
||||
}
|
||||
}
|
||||
|
||||
private synchronized void stopFilterRefreshExecutor() {
|
||||
if (myRefreshExecutor == null || myRefreshExecutor.isShutdown()) {
|
||||
return;
|
||||
}
|
||||
ourLog.info("Stopping SQL log filters refresh executor");
|
||||
myRefreshExecutor.shutdown();
|
||||
}
|
||||
|
||||
private class UpdateFiltersTask implements Runnable {
|
||||
@Override
|
||||
public void run() {
|
||||
ourLog.debug("\n\n\t\t\tRefreshing hibernate SQL filters!\n");
|
||||
|
||||
try {
|
||||
refreshFilters(FILTER_FILE_PATH);
|
||||
|
||||
} catch (Exception theE) {
|
||||
ourLog.error("Hibernate SQL log filters not refreshed. Exception: {} \n{}", theE, theE.getStackTrace());
|
||||
throw new RuntimeException(Msg.code(2478) + theE);
|
||||
} finally {
|
||||
myRefreshDoneLatch.countDown();
|
||||
}
|
||||
|
||||
int count = ourRefreshCount.getAndIncrement();
|
||||
ourLog.debug("SQL logging filters {}. Refresh count: {}", count == 0 ? "initialized" : "refreshed", count);
|
||||
}
|
||||
}
|
||||
|
||||
@VisibleForTesting
|
||||
public void refreshFilters(String theFilterFilePath) throws IOException {
|
||||
ourLog.debug("SQL log DEBUG enabled: {}", hibernateLogger.isDebugEnabled());
|
||||
if (!hibernateLogger.isDebugEnabled()) {
|
||||
// in case startFilterRefreshExecutor is waiting for refresh to finish
|
||||
myRefreshDoneLatch.countDown();
|
||||
stopFilterRefreshExecutor();
|
||||
return;
|
||||
}
|
||||
|
||||
ourLog.debug("Starting filters refresh");
|
||||
File resource = new ClassPathResource(theFilterFilePath).getFile();
|
||||
List<String> filterDefinitionLines = Files.readAllLines(resource.toPath());
|
||||
|
||||
for (ISqlLoggerFilter filter : mySqlLoggerFilters) {
|
||||
synchronized (filter.getLockingObject()) {
|
||||
filter.clearDefinitions();
|
||||
filterDefinitionLines.stream()
|
||||
.filter(l -> !l.startsWith("#"))
|
||||
.filter(filterDef -> StringUtils.isNotBlank(filterDef.trim()))
|
||||
.forEach(filterLine -> presentFilterDefinitionLineToFilters(filterLine, mySqlLoggerFilters));
|
||||
}
|
||||
}
|
||||
ourLog.debug("Ended filter refresh");
|
||||
}
|
||||
|
||||
private void presentFilterDefinitionLineToFilters(String theFilterLine, List<ISqlLoggerFilter> theFilterList) {
|
||||
for (ISqlLoggerFilter filterRef : theFilterList) {
|
||||
if (filterRef.evaluateFilterLine(theFilterLine)) {
|
||||
// only one filter takes a filter line
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
int spaceIdx = theFilterLine.indexOf(" ");
|
||||
ourLog.warn(
|
||||
"SQL log filtering line prefix not recognized: '{}'. Must be one of: '#', {}",
|
||||
theFilterLine.substring(0, spaceIdx == -1 ? theFilterLine.length() : spaceIdx),
|
||||
theFilterList.stream().map(ISqlLoggerFilter::getPrefix).collect(Collectors.joining("', '", "'", "'")));
|
||||
}
|
||||
|
||||
@VisibleForTesting
|
||||
public static int getRefreshCountForTests() {
|
||||
return ourRefreshCount.get();
|
||||
}
|
||||
|
||||
@VisibleForTesting
|
||||
public static void setFilterUpdateIntervalSecs(int theFilterUpdateIntervalSecs) {
|
||||
FILTER_UPDATE_INTERVAL_SECS = theFilterUpdateIntervalSecs;
|
||||
}
|
||||
|
||||
@VisibleForTesting
|
||||
public List<ISqlLoggerFilter> getSqlLoggerFilters() {
|
||||
return mySqlLoggerFilters;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,21 @@
|
|||
package ca.uhn.fhir.jpa.logging;
|
||||
|
||||
/**
* When filtering is active, filters Hibernate SQL log lines containing the defined fragment.
*/
|
||||
public class SqlLoggerFragmentFilter extends BaseSqlLoggerFilterImpl implements ISqlLoggerFilter {
|
||||
|
||||
public static final String PREFIX = "frag:";
|
||||
|
||||
@Override
|
||||
public boolean match(String theStatement) {
|
||||
synchronized (myFilterDefinitions) {
|
||||
return myFilterDefinitions.stream().anyMatch(theStatement::contains);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getPrefix() {
|
||||
return PREFIX;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,69 @@
|
|||
package ca.uhn.fhir.jpa.logging;
|
||||
|
||||
import ca.uhn.fhir.i18n.Msg;
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.LinkedList;
|
||||
import java.util.Objects;
|
||||
import java.util.Set;
|
||||
import java.util.stream.Collectors;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
/**
* When filtering is active, filters Hibernate SQL log lines whose generating code's stack trace
* contains a line which starts with the defined string.
*/
|
||||
public class SqlLoggerStackTraceFilter extends BaseSqlLoggerFilterImpl implements ISqlLoggerFilter {
|
||||
private static final Logger ourLog = LoggerFactory.getLogger(SqlLoggerStackTraceFilter.class);
|
||||
public static final String PREFIX = "stack:";
|
||||
|
||||
@Override
|
||||
public boolean match(String theStatement) {
|
||||
// safe copy to shorten synchronized time
|
||||
LinkedList<String> filterDefinitionsSafeCopy;
|
||||
synchronized (myFilterDefinitions) {
|
||||
filterDefinitionsSafeCopy = new LinkedList<>(myFilterDefinitions);
|
||||
}
|
||||
|
||||
Set<String> cdrClassesInStack =
|
||||
getStackTraceStream().map(StackTraceElement::getClassName).collect(Collectors.toSet());
|
||||
|
||||
if (cdrClassesInStack.isEmpty()) {
|
||||
ourLog.trace("No CDR or HAPI-FHIR class found in stack");
|
||||
return false;
|
||||
}
|
||||
|
||||
boolean matched;
|
||||
try {
|
||||
matched = cdrClassesInStack.stream().anyMatch(clName -> filterDefinitionsSafeCopy.stream()
|
||||
.filter(Objects::nonNull)
|
||||
.anyMatch(clName::startsWith));
|
||||
|
||||
} catch (Exception theE) {
|
||||
ourLog.debug(
|
||||
"myFilterDefinitions: {}",
|
||||
filterDefinitionsSafeCopy.stream()
|
||||
.map(String::valueOf)
|
||||
.collect(Collectors.joining("\n", "\n", "")));
|
||||
ourLog.debug(
|
||||
"cdrClassesInStack: {}",
|
||||
cdrClassesInStack.stream().map(String::valueOf).collect(Collectors.joining("\n", "\n", "")));
|
||||
throw new RuntimeException(Msg.code(2479) + theE);
|
||||
}
|
||||
|
||||
return matched;
|
||||
}
|
||||
|
||||
@VisibleForTesting // Thread can't be mocked
|
||||
public Stream<StackTraceElement> getStackTraceStream() {
|
||||
return Arrays.stream(Thread.currentThread().getStackTrace());
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getPrefix() {
|
||||
return PREFIX;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,20 @@
|
|||
package ca.uhn.fhir.jpa.logging;
|
||||
|
||||
/**
* When filtering is active, filters Hibernate SQL log lines starting with the defined string.
*/
|
||||
public class SqlLoggerStartsWithFilter extends BaseSqlLoggerFilterImpl {
|
||||
private static final String PREFIX = "sw:";
|
||||
|
||||
@Override
|
||||
public boolean match(String theStatement) {
|
||||
synchronized (myFilterDefinitions) {
|
||||
return myFilterDefinitions.stream().anyMatch(theStatement::startsWith);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getPrefix() {
|
||||
return PREFIX;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,40 @@
|
|||
package ca.uhn.fhir.jpa.logging;
|
||||
|
||||
import org.hibernate.engine.jdbc.spi.SqlStatementLogger;
|
||||
import org.hibernate.internal.CoreLogging;
|
||||
import org.hibernate.service.Service;
|
||||
import org.jboss.logging.Logger;
|
||||
|
||||
/**
* Logger set as a Hibernate service to allow filtering out SQL statements based on statement content
* instead of package, as Hibernate always logs from the same class, which makes package-based discrimination impossible.
* Note that when the statement content includes a class and package name, it can be used for filtering.
* <p/>
* It self-activates when the "org.hibernate.SQL" logger is set to DEBUG.
* It deactivates fully (even the config-checking executor is shut down) when the "org.hibernate.SQL" logger is set lower than DEBUG.
* To use, simply add filtering statement lines to the 'hibernate-sql-log-filters.txt' classpath file,
* starting with:
* <ul>
* <li>'sw:' to filter statements which start with the following string</li>
* <li>'frag:' to filter statements which contain the fragment string</li>
* <li>'stack:' to filter statements whose generating stack trace has an entry whose class name starts with the following string</li>
* </ul>
*/
|
||||
public class SqlStatementFilteringLogger extends SqlStatementLogger implements Service {
|
||||
|
||||
private static final Logger LOG = CoreLogging.logger("org.hibernate.SQL");
|
||||
|
||||
private final SqlLoggerFilteringUtil myFilteringUtil;
|
||||
|
||||
public SqlStatementFilteringLogger(SqlLoggerFilteringUtil theFilteringUtil) {
|
||||
super();
|
||||
myFilteringUtil = theFilteringUtil;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void logStatement(String statement) {
|
||||
if (LOG.isDebugEnabled() && myFilteringUtil.allowLog(statement)) {
|
||||
super.logStatement(statement);
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1 @@
|
|||
ca.uhn.fhir.jpa.logging.FilteringSqlLoggerImplContributor
|
|
@ -0,0 +1,19 @@
|
|||
##############################################################################################
|
||||
# Filters suppress a log statement according to its prefix when:
# _ stack: there is an entry in the stack trace whose class name starts with the filter string
# _ sw: the log statement starts with the filter string
# _ frag: the log statement contains the filter string
#
# To add a new filter type, extend BaseSqlLoggerFilterImpl.
|
||||
#
|
||||
##############################################################################################
|
||||
# stack: ca.uhn.fhir.jpa.search.SearchUrlJobMaintenanceSvcImpl
|
||||
# stack: ca.uhn.fhir.jpa.subscription.ResourceModifiedMessagePersistenceSvcImpl
|
||||
# stack: ca.uhn.fhir.jpa.term.TermReindexingSvcImpl
|
||||
# stack: ca.uhn.fhir.jpa.bulk.imprt.svc.BulkDataImportSvcImpl
|
||||
# stack: ca.uhn.fhir.jpa.search.reindex.ResourceReindexingSvcImpl
|
||||
# stack: ca.uhn.fhir.jpa.batch2.JpaJobPersistenceImpl
|
||||
# stack: ca.uhn.fhir.jpa.term.TermReadSvcImpl
|
||||
# stack: ca.uhn.fhir.jpa.bulk.export.svc.BulkDataExportJobSchedulingHelperImpl
|
||||
# stack: ca.uhn.fhir.jpa.search.StaleSearchDeletingSvcImpl
|
||||
# stack: ca.uhn.fhir.jpa.cache.ResourceChangeListenerCacheRefresherImpl
|
|
@ -0,0 +1 @@
|
|||
# used by FilteringSqlLoggerTest
|
|
@ -0,0 +1,339 @@
|
|||
package ca.uhn.fhir.jpa.logging;
|
||||
|
||||
import ca.uhn.test.util.LogbackCaptureTestExtension;
|
||||
import ch.qos.logback.classic.Level;
|
||||
import ch.qos.logback.classic.LoggerContext;
|
||||
import org.junit.jupiter.api.AfterEach;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Nested;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.extension.ExtendWith;
|
||||
import org.junit.jupiter.api.extension.RegisterExtension;
|
||||
import org.mockito.InjectMocks;
|
||||
import org.mockito.Spy;
|
||||
import org.mockito.junit.jupiter.MockitoExtension;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.core.io.ClassPathResource;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.StandardOpenOption;
|
||||
import java.time.Duration;
|
||||
import java.time.temporal.ChronoUnit;
|
||||
|
||||
import static ca.uhn.fhir.jpa.logging.SqlLoggerFilteringUtil.FILTER_FILE_PATH;
|
||||
import static ca.uhn.fhir.jpa.logging.SqlLoggerFilteringUtil.FILTER_UPDATE_INTERVAL_SECS;
|
||||
import static org.awaitility.Awaitility.await;
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.Matchers.containsString;
|
||||
import static org.hamcrest.Matchers.hasItems;
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertFalse;
|
||||
import static org.junit.jupiter.api.Assertions.assertNotNull;
|
||||
import static org.junit.jupiter.api.Assertions.assertTrue;
|
||||
import static org.mockito.ArgumentMatchers.any;
|
||||
import static org.mockito.Mockito.lenient;
|
||||
import static org.mockito.Mockito.never;
|
||||
import static org.mockito.Mockito.spy;
|
||||
import static org.mockito.Mockito.verify;
|
||||
|
||||
/**
* This test includes tests for two classes, SqlStatementFilteringLogger and SqlLoggerFilteringUtil, because the latter
* uses threading in a way that would make these tests collide with each other if run in parallel, as could be the
* case if they were defined in different classes. They are separated by top-level nested class names.
*/
|
||||
@ExtendWith(MockitoExtension.class)
|
||||
public class SqlLoggerFilteringAndUtilTest {
|
||||
|
||||
@Nested
|
||||
public class SqlStatementFilteringLoggerTests {
|
||||
@Spy
|
||||
private SqlLoggerFilteringUtil myFilteringUtil;
|
||||
private SqlStatementFilteringLogger myTestedLogger;
|
||||
private ch.qos.logback.classic.Logger myLogger;
|
||||
|
||||
@BeforeEach
|
||||
void setUp() {
|
||||
myTestedLogger= new SqlStatementFilteringLogger(myFilteringUtil);
|
||||
|
||||
LoggerContext loggerContext = (LoggerContext) LoggerFactory.getILoggerFactory();
|
||||
myLogger = loggerContext.getLogger("org.hibernate.SQL");
|
||||
|
||||
ch.qos.logback.classic.Logger myTestedclassLogger = loggerContext.getLogger("ca.cdr.api.logging.SqlLoggerFilteringUtil");
|
||||
myTestedclassLogger.setLevel(Level.toLevel("trace"));
|
||||
}
|
||||
|
||||
@Nested
|
||||
public class ActivationTests {
|
||||
|
||||
@Test
|
||||
void doesNotInvokeMustLogWhenLoggerNotDebug() {
|
||||
myLogger.setLevel(Level.toLevel("info"));
|
||||
|
||||
myTestedLogger.logStatement("select * from Patients");
|
||||
|
||||
verify(myFilteringUtil, never()).allowLog(any());
|
||||
}
|
||||
|
||||
@Test
|
||||
void invokesMustLogWhenLoggerDebug() {
|
||||
myLogger.setLevel(Level.toLevel("debug"));
|
||||
|
||||
myTestedLogger.logStatement("select * from Patients");
|
||||
|
||||
verify(myFilteringUtil).allowLog("select * from Patients");
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@Nested
|
||||
public class FileFiltersTests {
|
||||
|
||||
@RegisterExtension
|
||||
public LogbackCaptureTestExtension myLogCapture = new LogbackCaptureTestExtension("org.hibernate.SQL");
|
||||
|
||||
@BeforeEach
|
||||
void setUp() {
|
||||
myLogger.setLevel(Level.toLevel("debug"));
|
||||
SqlLoggerFilteringUtil.setFilterUpdateIntervalSecs(1);
|
||||
}
|
||||
|
||||
@AfterEach
|
||||
void tearDown() throws IOException {
|
||||
clearFilterFile();
|
||||
}
|
||||
|
||||
@Test
|
||||
void testDynamicFiltersUpdate_forStartsWithFilters() throws IOException {
|
||||
// starts with empty filter list
|
||||
myTestedLogger.logStatement("1-must-log-this-statement");
|
||||
myTestedLogger.logStatement("2-must-log-this-statement");
|
||||
myTestedLogger.logStatement("3-must-log-this-statement");
|
||||
assertEquals(3, myLogCapture.getLogEvents().size() );
|
||||
|
||||
addLineToFilterFile("sw: 1-must-log");
|
||||
waitForFiltersRefresh();
|
||||
myLogCapture.clearEvents();
|
||||
|
||||
// log again
|
||||
myTestedLogger.logStatement("1-must-log-this-statement");
|
||||
myTestedLogger.logStatement("2-must-log-this-statement");
|
||||
myTestedLogger.logStatement("3-must-log-this-statement");
|
||||
assertThat(
|
||||
myLogCapture.getLogEvents().stream().map(Object::toString).toList(),
|
||||
hasItems(
|
||||
containsString("2-must-log-this-statement"),
|
||||
containsString("3-must-log-this-statement")));
|
||||
|
||||
addLineToFilterFile("sw: 3-must-log");
|
||||
waitForFiltersRefresh();
|
||||
myLogCapture.clearEvents();
|
||||
|
||||
// log again
|
||||
myTestedLogger.logStatement("1-must-log-this-statement");
|
||||
myTestedLogger.logStatement("2-must-log-this-statement");
|
||||
myTestedLogger.logStatement("3-must-log-this-statement");
|
||||
assertThat(
|
||||
myLogCapture.getLogEvents().stream().map(Object::toString).toList(),
|
||||
hasItems(containsString("2-must-log-this-statement")));
|
||||
}
|
||||
|
||||
@Test
|
||||
void testDynamicFiltersUpdate_forEqualsWithFilters() throws IOException {
|
||||
// starts with empty filter list
|
||||
myTestedLogger.logStatement("1-must-log-this-statement");
|
||||
myTestedLogger.logStatement("2-must-log-this-statement");
|
||||
myTestedLogger.logStatement("3-must-log-this-statement");
|
||||
assertEquals(3, myLogCapture.getLogEvents().size() );
|
||||
|
||||
addLineToFilterFile("eq: 1-must-log-this-statement");
|
||||
waitForFiltersRefresh();
|
||||
myLogCapture.clearEvents();
|
||||
|
||||
// log again
|
||||
myTestedLogger.logStatement("1-must-log-this-statement");
|
||||
myTestedLogger.logStatement("2-must-log-this-statement");
|
||||
myTestedLogger.logStatement("3-must-log-this-statement");
|
||||
assertThat(
|
||||
myLogCapture.getLogEvents().stream().map(Object::toString).toList(),
|
||||
hasItems(
|
||||
containsString("2-must-log-this-statement"),
|
||||
containsString("3-must-log-this-statement")));
|
||||
|
||||
addLineToFilterFile("sw: 3-must-log-this-statement");
|
||||
waitForFiltersRefresh();
|
||||
myLogCapture.clearEvents();
|
||||
|
||||
// log again
|
||||
myTestedLogger.logStatement("1-must-log-this-statement");
|
||||
myTestedLogger.logStatement("2-must-log-this-statement");
|
||||
myTestedLogger.logStatement("3-must-log-this-statement");
|
||||
assertThat(
|
||||
myLogCapture.getLogEvents().stream().map(Object::toString).toList(),
|
||||
hasItems(containsString("2-must-log-this-statement")));
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private void waitForFiltersRefresh() {
|
||||
int beforeRefreshCount = SqlLoggerFilteringUtil.getRefreshCountForTests();
|
||||
await().atMost(Duration.of(SqlLoggerFilteringUtil.FILTER_UPDATE_INTERVAL_SECS + 1, ChronoUnit.SECONDS))
|
||||
.until(() -> SqlLoggerFilteringUtil.getRefreshCountForTests() > beforeRefreshCount);
|
||||
}
|
||||
|
||||
private void addLineToFilterFile(String theFilterLine) throws IOException {
|
||||
File resource = new ClassPathResource(FILTER_FILE_PATH).getFile();
|
||||
assertNotNull(resource);
|
||||
Files.write(resource.toPath(), (theFilterLine + "\n").getBytes(), StandardOpenOption.APPEND);
|
||||
}
|
||||
|
||||
private void clearFilterFile() throws IOException {
|
||||
File resource = new ClassPathResource(FILTER_FILE_PATH).getFile();
|
||||
assertNotNull(resource);
|
||||
Files.write(resource.toPath(), new byte[0], StandardOpenOption.TRUNCATE_EXISTING);
|
||||
}
|
||||
}
|
||||
|
||||
@Nested
|
||||
public class SqlLoggerFilteringUtilTests {
|
||||
@InjectMocks
|
||||
private SqlLoggerFilteringUtil myTestedUtil;
|
||||
@Spy
|
||||
private SqlLoggerFilteringUtil mySpiedUtil;
|
||||
|
||||
@RegisterExtension
|
||||
public LogbackCaptureTestExtension myLogCapture = new LogbackCaptureTestExtension("org.hibernate.SQL");
|
||||
|
||||
private ch.qos.logback.classic.Logger myHibernateLogger;
|
||||
|
||||
@BeforeEach
|
||||
void setUp() {
|
||||
mySpiedUtil = spy(myTestedUtil);
|
||||
SqlLoggerFilteringUtil.setFilterUpdateIntervalSecs(1);
|
||||
|
||||
LoggerContext loggerContext = (LoggerContext) LoggerFactory.getILoggerFactory();
|
||||
myHibernateLogger = loggerContext.getLogger(SqlLoggerFilteringUtil.class);
|
||||
myHibernateLogger = loggerContext.getLogger("org.hibernate.SQL");
|
||||
}
|
||||
|
||||
@Nested
|
||||
public class MustLogTests {
|
||||
|
||||
@BeforeEach
|
||||
void setUp() throws IOException {
|
||||
lenient().doNothing().when(mySpiedUtil).refreshFilters(any());
|
||||
myHibernateLogger.setLevel(Level.DEBUG);
|
||||
}
|
||||
|
||||
@Test
|
||||
void whenNoFilterMatches_mustReturnTrue() {
|
||||
boolean result = mySpiedUtil.allowLog("sql statement 1 with params a , BBB and ccccc");
|
||||
|
||||
assertTrue(result);
|
||||
}
|
||||
|
||||
@Test
|
||||
void whenStartsWithFilterMatches_mustReturnFalse() {
|
||||
setFilter("sw: sql statement 1");
|
||||
|
||||
boolean result = mySpiedUtil.allowLog("sql statement 1 with params a , BBB and ccccc");
|
||||
|
||||
assertFalse(result);
|
||||
}
|
||||
|
||||
@Test
|
||||
void whenFragmentFilterMatches_mustReturnFalse() {
|
||||
setFilter("frag: with params a, BBB");
|
||||
|
||||
boolean result = mySpiedUtil.allowLog("sql statement 1 with params a, BBB and ccccc");
|
||||
|
||||
assertFalse(result);
|
||||
}
|
||||
private void setFilter(String theFilterDefinition) {
|
||||
mySpiedUtil.getSqlLoggerFilters().forEach(f -> f.evaluateFilterLine(theFilterDefinition));
|
||||
}
|
||||
}
|
||||
|
||||
@Nested
|
||||
public class ExecutorActivationTests {
|
||||
|
||||
@AfterEach
|
||||
void tearDown() {
|
||||
// shuts down executor if it was initialized
|
||||
myHibernateLogger.setLevel(Level.INFO);
|
||||
waitForRefreshCycle();
|
||||
assertExecutorState(false);
|
||||
}
|
||||
|
||||
@Test
|
||||
void testRefreshIsNotExecutedUntilMustLogIsCalled() {
|
||||
myHibernateLogger.setLevel(Level.INFO);
|
||||
|
||||
assertExecutorState(false);
|
||||
}
|
||||
|
||||
@Test
|
||||
void testRefreshIsExecutedWhenMustLogIsCalled() {
|
||||
myHibernateLogger.setLevel(Level.DEBUG);
|
||||
|
||||
mySpiedUtil.allowLog("sql statement");
|
||||
|
||||
assertExecutorState(true);
|
||||
}
|
||||
|
||||
@Test
|
||||
void testRefreshIsStoppedWhenDebugLogIsStopped() {
|
||||
// refresh executor is stopped until mustLog is invoked
|
||||
assertExecutorState(false);
|
||||
|
||||
// refresh executor starts once mustLog is called with logger in DEBUG mode
|
||||
myHibernateLogger.setLevel(Level.DEBUG);
|
||||
mySpiedUtil.allowLog("sql statement");
|
||||
assertExecutorState(true);
|
||||
|
||||
// refresh executor stops once mustLog is called when logger DEBUG mode is reset
|
||||
myHibernateLogger.setLevel(Level.INFO);
|
||||
|
||||
// wait for refresh cycle, which should stop executor
|
||||
waitForRefreshCycle();
|
||||
// validate it stopped refreshing
|
||||
assertExecutorState(false);
|
||||
|
||||
// until log is back to DEBUG
|
||||
myHibernateLogger.setLevel(Level.DEBUG);
|
||||
// executor not reactivated until debug log is called
|
||||
mySpiedUtil.allowLog("sql statement");
|
||||
waitForRefreshCycle();
|
||||
// validate started refreshing again
|
||||
assertExecutorState(true);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private void waitForRefreshCycle() {
|
||||
try {
|
||||
Thread.sleep(((long) FILTER_UPDATE_INTERVAL_SECS + 1) * 1_000);
|
||||
} catch (InterruptedException ignored) {
|
||||
// ignore
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
private void assertExecutorState(boolean isActive) {
|
||||
int beforeRefreshCount = SqlLoggerFilteringUtil.getRefreshCountForTests();
|
||||
if (isActive) {
|
||||
await().atMost(Duration.of(FILTER_UPDATE_INTERVAL_SECS + 1, ChronoUnit.SECONDS))
|
||||
.until(() -> beforeRefreshCount < SqlLoggerFilteringUtil.getRefreshCountForTests());
|
||||
} else {
|
||||
waitForRefreshCycle();
|
||||
int newCount = SqlLoggerFilteringUtil.getRefreshCountForTests();
|
||||
assertEquals(beforeRefreshCount, newCount);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
||||
}
|
|
@ -0,0 +1,82 @@
|
|||
package ca.uhn.fhir.jpa.logging;
|
||||
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.extension.ExtendWith;
|
||||
import org.mockito.junit.jupiter.MockitoExtension;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
import static org.junit.jupiter.api.Assertions.assertFalse;
|
||||
import static org.junit.jupiter.api.Assertions.assertTrue;
|
||||
import static org.mockito.Mockito.doReturn;
|
||||
import static org.mockito.Mockito.spy;
|
||||
|
||||
@ExtendWith(MockitoExtension.class)
|
||||
class SqlLoggerStackTraceFilterTest {
|
||||
|
||||
private final SqlLoggerStackTraceFilter myTested = new SqlLoggerStackTraceFilter();
|
||||
private SqlLoggerStackTraceFilter mySpiedTested;
|
||||
|
||||
@BeforeEach
|
||||
void setUp() {
|
||||
mySpiedTested = spy(myTested);
|
||||
}
|
||||
|
||||
@Test
|
||||
void noMatch() {
|
||||
mySpiedTested.setFilterDefinitions(List.of(
|
||||
"ca.cdr.clustermgr.svc.impl.DatabaseBackedHttpSessionStorageSvcImpl",
|
||||
"ca.uhn.fhir.jpa.cache.CdrResourceChangeListenerCache",
|
||||
"ca.cdr.clustermgr.svc.impl.ModuleStatusControllerSvcImpl",
|
||||
"ca.cdr.clustermgr.svc.impl.StatsHeartbeatSvcImpl"
|
||||
));
|
||||
|
||||
Stream<StackTraceElement> stackTraceStream = Stream.of(
|
||||
stElement("ca.cdr.api.camel.ICamelRouteEndpointSvc"),
|
||||
stElement("ca.cdr.api.transactionlog.ITransactionLogFetchingSvc"),
|
||||
stElement("ca.cdr.cdaxv2.impl.CdaDocumentSvcImpl"),
|
||||
stElement("ca.cdr.endpoint.cdshooks.svc.prefetch.CdsHooksDaoAuthorizationSvc"),
|
||||
stElement("ca.cdr.endpoint.hl7v2.in.converter.Hl7V2InboundConverter")
|
||||
);
|
||||
doReturn(stackTraceStream).when(mySpiedTested).getStackTraceStream();
|
||||
|
||||
// execute
|
||||
boolean matched = mySpiedTested.match("not-used");
|
||||
|
||||
assertFalse(matched);
|
||||
}
|
||||
|
||||
@Test
|
||||
void match() {
|
||||
mySpiedTested.setFilterDefinitions(List.of(
|
||||
"ca.cdr.clustermgr.svc.impl.DatabaseBackedHttpSessionStorageSvcImpl",
|
||||
"ca.uhn.fhir.jpa.cache.CdrResourceChangeListenerCache",
|
||||
"ca.cdr.clustermgr.svc.impl.ModuleStatusControllerSvcImpl",
|
||||
"ca.cdr.clustermgr.svc.impl.StatsHeartbeatSvcImpl"
|
||||
));
|
||||
|
||||
Stream<StackTraceElement> stackTraceStream = Stream.of(
|
||||
stElement("ca.uhn.fhir.jpa.cache.CdrResourceChangeListenerCache"),
|
||||
stElement("ca.cdr.api.camel.ICamelRouteEndpointSvc"),
|
||||
stElement("ca.cdr.api.transactionlog.ITransactionLogFetchingSvc"),
|
||||
stElement("ca.cdr.api.transactionlog.ITransactionLogFetchingSvc"),
|
||||
stElement("ca.cdr.cdaxv2.impl.CdaDocumentSvcImpl"),
|
||||
stElement("ca.cdr.endpoint.cdshooks.svc.prefetch.CdsHooksDaoAuthorizationSvc"),
|
||||
stElement("ca.cdr.endpoint.hl7v2.in.converter.Hl7V2InboundConverter"),
|
||||
stElement("ca.cdr.clustermgr.svc.impl.StatsHeartbeatSvcImpl") // <== match
|
||||
);
|
||||
doReturn(stackTraceStream).when(mySpiedTested).getStackTraceStream();
|
||||
|
||||
// execute
|
||||
boolean matched = mySpiedTested.match("not-used");
|
||||
|
||||
assertTrue(matched);
|
||||
}
|
||||
|
||||
|
||||
private StackTraceElement stElement(String theClassName) {
|
||||
return new StackTraceElement(theClassName, "", null, 0);
|
||||
}
|
||||
}
|