Remove some useless logging

This commit is contained in:
James Agnew 2017-09-18 18:04:08 -04:00
parent 85dd76e35e
commit 1402c5e4e2
5 changed files with 6 additions and 20 deletions

View File

@@ -1367,7 +1367,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
theEntity.setPublished(theUpdateTime);
}
StopWatch sw = new StopWatch(); // "**
Collection<ResourceIndexedSearchParamString> existingStringParams = new ArrayList<>();
if (theEntity.isParamsStringPopulated()) {
existingStringParams.addAll(theEntity.getParamsString());
@@ -1400,13 +1399,11 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
if (theEntity.isHasLinks()) {
existingResourceLinks.addAll(theEntity.getResourceLinks());
}
ourLog.info("** Get existing in {}ms", sw.getMillis());
sw.getMillisAndRestart();
Collection<ResourceIndexedCompositeStringUnique> existingCompositeStringUniques = new ArrayList<>();
if (theEntity.isParamsCompositeStringUniquePresent()) {
existingCompositeStringUniques.addAll(theEntity.getParamsCompositeStringUnique());
}
ourLog.info("** Get existing composite in {}ms", sw.getMillis());
Set<ResourceIndexedSearchParamString> stringParams = null;
Set<ResourceIndexedSearchParamToken> tokenParams = null;
@@ -1547,13 +1544,9 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
/*
* Handle composites
*/
sw.getMillisAndRestart();
compositeStringUniques = extractCompositeStringUniques(theEntity, stringParams, tokenParams, numberParams, quantityParams, dateParams, uriParams, links);
ourLog.info("** Extract unique strings in {}ms", sw.getMillis());
sw.getMillisAndRestart();
changed = populateResourceIntoEntity(theResource, theEntity, true);
ourLog.info("** Populate resource into entity in {}ms", sw.getMillis());
theEntity.setUpdated(theUpdateTime);
if (theResource instanceof IResource) {
@@ -1584,9 +1577,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
} else {
sw.getMillisAndRestart();
changed = populateResourceIntoEntity(theResource, theEntity, false);
ourLog.info("** Populate into entity in {}ms", sw.getMillis());
theEntity.setUpdated(theUpdateTime);
// theEntity.setLanguage(theResource.getLanguage().getValue());

View File

@@ -1085,7 +1085,6 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
if (isNotBlank(theMatchUrl)) {
StopWatch sw = new StopWatch();
Set<Long> match = processMatchUrl(theMatchUrl, myResourceType);
ourLog.info("** Match URL in {}ms", sw.getMillis());
if (match.size() > 1) {
String msg = getContext().getLocalizer().getMessage(BaseHapiFhirDao.class, "transactionOperationWithMultipleMatchFailure", "UPDATE", theMatchUrl, match.size());
throw new PreconditionFailedException(msg);
@@ -1134,7 +1133,6 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
// Perform update
StopWatch sw = new StopWatch();
ResourceTable savedEntity = updateEntity(theResource, entity, null, thePerformIndexing, thePerformIndexing, new Date(), theForceUpdateVersion, thePerformIndexing);
ourLog.info("** Update entity in {}ms", sw.getMillis());
/*
* If we aren't indexing (meaning we're probably executing a sub-operation within a transaction),

View File

@@ -91,13 +91,11 @@ public abstract class BaseSearchParamRegistry implements ISearchParamRegistry {
Map<Set<String>, List<JpaRuntimeSearchParam>> paramNamesToParams = myActiveParamNamesToUniqueSearchParams.get(theResourceName);
if (paramNamesToParams == null) {
ourLog.info("** No unique search params for resource name {}", theResourceName);
return Collections.emptyList();
}
List<JpaRuntimeSearchParam> retVal = paramNamesToParams.get(theParamNames);
if (retVal == null) {
ourLog.info("** No unique search params [{}] for {} - Have {}", theParamNames, theResourceName, paramNamesToParams.keySet());
retVal = Collections.emptyList();
}
return Collections.unmodifiableList(retVal);

View File

@@ -483,7 +483,6 @@ public class FhirSystemDaoDstu3 extends BaseHapiFhirSystemDao<Bundle, Meta> {
/*
* Perform ID substitutions and then index each resource we have saved
*/
StopWatch sw = new StopWatch();//"**
FhirTerser terser = getContext().newTerser();
for (DaoMethodOutcome nextOutcome : idToPersistedOutcome.values()) {
@@ -516,13 +515,14 @@ public class FhirSystemDaoDstu3 extends BaseHapiFhirSystemDao<Bundle, Meta> {
updateEntity(nextResource, nextOutcome.getEntity(), deletedTimestampOrNull, shouldUpdate, false, updateTime, false, true);
}
}
ourLog.info("** Update entity in {}ms", sw.getMillisAndRestart());
SessionImpl session = (SessionImpl) myEntityManager.unwrap(Session.class);
ourLog.info("** Session has {} inserts and {} updates", session.getActionQueue().numberOfInsertions(), session.getActionQueue().numberOfUpdates());
int insertionCount = session.getActionQueue().numberOfInsertions();
int updateCount = session.getActionQueue().numberOfUpdates();
StopWatch sw = new StopWatch();
myEntityManager.flush();
ourLog.info("** Flush in {}ms", sw.getMillis());
ourLog.info("Session flush took {}ms for {} inserts and {} updates", sw.getMillis(), insertionCount, updateCount);
/*
* Double check we didn't allow any duplicates we shouldn't have

View File

@@ -27,7 +27,6 @@ public class ConnectionWrapper implements Connection {
@Override
public void close() throws SQLException {
// ourLog.info("** Closing connection");
myWrap.close();
}
@ -286,4 +285,4 @@ public class ConnectionWrapper implements Connection {
return myWrap.unwrap(theIface);
}
}
}