Don't allow JPA server to save duplicate or empty tags, and fix #664 by
preventing multiple threads from loading structure definitions for validation at the same time
parent 52a5fcce17 · commit 7c6bb01a8b
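Before the diff, a quick sketch of the idea: the tag lookup below now returns null when every part of a tag is blank, and callers simply skip null results, so empty tags are never persisted and repeated tags collapse to a single definition. The standalone Java sketch that follows illustrates that contract with made-up stand-in names (TagGuardSketch, TagDef); it is not HAPI's actual JPA code, and the set-based dedup is only an approximation of what the server does.

import java.util.HashSet;
import java.util.Objects;
import java.util.Set;

// Illustrative stand-in only -- the real TagDefinition/getTagOrNull live in the
// HAPI FHIR JPA module and are shown in the diff below.
public class TagGuardSketch {

    static class TagDef {
        final String system;
        final String code;
        final String display;

        TagDef(String system, String code, String display) {
            this.system = system;
            this.code = code;
            this.display = display;
        }

        @Override
        public boolean equals(Object o) {
            if (!(o instanceof TagDef)) {
                return false;
            }
            TagDef other = (TagDef) o;
            return Objects.equals(system, other.system)
                    && Objects.equals(code, other.code)
                    && Objects.equals(display, other.display);
        }

        @Override
        public int hashCode() {
            return Objects.hash(system, code, display);
        }
    }

    static boolean isBlank(String value) {
        return value == null || value.trim().isEmpty();
    }

    // Same contract as the new getTagOrNull(): a completely blank tag is never
    // turned into a definition, so nothing gets persisted for it.
    static TagDef getTagOrNull(String system, String code, String display) {
        if (isBlank(system) && isBlank(code) && isBlank(display)) {
            return null;
        }
        return new TagDef(system, code, display);
    }

    public static void main(String[] args) {
        Set<TagDef> allDefs = new HashSet<TagDef>(); // set semantics collapse duplicates

        for (int i = 0; i < 7; i++) { // seven copies of the same tag, as in the new test
            TagDef tag = getTagOrNull("FOO", "BAR", null);
            if (tag != null) {
                allDefs.add(tag);
            }
        }
        TagDef blank = getTagOrNull("", "", ""); // blank tag is ignored
        if (blank != null) {
            allDefs.add(blank);
        }

        System.out.println(allDefs.size()); // prints 1
    }
}

Running it prints 1: seven identical tags produce one definition, and the blank tag produces none.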
@@ -479,65 +479,77 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
     TagList tagList = ResourceMetadataKeyEnum.TAG_LIST.get(theResource);
     if (tagList != null) {
         for (Tag next : tagList) {
-            TagDefinition tag = getTag(TagTypeEnum.TAG, next.getScheme(), next.getTerm(), next.getLabel());
+            TagDefinition tag = getTagOrNull(TagTypeEnum.TAG, next.getScheme(), next.getTerm(), next.getLabel());
+            if (tag != null) {
                 allDefs.add(tag);
                 theEntity.addTag(tag);
                 theEntity.setHasTags(true);
+            }
         }
     }

     List<BaseCodingDt> securityLabels = ResourceMetadataKeyEnum.SECURITY_LABELS.get(theResource);
     if (securityLabels != null) {
         for (BaseCodingDt next : securityLabels) {
-            TagDefinition tag = getTag(TagTypeEnum.SECURITY_LABEL, next.getSystemElement().getValue(), next.getCodeElement().getValue(), next.getDisplayElement().getValue());
+            TagDefinition tag = getTagOrNull(TagTypeEnum.SECURITY_LABEL, next.getSystemElement().getValue(), next.getCodeElement().getValue(), next.getDisplayElement().getValue());
+            if (tag != null) {
                 allDefs.add(tag);
                 theEntity.addTag(tag);
                 theEntity.setHasTags(true);
+            }
         }
     }

     List<IdDt> profiles = ResourceMetadataKeyEnum.PROFILES.get(theResource);
     if (profiles != null) {
         for (IIdType next : profiles) {
-            TagDefinition tag = getTag(TagTypeEnum.PROFILE, NS_JPA_PROFILE, next.getValue(), null);
+            TagDefinition tag = getTagOrNull(TagTypeEnum.PROFILE, NS_JPA_PROFILE, next.getValue(), null);
+            if (tag != null) {
                 allDefs.add(tag);
                 theEntity.addTag(tag);
                 theEntity.setHasTags(true);
+            }
         }
     }
 }

 private void extractTagsRi(IAnyResource theResource, ResourceTable theEntity, Set<TagDefinition> allDefs) {
     List<? extends IBaseCoding> tagList = theResource.getMeta().getTag();
     if (tagList != null) {
         for (IBaseCoding next : tagList) {
-            TagDefinition tag = getTag(TagTypeEnum.TAG, next.getSystem(), next.getCode(), next.getDisplay());
+            TagDefinition tag = getTagOrNull(TagTypeEnum.TAG, next.getSystem(), next.getCode(), next.getDisplay());
+            if (tag != null) {
                 allDefs.add(tag);
                 theEntity.addTag(tag);
                 theEntity.setHasTags(true);
+            }
         }
     }

     List<? extends IBaseCoding> securityLabels = theResource.getMeta().getSecurity();
     if (securityLabels != null) {
         for (IBaseCoding next : securityLabels) {
-            TagDefinition tag = getTag(TagTypeEnum.SECURITY_LABEL, next.getSystem(), next.getCode(), next.getDisplay());
+            TagDefinition tag = getTagOrNull(TagTypeEnum.SECURITY_LABEL, next.getSystem(), next.getCode(), next.getDisplay());
+            if (tag != null) {
                 allDefs.add(tag);
                 theEntity.addTag(tag);
                 theEntity.setHasTags(true);
+            }
         }
     }

     List<? extends IPrimitiveType<String>> profiles = theResource.getMeta().getProfile();
     if (profiles != null) {
         for (IPrimitiveType<String> next : profiles) {
-            TagDefinition tag = getTag(TagTypeEnum.PROFILE, NS_JPA_PROFILE, next.getValue(), null);
+            TagDefinition tag = getTagOrNull(TagTypeEnum.PROFILE, NS_JPA_PROFILE, next.getValue(), null);
+            if (tag != null) {
                 allDefs.add(tag);
                 theEntity.addTag(tag);
                 theEntity.setHasTags(true);
+            }
         }
     }
 }

 private void findMatchingTagIds(String theResourceName, IIdType theResourceId, Set<Long> tagIds, Class<? extends BaseTag> entityClass) {
     {
@@ -662,28 +674,28 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
     return mySearchParamRegistry.getActiveSearchParams(theResourceDef.getName()).values();
 }

-protected TagDefinition getTag(TagTypeEnum theTagType, String theScheme, String theTerm, String theLabel) {
+protected TagDefinition getTagOrNull(TagTypeEnum theTagType, String theScheme, String theTerm, String theLabel) {
+    if (isBlank(theScheme) && isBlank(theTerm) && isBlank(theLabel)) {
+        return null;
+    }
+
     CriteriaBuilder builder = myEntityManager.getCriteriaBuilder();
     CriteriaQuery<TagDefinition> cq = builder.createQuery(TagDefinition.class);
     Root<TagDefinition> from = cq.from(TagDefinition.class);

     //@formatter:off
     if (isNotBlank(theScheme)) {
         cq.where(
             builder.and(
                 builder.equal(from.get("myTagType"), theTagType),
                 builder.equal(from.get("mySystem"), theScheme),
-                builder.equal(from.get("myCode"), theTerm))
-            );
+                builder.equal(from.get("myCode"), theTerm)));
     } else {
         cq.where(
             builder.and(
                 builder.equal(from.get("myTagType"), theTagType),
                 builder.isNull(from.get("mySystem")),
-                builder.equal(from.get("myCode"), theTerm))
-            );
+                builder.equal(from.get("myCode"), theTerm)));
     }
     //@formatter:on

     TypedQuery<TagDefinition> q = myEntityManager.createQuery(cq);
     try {
@@ -938,12 +950,14 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
 if (def.isStandardType() == false) {
     String profile = def.getResourceProfile("");
     if (isNotBlank(profile)) {
-        TagDefinition tag = getTag(TagTypeEnum.PROFILE, NS_JPA_PROFILE, profile, null);
+        TagDefinition tag = getTagOrNull(TagTypeEnum.PROFILE, NS_JPA_PROFILE, profile, null);
+        if (tag != null) {
             allDefs.add(tag);
             theEntity.addTag(tag);
             theEntity.setHasTags(true);
+        }
     }
 }

 ArrayList<ResourceTag> existingTags = new ArrayList<ResourceTag>();
 if (theEntity.isHasTags()) {
@@ -114,11 +114,13 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B

     entity.setHasTags(true);

-    TagDefinition def = getTag(TagTypeEnum.TAG, theScheme, theTerm, theLabel);
+    TagDefinition def = getTagOrNull(TagTypeEnum.TAG, theScheme, theTerm, theLabel);
+    if (def != null) {
         BaseTag newEntity = entity.addTag(def);

         myEntityManager.persist(newEntity);
         myEntityManager.merge(entity);
+    }

     ourLog.info("Processed addTag {}/{} on {} in {}ms", new Object[] { theScheme, theTerm, theId, w.getMillisAndRestart() });
 }
@@ -439,11 +441,13 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
     if (!hasTag) {
         entity.setHasTags(true);

-        TagDefinition def = getTag(nextDef.getTagType(), nextDef.getSystem(), nextDef.getCode(), nextDef.getDisplay());
+        TagDefinition def = getTagOrNull(nextDef.getTagType(), nextDef.getSystem(), nextDef.getCode(), nextDef.getDisplay());
+        if (def != null) {
             BaseTag newEntity = entity.addTag(def);
             myEntityManager.persist(newEntity);
+        }
     }
 }
 //@formatter:on

 myEntityManager.merge(entity);
@@ -0,0 +1,29 @@
+package ca.uhn.fhir.jpa.dao.data;
+
+/*
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2017 University Health Network
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import org.springframework.data.jpa.repository.JpaRepository;
+
+import ca.uhn.fhir.jpa.entity.ResourceTag;
+
+public interface IResourceTagDao extends JpaRepository<ResourceTag, Long> {
+    // nothing
+}
@@ -0,0 +1,29 @@
+package ca.uhn.fhir.jpa.dao.data;
+
+/*
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2017 University Health Network
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import org.springframework.data.jpa.repository.JpaRepository;
+
+import ca.uhn.fhir.jpa.entity.TagDefinition;
+
+public interface ITagDefinitionDao extends JpaRepository<TagDefinition, Long> {
+    // nothing
+}
@@ -77,8 +77,7 @@ import ca.uhn.fhir.jpa.dao.IFhirResourceDaoValueSet;
 import ca.uhn.fhir.jpa.dao.IFhirSystemDao;
 import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
 import ca.uhn.fhir.jpa.dao.ISearchParamRegistry;
-import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
-import ca.uhn.fhir.jpa.dao.data.ISearchDao;
+import ca.uhn.fhir.jpa.dao.data.*;
 import ca.uhn.fhir.jpa.dao.dstu2.FhirResourceDaoDstu2SearchNoFtTest;
 import ca.uhn.fhir.jpa.entity.ResourceIndexedSearchParamString;
 import ca.uhn.fhir.jpa.entity.ResourceTable;
@@ -105,6 +104,10 @@ public abstract class BaseJpaDstu3Test extends BaseJpaTest {
 private static JpaValidationSupportChainDstu3 ourJpaValidationSupportChainDstu3;
 private static IFhirResourceDaoValueSet<ValueSet, Coding, CodeableConcept> ourValueSetDao;

+@Autowired
+protected ITagDefinitionDao myTagDefinitionDao;
+@Autowired
+protected IResourceTagDao myResourceTagDao;
 @Autowired
 protected ISearchDao mySearchEntityDao;
 @Autowired
@@ -8,6 +8,7 @@ import static org.hamcrest.Matchers.empty;
 import static org.hamcrest.Matchers.endsWith;
 import static org.hamcrest.Matchers.greaterThan;
 import static org.hamcrest.Matchers.hasItem;
+import static org.hamcrest.Matchers.hasSize;
 import static org.hamcrest.Matchers.matchesPattern;
 import static org.hamcrest.Matchers.not;
 import static org.junit.Assert.assertEquals;
@@ -154,6 +155,56 @@ public class FhirResourceDaoDstu3Test extends BaseJpaDstu3Test {
     }
 }
+
+@Test
+public void testCreateDuplicateTagsDoesNotCauseDuplicates() {
+    Patient p = new Patient();
+    p.setActive(true);
+
+    p.getMeta().addTag().setSystem("FOO").setCode("BAR");
+    p.getMeta().addTag().setSystem("FOO").setCode("BAR");
+    p.getMeta().addTag().setSystem("FOO").setCode("BAR");
+    p.getMeta().addTag().setSystem("FOO").setCode("BAR");
+    p.getMeta().addTag().setSystem("FOO").setCode("BAR");
+    p.getMeta().addTag().setSystem("FOO").setCode("BAR");
+    p.getMeta().addTag().setSystem("FOO").setCode("BAR");
+
+    myPatientDao.create(p);
+
+    new TransactionTemplate(myTxManager).execute(new TransactionCallbackWithoutResult() {
+        @Override
+        protected void doInTransactionWithoutResult(TransactionStatus theStatus) {
+            assertThat(myResourceTagDao.findAll(), hasSize(1));
+            assertThat(myTagDefinitionDao.findAll(), hasSize(1));
+        }
+    });
+
+}
+
+@Test
+public void testCreateEmptyTagsIsIgnored() {
+    Patient p = new Patient();
+    p.setActive(true);
+
+    // Add an empty tag
+    p.getMeta().addTag();
+
+    // Add another empty tag
+    p.getMeta().addTag().setSystem("");
+    p.getMeta().addTag().setCode("");
+    p.getMeta().addTag().setDisplay("");
+
+    myPatientDao.create(p);
+
+    new TransactionTemplate(myTxManager).execute(new TransactionCallbackWithoutResult() {
+        @Override
+        protected void doInTransactionWithoutResult(TransactionStatus theStatus) {
+            assertThat(myResourceTagDao.findAll(), empty());
+            assertThat(myTagDefinitionDao.findAll(), empty());
+        }
+    });
+
+}
+
 /**
  * This gets called from assertGone too! Careful about exceptions...
  */
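The tests above go through the DAOs inside a transaction. As a rough client-side illustration of the same behaviour, the hedged sketch below creates a Patient with repeated and empty tags over REST and reads it back; the base URL is an assumption, and the imports follow the HAPI FHIR 2.x client API (newer versions moved IGenericClient to ca.uhn.fhir.rest.client.api).

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.client.IGenericClient;
import org.hl7.fhir.dstu3.model.Patient;

public class DuplicateTagClientSketch {
    public static void main(String[] args) {
        FhirContext ctx = FhirContext.forDstu3();
        // Assumed server address -- point this at your own JPA server deployment
        IGenericClient client = ctx.newRestfulGenericClient("http://localhost:8080/baseDstu3");

        Patient p = new Patient();
        p.setActive(true);
        p.getMeta().addTag().setSystem("FOO").setCode("BAR");
        p.getMeta().addTag().setSystem("FOO").setCode("BAR"); // duplicate of the first tag
        p.getMeta().addTag();                                 // completely empty tag

        String id = client.create().resource(p).execute().getId().toUnqualifiedVersionless().getValue();
        Patient read = client.read().resource(Patient.class).withId(id).execute();

        // With this commit the server should persist (and return) a single FOO|BAR tag
        System.out.println(read.getMeta().getTag().size());
    }
}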
@@ -58,9 +58,10 @@ public class DefaultProfileValidationSupport implements IValidationSupport {
 }

 private DomainResource fetchCodeSystemOrValueSet(FhirContext theContext, String theSystem, boolean codeSystem) {
+    synchronized (this) {
         Map<String, CodeSystem> codeSystems = myCodeSystems;
         Map<String, ValueSet> valueSets = myValueSets;
-        if (codeSystems == null) {
+        if (codeSystems == null || valueSets == null) {
             codeSystems = new HashMap<String, CodeSystem>();
             valueSets = new HashMap<String, ValueSet>();
@@ -78,6 +79,7 @@ public class DefaultProfileValidationSupport implements IValidationSupport {
             return valueSets.get(theSystem);
         }
+    }
 }

 @SuppressWarnings("unchecked")
 @Override
@@ -42,7 +42,6 @@ public class DefaultProfileValidationSupport implements IValidationSupport {
     myCodeSystems = null;
 }

-
 @SuppressWarnings("unchecked")
 @Override
 public <T extends IBaseResource> T fetchResource(FhirContext theContext, Class<T> theClass, String theUri) {
@@ -93,18 +92,20 @@ public class DefaultProfileValidationSupport implements IValidationSupport {

 @Override
 public ValueSet fetchCodeSystem(FhirContext theContext, String theSystem) {
-    Map<String, ValueSet> codeSystems = myCodeSystems;
-    if (codeSystems == null) {
-        codeSystems = new HashMap<String, ValueSet>();
+    synchronized (this) {
+        Map<String, ValueSet> valueSets = myCodeSystems;
+        if (valueSets == null) {
+            valueSets = new HashMap<String, ValueSet>();

-        loadCodeSystems(theContext, codeSystems, "/org/hl7/fhir/instance/model/valueset/valuesets.xml");
-        loadCodeSystems(theContext, codeSystems, "/org/hl7/fhir/instance/model/valueset/v2-tables.xml");
-        loadCodeSystems(theContext, codeSystems, "/org/hl7/fhir/instance/model/valueset/v3-codesystems.xml");
+            loadCodeSystems(theContext, valueSets, "/org/hl7/fhir/instance/model/valueset/valuesets.xml");
+            loadCodeSystems(theContext, valueSets, "/org/hl7/fhir/instance/model/valueset/v2-tables.xml");
+            loadCodeSystems(theContext, valueSets, "/org/hl7/fhir/instance/model/valueset/v3-codesystems.xml");

-        myCodeSystems = codeSystems;
+            myCodeSystems = valueSets;
         }

-    return codeSystems.get(theSystem);
+        return valueSets.get(theSystem);
+    }
 }

 private void loadCodeSystems(FhirContext theContext, Map<String, ValueSet> codeSystems, String file) {
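The gist of the #664 fix shown above, as a standalone sketch: the first caller that needs the shared definitions loads them inside a synchronized block, and concurrent callers wait for that single load instead of each building their own copy. The class and names below are illustrative, not the actual DefaultProfileValidationSupport implementation.

import java.util.HashMap;
import java.util.Map;

// Illustrative only: mirrors the shape of the fix, not HAPI's actual class.
public class LazyLoadSketch {

    private Map<String, String> myDefinitions; // null until first use

    public String fetchDefinition(String theUrl) {
        synchronized (this) {
            Map<String, String> definitions = myDefinitions;
            if (definitions == null) {
                // The expensive load happens at most once, even if many threads
                // ask for a definition at the same time right after startup.
                definitions = new HashMap<String, String>();
                definitions.put("http://example.com/StructureDefinition/demo", "...parsed resource...");
                myDefinitions = definitions;
            }
            return definitions.get(theUrl);
        }
    }

    public static void main(String[] args) throws InterruptedException {
        final LazyLoadSketch support = new LazyLoadSketch();
        Runnable validate = new Runnable() {
            @Override
            public void run() {
                support.fetchDefinition("http://example.com/StructureDefinition/demo");
            }
        };
        Thread t1 = new Thread(validate);
        Thread t2 = new Thread(validate);
        t1.start();
        t2.start();
        t1.join();
        t2.join();
        System.out.println("definitions loaded once, shared by both threads");
    }
}

Double-checked locking on a volatile field would avoid taking the lock on every later lookup, but the whole-body synchronized block mirrors the shape of the change in the diff and is easier to reason about.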
@@ -149,6 +149,19 @@
 		<action type="fix">
 			Validator incorrectly rejected references where only an identifier was populated
 		</action>
+		<action type="add">
+			Add a check in the JPA server that prevents completely blank tags, profiles, and security labels
+			from being saved to the database. These were filtered out anyhow when the
+			result was returned to the client, but they were still persisted, which
+			just wasted space.
+		</action>
+		<action type="fix" issue="664">
+			Loading the built-in profile structures (StructureDefinition, ValueSet, etc.) is now done in
+			a synchronized block in order to prevent multiple loads happening if the server processes
+			multiple validations in parallel threads right after startup. Previously a heavy load could
+			cause the server to run out of memory and lock up. Thanks to Karl M Davis
+			for analysis and help fixing this!
+		</action>
 	</release>
 	<release version="2.4" date="2017-04-19">
 		<action type="add">