Sync with master
parent 45a16fe066
commit fe25d93fe6

@@ -0,0 +1,28 @@
package ca.uhn.fhir.jpa.dao.data;

import ca.uhn.fhir.jpa.entity.TermConceptDesignation;
import org.springframework.data.jpa.repository.JpaRepository;

/*
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2018 University Health Network
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

public interface ITermConceptDesignationDao extends JpaRepository<TermConceptDesignation, Long> {
	// nothing
}
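A minimal usage sketch (illustrative only, not part of this commit): the terminology service later in this diff relies on the CRUD operations this interface inherits from Spring Data's JpaRepository, roughly as follows. The helper method name below is hypothetical.

// Illustrative sketch -- "persistDesignations" is a hypothetical helper, not code from this commit.
@Autowired
private ITermConceptDesignationDao myConceptDesignationDao;

private void persistDesignations(TermConcept theConcept) {
	for (TermConceptDesignation next : theConcept.getDesignations()) {
		// save(..) is inherited from Spring Data's CrudRepository
		myConceptDesignationDao.save(next);
	}
}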
@@ -53,7 +53,7 @@ public abstract class BaseResourceIndexedSearchParam implements Serializable {
	private Long myResourcePid;

	@Field()
	@Column(name = "RES_TYPE", nullable = false)
	@Column(name = "RES_TYPE", nullable = false, length = 30)
	private String myResourceType;

	@Field()
@@ -81,6 +81,9 @@ public class TermConcept implements Serializable {
	@FieldBridge(impl = TermConceptPropertyFieldBridge.class)
	private Collection<TermConceptProperty> myProperties;

	@OneToMany(mappedBy = "myConcept", orphanRemoval = true)
	private Collection<TermConceptDesignation> myDesignations;

	@Id()
	@SequenceGenerator(name = "SEQ_CONCEPT_PID", sequenceName = "SEQ_CONCEPT_PID")
	@GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_CONCEPT_PID")

@@ -123,6 +126,13 @@ public class TermConcept implements Serializable {
		}
	}

	public TermConceptDesignation addDesignation() {
		TermConceptDesignation designation = new TermConceptDesignation();
		designation.setConcept(this);
		getDesignations().add(designation);
		return designation;
	}

	private TermConceptProperty addProperty(@Nonnull TermConceptPropertyTypeEnum thePropertyType, @Nonnull String thePropertyName, @Nonnull String thePropertyValue) {
		Validate.notBlank(thePropertyName);
@@ -189,6 +199,29 @@ public class TermConcept implements Serializable {
		}
	}

	public List<Coding> getCodingProperties(String thePropertyName) {
		List<Coding> retVal = new ArrayList<>();
		for (TermConceptProperty next : getProperties()) {
			if (thePropertyName.equals(next.getKey())) {
				if (next.getType() == TermConceptPropertyTypeEnum.CODING) {
					Coding coding = new Coding();
					coding.setSystem(next.getCodeSystem());
					coding.setCode(next.getValue());
					coding.setDisplay(next.getDisplay());
					retVal.add(coding);
				}
			}
		}
		return retVal;
	}

	public Collection<TermConceptDesignation> getDesignations() {
		if (myDesignations == null) {
			myDesignations = new ArrayList<>();
		}
		return myDesignations;
	}

	public String getDisplay() {
		return myDisplay;
	}

@@ -231,6 +264,14 @@ public class TermConcept implements Serializable {
		return myProperties;
	}

	public Integer getSequence() {
		return mySequence;
	}

	public void setSequence(Integer theSequence) {
		mySequence = theSequence;
	}

	public List<String> getStringProperties(String thePropertyName) {
		List<String> retVal = new ArrayList<>();
		for (TermConceptProperty next : getProperties()) {

@@ -243,30 +284,6 @@ public class TermConcept implements Serializable {
		return retVal;
	}

	public List<Coding> getCodingProperties(String thePropertyName) {
		List<Coding> retVal = new ArrayList<>();
		for (TermConceptProperty next : getProperties()) {
			if (thePropertyName.equals(next.getKey())) {
				if (next.getType() == TermConceptPropertyTypeEnum.CODING) {
					Coding coding = new Coding();
					coding.setSystem(next.getCodeSystem());
					coding.setCode(next.getValue());
					coding.setDisplay(next.getDisplay());
					retVal.add(coding);
				}
			}
		}
		return retVal;
	}

	public Integer getSequence() {
		return mySequence;
	}

	public void setSequence(Integer theSequence) {
		mySequence = theSequence;
	}

	public String getStringProperty(String thePropertyName) {
		List<String> properties = getStringProperties(thePropertyName);
		if (properties.size() > 0) {
@@ -0,0 +1,103 @@
package ca.uhn.fhir.jpa.entity;

/*
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2018 University Health Network
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import javax.persistence.*;
import java.io.Serializable;

@Entity
@Table(name = "TRM_CONCEPT_DESIG", uniqueConstraints = {
}, indexes = {
})
public class TermConceptDesignation implements Serializable {

	private static final long serialVersionUID = 1L;
	@ManyToOne
	@JoinColumn(name = "CONCEPT_PID", referencedColumnName = "PID", foreignKey = @ForeignKey(name = "FK_CONCEPTPROP_CONCEPT"))
	private TermConcept myConcept;
	@Id()
	@SequenceGenerator(name = "SEQ_CONCEPT_DESIG_PID", sequenceName = "SEQ_CONCEPT_DESIG_PID")
	@GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_CONCEPT_DESIG_PID")
	@Column(name = "PID")
	private Long myId;
	@Column(name = "LANG", length = 50, nullable = true)
	private String myLanguage;
	@Column(name = "USE_SYSTEM", length = 50, nullable = true)
	private String myUseSystem;
	@Column(name = "USE_CODE", length = 50, nullable = true)
	private String myUseCode;
	@Column(name = "USE_DISPLAY", length = 50, nullable = true)
	private String myUseDisplay;
	@Column(name = "VAL", length = 50, nullable = false)
	private String myValue;

	public String getLanguage() {
		return myLanguage;
	}

	public TermConceptDesignation setLanguage(String theLanguage) {
		myLanguage = theLanguage;
		return this;
	}

	public String getUseCode() {
		return myUseCode;
	}

	public TermConceptDesignation setUseCode(String theUseCode) {
		myUseCode = theUseCode;
		return this;
	}

	public String getUseDisplay() {
		return myUseDisplay;
	}

	public TermConceptDesignation setUseDisplay(String theUseDisplay) {
		myUseDisplay = theUseDisplay;
		return this;
	}

	public String getUseSystem() {
		return myUseSystem;
	}

	public TermConceptDesignation setUseSystem(String theUseSystem) {
		myUseSystem = theUseSystem;
		return this;
	}

	public String getValue() {
		return myValue;
	}

	public TermConceptDesignation setValue(String theValue) {
		myValue = theValue;
		return this;
	}

	public TermConceptDesignation setConcept(TermConcept theConcept) {
		myConcept = theConcept;
		return this;
	}


}
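As a rough illustration (not part of this commit) of how the new entity is meant to be used: a designation is built through the fluent setters above, attached to a concept via TermConcept.addDesignation(), and persisted through ITermConceptDesignationDao. The "concept" and "myConceptDesignationDao" variables below are assumed to exist in the calling code, and the SNOMED CT use coding is only an example.

// Illustrative sketch only; "concept" and "myConceptDesignationDao" are assumptions.
TermConceptDesignation designation = concept.addDesignation()
	.setLanguage("en")
	.setUseSystem("http://snomed.info/sct")
	.setUseCode("900000000000013009") // "Synonym" -- example designation use code
	.setUseDisplay("Synonym")
	.setValue("An alternate display name");
myConceptDesignationDao.save(designation);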
@@ -78,6 +78,8 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc
	@Autowired
	protected ITermConceptPropertyDao myConceptPropertyDao;
	@Autowired
	protected ITermConceptDesignationDao myConceptDesignationDao;
	@Autowired
	protected FhirContext myContext;
	@PersistenceContext(type = PersistenceContextType.TRANSACTION)
	protected EntityManager myEntityManager;

@@ -94,7 +96,7 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc
	private boolean myProcessDeferred = true;
	@Autowired
	private PlatformTransactionManager myTransactionMgr;
	@Autowired
	@Autowired(required = false)
	private IFhirResourceDaoCodeSystem<?, ?, ?> myCodeSystemResourceDao;

	private void addCodeIfNotAlreadyAdded(String theCodeSystem, ValueSet.ValueSetExpansionComponent theExpansionComponent, Set<String> theAddedCodes, TermConcept theConcept) {

@@ -106,6 +108,19 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc
		}
	}

	private void addConceptsToList(ValueSet.ValueSetExpansionComponent theExpansionComponent, Set<String> theAddedCodes, String theSystem, List<CodeSystem.ConceptDefinitionComponent> theConcept) {
		for (CodeSystem.ConceptDefinitionComponent next : theConcept) {
			if (!theAddedCodes.contains(next.getCode())) {
				theAddedCodes.add(next.getCode());
				ValueSet.ValueSetExpansionContainsComponent contains = theExpansionComponent.addContains();
				contains.setCode(next.getCode());
				contains.setSystem(theSystem);
				contains.setDisplay(next.getDisplay());
			}
			addConceptsToList(theExpansionComponent, theAddedCodes, theSystem, next.getConcept());
		}
	}

	private void addDisplayFilterExact(QueryBuilder qb, BooleanJunction<?> bool, ValueSet.ConceptSetFilterComponent nextFilter) {
		bool.must(qb.phrase().onField("myDisplay").sentence(nextFilter.getValue()).createQuery());
	}

@@ -142,11 +157,35 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc
	@Override
	public void deleteCodeSystem(TermCodeSystem theCodeSystem) {
		ourLog.info(" * Deleting code system {}", theCodeSystem.getPid());

		myEntityManager.flush();
		TermCodeSystem cs = myCodeSystemDao.findOne(theCodeSystem.getPid());
		cs.setCurrentVersion(null);
		myCodeSystemDao.save(cs);
		myCodeSystemDao.flush();

		int i = 0;
		for (TermCodeSystemVersion next : myCodeSystemVersionDao.findByCodeSystemResource(theCodeSystem.getPid())) {
			myConceptParentChildLinkDao.deleteByCodeSystemVersion(next.getPid());
			myConceptDao.deleteByCodeSystemVersion(next.getPid());
			for (TermConcept nextConcept : myConceptDao.findByCodeSystemVersion(next.getPid())) {
				myConceptPropertyDao.delete(nextConcept.getProperties());
				myConceptDesignationDao.delete(nextConcept.getDesignations());
				myConceptDao.delete(nextConcept);
			}
			if (next.getCodeSystem().getCurrentVersion() == next) {
				next.getCodeSystem().setCurrentVersion(null);
				myCodeSystemDao.save(next.getCodeSystem());
			}
			myCodeSystemVersionDao.delete(next);

			if (i % 1000 == 0) {
				myEntityManager.flush();
			}
		}
		myCodeSystemDao.delete(theCodeSystem.getPid());
		myCodeSystemVersionDao.deleteForCodeSystem(theCodeSystem);
		myCodeSystemDao.delete(theCodeSystem);

		myEntityManager.flush();
	}

	private int ensureParentsSaved(Collection<TermConceptParentChildLink> theParents) {
@@ -171,112 +210,140 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc
	@Override
	@Transactional(propagation = Propagation.REQUIRED)
	public ValueSet expandValueSet(ValueSet theValueSetToExpand) {

		ValueSet.ConceptSetComponent include = theValueSetToExpand.getCompose().getIncludeFirstRep();
		String system = include.getSystem();
		ourLog.info("Starting expansion around code system: {}", system);

		TermCodeSystem cs = myCodeSystemDao.findByCodeSystemUri(system);
		if (cs == null) {
			throw new InvalidRequestException("Unknown code system: " + system);
		}

		TermCodeSystemVersion csv = cs.getCurrentVersion();

		ValueSet.ValueSetExpansionComponent expansionComponent = new ValueSet.ValueSetExpansionComponent();
		Set<String> addedCodes = new HashSet<>();
		boolean haveIncludeCriteria = false;

		/*
		 * Include Concepts
		 */
		for (ValueSet.ConceptReferenceComponent next : include.getConcept()) {
			String nextCode = next.getCode();
			if (isNotBlank(nextCode) && !addedCodes.contains(nextCode)) {
				haveIncludeCriteria = true;
				TermConcept code = findCode(system, nextCode);
				if (code != null) {
					addedCodes.add(nextCode);
					ValueSet.ValueSetExpansionContainsComponent contains = expansionComponent.addContains();
					contains.setCode(nextCode);
					contains.setSystem(system);
					contains.setDisplay(code.getDisplay());
				}
			}
		}
		for (ValueSet.ConceptSetComponent include : theValueSetToExpand.getCompose().getInclude()) {
			String system = include.getSystem();
			if (isNotBlank(system)) {
				ourLog.info("Starting expansion around code system: {}", system);

				/*
				 * Filters
				 */
				TermCodeSystem cs = myCodeSystemDao.findByCodeSystemUri(system);
				if (cs != null) {
					TermCodeSystemVersion csv = cs.getCurrentVersion();

					if (include.getFilter().size() > 0) {
						haveIncludeCriteria = true;

						FullTextEntityManager em = org.hibernate.search.jpa.Search.getFullTextEntityManager(myEntityManager);
						QueryBuilder qb = em.getSearchFactory().buildQueryBuilder().forEntity(TermConcept.class).get();
						BooleanJunction<?> bool = qb.bool();

						bool.must(qb.keyword().onField("myCodeSystemVersionPid").matching(csv.getPid()).createQuery());

						for (ValueSet.ConceptSetFilterComponent nextFilter : include.getFilter()) {
							if (isBlank(nextFilter.getValue()) && nextFilter.getOp() == null && isBlank(nextFilter.getProperty())) {
								continue;
							}

							if (isBlank(nextFilter.getValue()) || nextFilter.getOp() == null || isBlank(nextFilter.getProperty())) {
								throw new InvalidRequestException("Invalid filter, must have fields populated: property op value");
							}


							if (nextFilter.getProperty().equals("display:exact") && nextFilter.getOp() == ValueSet.FilterOperator.EQUAL) {
								addDisplayFilterExact(qb, bool, nextFilter);
							} else if ("display".equals(nextFilter.getProperty()) && nextFilter.getOp() == ValueSet.FilterOperator.EQUAL) {
								if (nextFilter.getValue().trim().contains(" ")) {
									addDisplayFilterExact(qb, bool, nextFilter);
								} else {
									addDisplayFilterInexact(qb, bool, nextFilter);
								}
							} else if ((nextFilter.getProperty().equals("concept") || nextFilter.getProperty().equals("code")) && nextFilter.getOp() == ValueSet.FilterOperator.ISA) {
								TermConcept code = findCode(system, nextFilter.getValue());
								if (code == null) {
									throw new InvalidRequestException("Invalid filter criteria - code does not exist: {" + system + "}" + nextFilter.getValue());
								/*
								 * Include Concepts
								 */
								for (ValueSet.ConceptReferenceComponent next : include.getConcept()) {
									String nextCode = next.getCode();
									if (isNotBlank(nextCode) && !addedCodes.contains(nextCode)) {
										haveIncludeCriteria = true;
										TermConcept code = findCode(system, nextCode);
										if (code != null) {
											addedCodes.add(nextCode);
											ValueSet.ValueSetExpansionContainsComponent contains = expansionComponent.addContains();
											contains.setCode(nextCode);
											contains.setSystem(system);
											contains.setDisplay(code.getDisplay());
										}
									}
								}

								ourLog.info(" * Filtering on codes with a parent of {}/{}/{}", code.getId(), code.getCode(), code.getDisplay());
								bool.must(qb.keyword().onField("myParentPids").matching("" + code.getId()).createQuery());
								/*
								 * Filters
								 */

							} else {
					if (include.getFilter().size() > 0) {
						haveIncludeCriteria = true;

						FullTextEntityManager em = org.hibernate.search.jpa.Search.getFullTextEntityManager(myEntityManager);
						QueryBuilder qb = em.getSearchFactory().buildQueryBuilder().forEntity(TermConcept.class).get();
						BooleanJunction<?> bool = qb.bool();

						bool.must(qb.keyword().onField("myCodeSystemVersionPid").matching(csv.getPid()).createQuery());

						for (ValueSet.ConceptSetFilterComponent nextFilter : include.getFilter()) {
							if (isBlank(nextFilter.getValue()) && nextFilter.getOp() == null && isBlank(nextFilter.getProperty())) {
								continue;
							}

							if (isBlank(nextFilter.getValue()) || nextFilter.getOp() == null || isBlank(nextFilter.getProperty())) {
								throw new InvalidRequestException("Invalid filter, must have fields populated: property op value");
							}


							if (nextFilter.getProperty().equals("display:exact") && nextFilter.getOp() == ValueSet.FilterOperator.EQUAL) {
								addDisplayFilterExact(qb, bool, nextFilter);
							} else if ("display".equals(nextFilter.getProperty()) && nextFilter.getOp() == ValueSet.FilterOperator.EQUAL) {
								if (nextFilter.getValue().trim().contains(" ")) {
									addDisplayFilterExact(qb, bool, nextFilter);
								} else {
									addDisplayFilterInexact(qb, bool, nextFilter);
								}
							} else if ((nextFilter.getProperty().equals("concept") || nextFilter.getProperty().equals("code")) && nextFilter.getOp() == ValueSet.FilterOperator.ISA) {

								TermConcept code = findCode(system, nextFilter.getValue());
								if (code == null) {
									throw new InvalidRequestException("Invalid filter criteria - code does not exist: {" + system + "}" + nextFilter.getValue());
								}

								ourLog.info(" * Filtering on codes with a parent of {}/{}/{}", code.getId(), code.getCode(), code.getDisplay());
								bool.must(qb.keyword().onField("myParentPids").matching("" + code.getId()).createQuery());

							} else {

								// bool.must(qb.keyword().onField("myProperties").matching(nextFilter.getStringProperty()+"="+nextFilter.getValue()).createQuery());
								bool.must(qb.phrase().onField("myProperties").sentence(nextFilter.getProperty() + "=" + nextFilter.getValue()).createQuery());
								bool.must(qb.phrase().onField("myProperties").sentence(nextFilter.getProperty() + "=" + nextFilter.getValue()).createQuery());

							}
						}

						Query luceneQuery = bool.createQuery();
						FullTextQuery jpaQuery = em.createFullTextQuery(luceneQuery, TermConcept.class);
						jpaQuery.setMaxResults(1000);

						StopWatch sw = new StopWatch();

						@SuppressWarnings("unchecked")
						List<TermConcept> result = jpaQuery.getResultList();

						ourLog.info("Expansion completed in {}ms", sw.getMillis());

						for (TermConcept nextConcept : result) {
							addCodeIfNotAlreadyAdded(system, expansionComponent, addedCodes, nextConcept);
						}

						expansionComponent.setTotal(jpaQuery.getResultSize());
					}

					if (!haveIncludeCriteria) {
						List<TermConcept> allCodes = findCodes(system);
						for (TermConcept nextConcept : allCodes) {
							addCodeIfNotAlreadyAdded(system, expansionComponent, addedCodes, nextConcept);
						}
					}

				} else {
					// No codesystem matching the URL found in the database

					CodeSystem codeSystemFromContext = getCodeSystemFromContext(system);
					if (codeSystemFromContext == null) {
						throw new InvalidRequestException("Unknown code system: " + system);
					}

					if (include.getConcept().isEmpty() == false) {
						for (ValueSet.ConceptReferenceComponent next : include.getConcept()) {
							String nextCode = next.getCode();
							if (isNotBlank(nextCode) && !addedCodes.contains(nextCode)) {
								CodeSystem.ConceptDefinitionComponent code = findCode(codeSystemFromContext.getConcept(), nextCode);
								if (code != null) {
									addedCodes.add(nextCode);
									ValueSet.ValueSetExpansionContainsComponent contains = expansionComponent.addContains();
									contains.setCode(nextCode);
									contains.setSystem(system);
									contains.setDisplay(code.getDisplay());
								}
							}
						}
					} else {
						List<CodeSystem.ConceptDefinitionComponent> concept = codeSystemFromContext.getConcept();
						addConceptsToList(expansionComponent, addedCodes, system, concept);
					}

				}
			}

		Query luceneQuery = bool.createQuery();
		FullTextQuery jpaQuery = em.createFullTextQuery(luceneQuery, TermConcept.class);
		jpaQuery.setMaxResults(1000);

		StopWatch sw = new StopWatch();

		@SuppressWarnings("unchecked")
		List<TermConcept> result = jpaQuery.getResultList();

		ourLog.info("Expansion completed in {}ms", sw.getMillis());

		for (TermConcept nextConcept : result) {
			addCodeIfNotAlreadyAdded(system, expansionComponent, addedCodes, nextConcept);
		}

		expansionComponent.setTotal(jpaQuery.getResultSize());
		}

		if (!haveIncludeCriteria) {
			List<TermConcept> allCodes = findCodes(system);
			for (TermConcept nextConcept : allCodes) {
				addCodeIfNotAlreadyAdded(system, expansionComponent, addedCodes, nextConcept);
			}
		}

		ValueSet valueSet = new ValueSet();
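A hedged sketch of how the expansion logic above might be exercised (not part of this commit): a ValueSet whose include carries an is-a filter is handed to expandValueSet(ValueSet), which routes "concept"/"code" filters with the ISA operator through the Hibernate Search parent-PID query shown above. The service reference and the code system URL/code values below are placeholders borrowed from the test conventions later in this diff.

// Illustrative sketch only; "myTermSvc" and the URL/code values are assumptions.
ValueSet toExpand = new ValueSet();
toExpand.getCompose()
	.addInclude()
	.setSystem("http://example.com/my_code_system")
	.addFilter()
	.setProperty("concept")
	.setOp(ValueSet.FilterOperator.ISA)
	.setValue("childAA");
ValueSet expanded = myTermSvc.expandValueSet(toExpand);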
@@ -284,9 +351,17 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc
		return valueSet;
	}

	@Override
	public List<VersionIndependentConcept> expandValueSet(String theValueSet) {
		throw new UnsupportedOperationException(); // FIXME implement
	protected List<VersionIndependentConcept> expandValueSetAndReturnVersionIndependentConcepts(org.hl7.fhir.r4.model.ValueSet theValueSetToExpandR4) {
		org.hl7.fhir.r4.model.ValueSet.ValueSetExpansionComponent expandedR4 = expandValueSet(theValueSetToExpandR4).getExpansion();

		ArrayList<VersionIndependentConcept> retVal = new ArrayList<>();
		for (org.hl7.fhir.r4.model.ValueSet.ValueSetExpansionContainsComponent nextContains : expandedR4.getContains()) {
			retVal.add(
				new VersionIndependentConcept()
					.setSystem(nextContains.getSystem())
					.setCode(nextContains.getCode()));
		}
		return retVal;
	}

	private void fetchChildren(TermConcept theConcept, Set<TermConcept> theSetToPopulate) {

@@ -312,6 +387,16 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc
		}
	}

	private CodeSystem.ConceptDefinitionComponent findCode(List<CodeSystem.ConceptDefinitionComponent> theConcepts, String theCode) {
		for (CodeSystem.ConceptDefinitionComponent next : theConcepts) {
			if (theCode.equals(next.getCode())) {
				return next;
			}
			findCode(next.getConcept(), theCode);
		}
		return null;
	}

	@Override
	public TermConcept findCode(String theCodeSystem, String theCode) {
		TermCodeSystemVersion csv = findCurrentCodeSystemVersionForSystem(theCodeSystem);

@@ -398,6 +483,8 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc
		return myCodeSystemDao.findByCodeSystemUri(theSystem);
	}

	protected abstract CodeSystem getCodeSystemFromContext(String theSystem);

	private void persistChildren(TermConcept theConcept, TermCodeSystemVersion theCodeSystem, IdentityHashMap<TermConcept, Object> theConceptsStack, int theTotalConcepts) {
		if (theConceptsStack.put(theConcept, PLACEHOLDER_OBJECT) != null) {
			return;

@@ -429,10 +516,14 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc
			}
		}

		for (TermConceptProperty next : theConcept.getProperties()){
		for (TermConceptProperty next : theConcept.getProperties()) {
			myConceptPropertyDao.save(next);
		}

		for (TermConceptDesignation next : theConcept.getDesignations()) {
			myConceptDesignationDao.save(next);
		}

	}

	private void populateVersion(TermConcept theNext, TermCodeSystemVersion theCodeSystemVersion) {

@@ -636,12 +727,16 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc
		for (TermCodeSystemVersion next : existing) {
			ourLog.info(" * Deleting code system version {}", next.getPid());
			myConceptParentChildLinkDao.deleteByCodeSystemVersion(next.getPid());
			myConceptDao.deleteByCodeSystemVersion(next.getPid());
			for (TermConcept nextConcept : myConceptDao.findByCodeSystemVersion(next.getPid())) {
				myConceptPropertyDao.delete(nextConcept.getProperties());
				myConceptDao.delete(nextConcept);
			}
		}

		ourLog.info("Flushing...");

		myConceptParentChildLinkDao.flush();
		myConceptPropertyDao.flush();
		myConceptDao.flush();

		ourLog.info("Done flushing");
@@ -3,6 +3,7 @@ package ca.uhn.fhir.jpa.term.loinc;
import ca.uhn.fhir.jpa.entity.TermConcept;
import ca.uhn.fhir.jpa.term.IRecordHandler;
import org.hl7.fhir.r4.model.ConceptMap;
import org.hl7.fhir.r4.model.ContactPoint;
import org.hl7.fhir.r4.model.Enumerations;
import org.hl7.fhir.r4.model.ValueSet;

@@ -79,6 +80,12 @@ abstract class BaseHandler implements IRecordHandler {
			conceptMap.setId(theMapping.getConceptMapId());
			conceptMap.setUrl(theMapping.getConceptMapUri());
			conceptMap.setName(theMapping.getConceptMapName());
			conceptMap.setPublisher("Regenstrief Institute, Inc.");
			conceptMap.addContact()
				.setName("Regenstrief Institute, Inc.")
				.addTelecom()
				.setSystem(ContactPoint.ContactPointSystem.URL)
				.setValue("https://loinc.org");
			myIdToConceptMaps.put(theMapping.getConceptMapId(), conceptMap);
			myConceptMaps.add(conceptMap);
		} else {

@@ -144,6 +151,12 @@ abstract class BaseHandler implements IRecordHandler {
			vs.setId(theValueSetId);
			vs.setName(theValueSetName);
			vs.setStatus(Enumerations.PublicationStatus.ACTIVE);
			vs.setPublisher("Regenstrief Institute, Inc.");
			vs.addContact()
				.setName("Regenstrief Institute, Inc.")
				.addTelecom()
				.setSystem(ContactPoint.ContactPointSystem.URL)
				.setValue("https://loinc.org");
			myIdToValueSet.put(theValueSetId, vs);
			myValueSets.add(vs);
		} else {
@@ -1,5 +1,25 @@
package ca.uhn.fhir.jpa.term.loinc;

/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2018 University Health Network
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
import ca.uhn.fhir.jpa.entity.TermConcept;
import ca.uhn.fhir.jpa.term.IRecordHandler;

@@ -9,6 +29,7 @@ import org.hl7.fhir.r4.model.ValueSet;
import java.util.HashMap;
import java.util.Map;

import static org.apache.commons.lang3.StringUtils.isNotBlank;
import static org.apache.commons.lang3.StringUtils.trim;

public class LoincPartHandler implements IRecordHandler {

@@ -30,15 +51,23 @@ public class LoincPartHandler implements IRecordHandler {
		String partTypeName = trim(theRecord.get("PartTypeName"));
		String partName = trim(theRecord.get("PartName"));
		String partDisplayName = trim(theRecord.get("PartDisplayName"));
		String status = trim(theRecord.get("Status"));

		if (!"ACTIVE".equals(status)) {
			return;
		}
		// Per Dan's note, we include deprecated parts
		// String status = trim(theRecord.get("Status"));
		// if (!"ACTIVE".equals(status)) {
		//    return;
		// }

		TermConcept concept = new TermConcept(myCodeSystemVersion, partNumber);
		concept.setDisplay(partName);

		if (isNotBlank(partDisplayName)) {
			concept.addDesignation()
				.setConcept(concept)
				.setUseDisplay("PartDisplayName")
				.setValue(partDisplayName);
		}

		myCode2Concept.put(partDisplayName, concept);
	}
@@ -181,6 +181,11 @@ public class TerminologySvcImplDstu3Test extends BaseJpaDstu3Test {
		TermConcept childAAA = new TermConcept(cs, "childAAA");
		childAAA.addPropertyString("propA", "valueAAA");
		childAAA.addPropertyString("propB", "foo");
		childAAA.addDesignation()
			.setUseSystem("http://designationsystem")
			.setUseCode("somedesig")
			.setUseDisplay("Designation Use")
			.setValue("Bananas");
		childAA.addChild(childAAA, RelationshipTypeEnum.ISA);

		TermConcept childAAB = new TermConcept(cs, "childAAB");
@@ -6,7 +6,37 @@
		<title>HAPI FHIR Changelog</title>
	</properties>
	<body>
		<release version="3.3.0" date="TBD">
		<release version="3.4.0" date="TBD">
			<action type="fix">
				When performing a FHIR resource update in the JPA server
				where the update happens within a transaction, and the
				resource being updated contains placeholder IDs, and
				the resource has not actually changed, a new version was
				created even though there was no actual change.
				This particular combination of circumstances seems very
				specific and improbable, but it is quite common for some
				types of solutions (e.g. mapping HL7v2 data), so this
				fix can prevent significant wasted space in some cases.
			</action>
			<action type="fix">
				JPA server index tables did not have a column length specified
				on the resource type column. This caused the default of 255 to
				be used, which wasted a lot of space since resource names are all
				less than 30 chars long and a single resource can have 10-100+
				index rows depending on configuration. This has now been set
				to a much more sensible 30.
			</action>
			<action type="fix">
				The LOINC uploader for the JPA Terminology Server has been
				significantly beefed up so that it now takes in the full
				set of LOINC distribution artifacts, and creates not only
				the LOINC CodeSystem but a complete set of concept properties,
				a number of LOINC ValueSets, and a number of LOINC ConceptMaps.
				This work was sponsored by the Regenstrief Institute. Thanks
				to Regenstrief for their support!
			</action>
		</release>
		<release version="3.3.0" date="2018-03-29">
			<action type="add">
				This release corrects an inefficiency in the JPA Server, but requires a schema
				change in order to update. Prior to this version of HAPI FHIR, a CLOB column
@@ -14,12 +44,29 @@
				tables: HFJ_RESOURCE and HFJ_RES_VER. Because the same content was stored in two
				places, the database consumed more space than it needed to.
				<![CDATA[<br/><br/>]]>
				In order to reduce this duplication, two columns have been removed from the
				HFJ_RESOURCE table. This means that on any database that is being upgraded
				to HAPI FHIR 3.3.0+, you will need to remove the columns
				<![CDATA[<code>RES_TEXT</code> and <code>RES_ENCODING</code>]]> (or
				set them to nullable if you want an easy means of rolling back). Naturally
				you should back your database up prior to making this change.
				In order to reduce this duplication, the
				<![CDATA[<code>RES_TEXT</code> and <code>RES_ENCODING</code>]]>
				columns have been
				<![CDATA[<b>dropped</b>]]>
				from the
				<![CDATA[<code>HFJ_RESOURCE</code>]]>
				table, and the
				<![CDATA[<code>RES_TEXT</code> and <code>RES_ENCODING</code>]]>
				columns have been
				<![CDATA[<b>made NULLABLE</b>]]>
				on the
				<![CDATA[<code>HFJ_RES_VER</code>]]>
				table.
				<![CDATA[<br/><br/>]]>
				The following migration script may be used to apply these changes to
				your database. Naturally you should back your database up prior to
				making this change.
				<![CDATA[
				<pre>ALTER TABLE hfj_resource DROP COLUMN res_text;
ALTER TABLE hfj_resource DROP COLUMN res_encoding;
ALTER TABLE hfj_res_ver ALTER COLUMN res_encoding DROP NOT NULL;
ALTER TABLE hfj_res_ver ALTER COLUMN res_text DROP NOT NULL;</pre>
				]]>
			</action>
			<action type="fix">
				The validation module has been refactored to use the R4 (currently maintained)
@@ -41,6 +88,28 @@
				</ul>
				]]>
			</action>
			<action type="add" issue="871">
				A number of HAPI FHIR modules have been converted so that they now work
				as OSGi modules. Unlike the previous OSGi module, which was a mega-JAR
				with all of HAPI FHIR in it, this is simply the appropriate
				OSGi manifest inside the existing JARs. Thanks to John Poth
				for the Pull Request!
				<![CDATA[
				<br/><br/>
				Note that this does not cover all modules in the project. Current support includes:
				<ul>
					<li>HAPI-FHIR structures DSTU2, HL7ORGDSTU2, DSTU2.1, DSTU3, R4</li>
					<li>HAPI-FHIR Resource validation DSTU2, HL7ORGDSTU2, DSTU2.1, DSTU3, R4</li>
					<li>Apache Karaf features for all the above</li>
					<li>Integration Tests</li>
				</ul>
				Remaining work includes:
				<ul>
					<li>HAPI-FHIR Server support</li>
					<li>HAPI-FHIR narrative support. This might be tricky as Thymeleaf doesn't support OSGi.</li>
				</ul>
				]]>
			</action>
			<action type="fix">
				Fix a crash in the JSON parser when parsing extensions on repeatable
				elements (e.g. Patient.address.line) where there is an extension on the
@@ -240,6 +309,48 @@
				is supported according to the FHIR specification. Thanks
				to Jeff Chung for the pull request!
			</action>
			<action type="fix">
				JPA Server Operation Interceptor create/update methods will now no
				longer be fired if the create/update operation being performed
				is a no-op (e.g. a conditional create that did not need to perform
				any action, or an update where the contents didn't actually change)
			</action>
			<action type="fix" issue="879">
				JPA server sometimes updated resources even though the client
				supplied an update with no actual changes in it, due to
				changes in the metadata section being considered content
				changes. Thanks to Kyle Meadows for the pull request!
			</action>
			<action type="add" issue="817">
				A new example project has been added called hapi-fhir-jpaserver-dynamic,
				which uses application/environment properties to configure which version
				of FHIR the server supports and other configuration. Thanks to
				Anoush Mouradian for the pull request!
			</action>
			<action type="add" issue="581">
				A new example project showing the use of JAX-RS Server Side Events has
				been added. Thanks to Jens Kristian Villadsen for the pull request!
			</action>
			<action type="remove" issue="864">
				An unnecessary reference to the Javassist library has been
				removed from the build. Thanks to Łukasz Dywicki for the
				pull request!
			</action>
			<action type="add" issue="819">
				Support has been added to the JPA server for the :not modifier. Thanks
				to Łukasz Dywicki for the pull request!
			</action>
			<action type="add" issue="877">
				Support for the :contains string search parameter modifier has been added to
				the JPA server. Thanks to Anthony Sute for the pull request!
			</action>
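For the two search modifiers described in the entries above, a hedged illustration of client usage (not part of the changelog itself): it assumes an already-configured HAPI FHIR IGenericClient named "client", and the parameter values are arbitrary examples.

// Illustrative sketch only; ":not" excludes a token value, ":contains" does a substring string match.
Bundle notMale = client.search()
	.byUrl("Patient?gender:not=male")
	.returnBundle(Bundle.class)
	.execute();

Bundle nameContainsAnn = client.search()
	.byUrl("Patient?name:contains=ann")
	.returnBundle(Bundle.class)
	.execute();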
<action type="fix">
|
||||
All instances of DefaultProfileValidationSupport (i.e. one for
|
||||
each version of FHIR) have been fixed so that they explicitly
|
||||
close any InputStreams they open in order to read the built-in
|
||||
profile resources. Leaving these open caused resource starvation
|
||||
in some cases under heavy load.
|
||||
</action>
|
||||
</release>
|
||||
<release version="3.2.0" date="2018-01-13">
|
||||
<action type="add">
|
||||