Issue 3391 2588 expand valueset regex include not working (#3410)

* Refactor ValueSet expansion code to simplify property filtering by using the new Hibernate Search 6 nested field capabilities; use generic Hibernate Search queries where possible (mostly everywhere but regex queries) and add R4 integration tests for both Elasticsearch and Lucene.

* Add comment for trick

* Add codes to exception messages

* Add test specific for the fix

* Move changelog to current release. Add upgrade notice.

* Improve fail-fast method

* Update upgrade instructions

* Try luck with new exception codes

* Update hapi-fhir-storage/src/test/java/ca/uhn/fhir/jpa/search/ElasticsearchNestedQueryBuilderUtilTest.java

Add missed quote

Co-authored-by: michaelabuckley <michael.buckley@smilecdr.com>

Co-authored-by: juan.marchionatto <juan.marchionatto@smilecdr.com>
Co-authored-by: michaelabuckley <michael.buckley@smilecdr.com>
jmarchionatto 2022-02-24 16:32:11 -05:00 committed by GitHub
parent 6968cdfcfc
commit dbc9427a05
18 changed files with 2497 additions and 251 deletions


@@ -25,7 +25,7 @@ public final class Msg {
 /**
 * IMPORTANT: Please update the following comment after you add a new code
-* Last code value: 2036
+* Last code value: 2038
 */
 private Msg() {}


@@ -0,0 +1,4 @@
+---
+type: fix
+issue: 2588
+title: "Include property regex operation was not working when expanding ValueSet. This is now fixed"
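
For context, the operation in question is a ValueSet compose include filter with op = regex on a concept property. A minimal R4 sketch of such a ValueSet follows; the code system URL and property name are placeholders, and the regex is the sample pattern quoted in ElasticsearchNestedQueryBuilderUtil later in this diff:

```java
import org.hl7.fhir.r4.model.ValueSet;

class RegexIncludeFilterSketch {
	// Placeholder system and property; the regex matches property values ending in "^Donor".
	static ValueSet valueSetWithRegexPropertyFilter() {
		ValueSet vs = new ValueSet();
		ValueSet.ConceptSetComponent include = vs.getCompose().addInclude();
		include.setSystem("http://example.org/my-code-system");
		include.addFilter()
			.setProperty("propAA")
			.setOp(ValueSet.FilterOperator.REGEX)
			.setValue(".*\\^Donor$");
		return vs;
	}
}
```

Expanding such a ValueSet should now only include concepts whose property value matches the regex, which is what the new R4 integration tests exercise against both Lucene and Elasticsearch.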


@@ -0,0 +1,28 @@
+This release has some breaking changes that are worth knowing about.
+* Hibernate Search mappings for Terminology entities have been upgraded, which causes a reindex of ValueSet resources to be required.
+## Hibernate Search Mappings Upgrade
+Some freetext mappings (definitions about how resources are freetext indexed) were changed for Terminology resources.
+This change requires a full reindexing for any Smile CDR installations which make use of the following features:
+* Fulltext/Content search
+* Terminology Expansion
+To reindex all resources call:
+```http
+POST /$reindex
+Content-Type: application/fhir+json
+{
+"resourceType": "Parameters",
+"parameter": [ {
+"name": "everything",
+"valueBoolean": "true"
+}, {
+"name": "batchSize",
+"valueDecimal": 1000
+} ]
+}
+```
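
The same request can also be issued programmatically; the sketch below is one possible way to do it with a HAPI FHIR generic client (the base URL and R4 client setup are assumptions, not part of this change):

```java
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import org.hl7.fhir.r4.model.BooleanType;
import org.hl7.fhir.r4.model.DecimalType;
import org.hl7.fhir.r4.model.Parameters;

class ReindexAllSketch {
	public static void main(String[] args) {
		// Assumed server base URL; adjust for the actual deployment.
		FhirContext ctx = FhirContext.forR4();
		IGenericClient client = ctx.newRestfulGenericClient("http://localhost:8080/fhir");

		// Same Parameters body as the HTTP example above.
		Parameters inParams = new Parameters();
		inParams.addParameter().setName("everything").setValue(new BooleanType(true));
		inParams.addParameter().setName("batchSize").setValue(new DecimalType(1000));

		client.operation()
			.onServer()
			.named("$reindex")
			.withParameters(inParams)
			.execute();
	}
}
```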


@@ -9,7 +9,6 @@ import org.springframework.data.jpa.repository.Modifying;
 import org.springframework.data.jpa.repository.Query;
 import org.springframework.data.repository.query.Param;
-import java.util.Collection;
 import java.util.List;
 import java.util.Optional;
@@ -51,6 +50,9 @@ public interface ITermConceptDao extends JpaRepository<TermConcept, Long>, IHapi
 @Query("SELECT c FROM TermConcept c WHERE c.myCodeSystem = :code_system AND c.myCode = :code")
 Optional<TermConcept> findByCodeSystemAndCode(@Param("code_system") TermCodeSystemVersion theCodeSystem, @Param("code") String theCode);
+@Query("FROM TermConcept WHERE myCodeSystem = :code_system AND myCode in (:codeList)")
+List<TermConcept> findByCodeSystemAndCodeList(@Param("code_system") TermCodeSystemVersion theCodeSystem, @Param("codeList") List<String> theCodeList);
 @Modifying
 @Query("DELETE FROM TermConcept WHERE myCodeSystem.myId = :cs_pid")
 int deleteByCodeSystemVersion(@Param("cs_pid") Long thePid);


@@ -20,8 +20,8 @@ package ca.uhn.fhir.jpa.entity;
 * #L%
 */
-import ca.uhn.fhir.i18n.Msg;
 import ca.uhn.fhir.context.support.IValidationSupport;
+import ca.uhn.fhir.i18n.Msg;
 import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink.RelationshipTypeEnum;
 import ca.uhn.fhir.jpa.search.DeferConceptIndexingRoutingBinder;
 import ca.uhn.fhir.util.ValidateUtil;
@@ -30,21 +30,43 @@ import org.apache.commons.lang3.builder.EqualsBuilder;
 import org.apache.commons.lang3.builder.HashCodeBuilder;
 import org.apache.commons.lang3.builder.ToStringBuilder;
 import org.apache.commons.lang3.builder.ToStringStyle;
+import org.hibernate.search.engine.backend.types.ObjectStructure;
 import org.hibernate.search.engine.backend.types.Projectable;
 import org.hibernate.search.engine.backend.types.Searchable;
-import org.hibernate.search.mapper.pojo.bridge.mapping.annotation.PropertyBinderRef;
 import org.hibernate.search.mapper.pojo.bridge.mapping.annotation.RoutingBinderRef;
 import org.hibernate.search.mapper.pojo.mapping.definition.annotation.FullTextField;
 import org.hibernate.search.mapper.pojo.mapping.definition.annotation.GenericField;
 import org.hibernate.search.mapper.pojo.mapping.definition.annotation.Indexed;
-import org.hibernate.search.mapper.pojo.mapping.definition.annotation.PropertyBinding;
+import org.hibernate.search.mapper.pojo.mapping.definition.annotation.IndexedEmbedded;
 import org.hl7.fhir.r4.model.Coding;
 import javax.annotation.Nonnull;
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.FetchType;
+import javax.persistence.ForeignKey;
+import javax.persistence.GeneratedValue;
+import javax.persistence.GenerationType;
+import javax.persistence.Id;
 import javax.persistence.Index;
-import javax.persistence.*;
+import javax.persistence.JoinColumn;
+import javax.persistence.Lob;
+import javax.persistence.ManyToOne;
+import javax.persistence.OneToMany;
+import javax.persistence.PrePersist;
+import javax.persistence.PreUpdate;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.Table;
+import javax.persistence.Temporal;
+import javax.persistence.TemporalType;
+import javax.persistence.UniqueConstraint;
 import java.io.Serializable;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Date;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
 import java.util.stream.Collectors;
 import static org.apache.commons.lang3.StringUtils.left;
@@ -92,8 +114,12 @@ public class TermConcept implements Serializable {
 @FullTextField(name = "myDisplayPhonetic", searchable= Searchable.YES, projectable= Projectable.NO, analyzer = "autocompletePhoneticAnalyzer")
 private String myDisplay;
+/**
+* IndexedEmbedded uses ObjectStructure.NESTED to be able to use hibernate search nested predicate queries.
+* @see "https://docs.jboss.org/hibernate/search/6.0/reference/en-US/html_single/#search-dsl-predicate-nested"
+*/
 @OneToMany(mappedBy = "myConcept", orphanRemoval = false, fetch = FetchType.LAZY)
-@PropertyBinding(binder = @PropertyBinderRef(type = TermConceptPropertyBinder.class))
+@IndexedEmbedded(structure = ObjectStructure.NESTED)
 private Collection<TermConceptProperty> myProperties;
 @OneToMany(mappedBy = "myConcept", orphanRemoval = false, fetch = FetchType.LAZY)
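
The NESTED structure is what allows the service layer to require that a property-name clause and a property-value clause hit the same TermConceptProperty instance. A minimal sketch of such a predicate, mirroring the getPropertyNameValueNestedPredicate helper added to BaseTermReadSvcImpl later in this diff (the wrapper class and method name here are illustrative only):

```java
import org.hibernate.search.engine.search.predicate.dsl.PredicateFinalStep;
import org.hibernate.search.engine.search.predicate.dsl.SearchPredicateFactory;

class NestedPropertyPredicateSketch {
	// Both clauses are wrapped in one nested() predicate so they must match on the
	// same TermConceptProperty, not on two different properties of the same concept.
	static PredicateFinalStep propertyKeyAndValue(SearchPredicateFactory f, String key, String value) {
		return f.nested().objectField("myProperties").nest(
			f.bool()
				.must(f.match().field("myProperties.myKey").matching(key))
				.must(f.match().field("myProperties.myValueString").matching(value)));
	}
}
```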


@@ -26,6 +26,10 @@ import org.apache.commons.lang3.builder.EqualsBuilder;
 import org.apache.commons.lang3.builder.HashCodeBuilder;
 import org.apache.commons.lang3.builder.ToStringBuilder;
 import org.apache.commons.lang3.builder.ToStringStyle;
+import org.hibernate.search.engine.backend.types.Projectable;
+import org.hibernate.search.engine.backend.types.Searchable;
+import org.hibernate.search.mapper.pojo.mapping.definition.annotation.FullTextField;
+import org.hibernate.search.mapper.pojo.mapping.definition.annotation.GenericField;
 import org.hibernate.validator.constraints.NotBlank;
 import javax.annotation.Nonnull;
@@ -57,9 +61,11 @@ public class TermConceptProperty implements Serializable {
 public static final int MAX_PROPTYPE_ENUM_LENGTH = 6;
 private static final long serialVersionUID = 1L;
 private static final int MAX_LENGTH = 500;
 @ManyToOne(fetch = FetchType.LAZY)
 @JoinColumn(name = "CONCEPT_PID", referencedColumnName = "PID", foreignKey = @ForeignKey(name = "FK_CONCEPTPROP_CONCEPT"))
 private TermConcept myConcept;
 /**
 * TODO: Make this non-null
 *
@@ -68,30 +74,41 @@ public class TermConceptProperty implements Serializable {
 @ManyToOne(fetch = FetchType.LAZY)
 @JoinColumn(name = "CS_VER_PID", nullable = true, referencedColumnName = "PID", foreignKey = @ForeignKey(name = "FK_CONCEPTPROP_CSV"))
 private TermCodeSystemVersion myCodeSystemVersion;
 @Id()
 @SequenceGenerator(name = "SEQ_CONCEPT_PROP_PID", sequenceName = "SEQ_CONCEPT_PROP_PID")
 @GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_CONCEPT_PROP_PID")
 @Column(name = "PID")
 private Long myId;
 @Column(name = "PROP_KEY", nullable = false, length = MAX_LENGTH)
 @NotBlank
+@GenericField(searchable = Searchable.YES)
 private String myKey;
 @Column(name = "PROP_VAL", nullable = true, length = MAX_LENGTH)
+@FullTextField(searchable = Searchable.YES, projectable = Projectable.YES, analyzer = "standardAnalyzer")
+@GenericField(name = "myValueString", searchable = Searchable.YES)
 private String myValue;
 @Column(name = "PROP_VAL_LOB")
 @Lob()
 private byte[] myValueLob;
 @Column(name = "PROP_TYPE", nullable = false, length = MAX_PROPTYPE_ENUM_LENGTH)
 private TermConceptPropertyTypeEnum myType;
 /**
 * Relevant only for properties of type {@link TermConceptPropertyTypeEnum#CODING}
 */
 @Column(name = "PROP_CODESYSTEM", length = MAX_LENGTH, nullable = true)
 private String myCodeSystem;
 /**
 * Relevant only for properties of type {@link TermConceptPropertyTypeEnum#CODING}
 */
 @Column(name = "PROP_DISPLAY", length = MAX_LENGTH, nullable = true)
+@GenericField(name = "myDisplayString", searchable = Searchable.YES)
 private String myDisplay;
 /**


@@ -1,80 +0,0 @@
-package ca.uhn.fhir.jpa.entity;
-/*-
-* #%L
-* HAPI FHIR JPA Server
-* %%
-* Copyright (C) 2014 - 2022 Smile CDR, Inc.
-* %%
-* Licensed under the Apache License, Version 2.0 (the "License");
-* you may not use this file except in compliance with the License.
-* You may obtain a copy of the License at
-*
-* http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-* #L%
-*/
-import org.hibernate.search.engine.backend.document.DocumentElement;
-import org.hibernate.search.engine.backend.document.model.dsl.IndexSchemaElement;
-import org.hibernate.search.mapper.pojo.bridge.PropertyBridge;
-import org.hibernate.search.mapper.pojo.bridge.binding.PropertyBindingContext;
-import org.hibernate.search.mapper.pojo.bridge.mapping.programmatic.PropertyBinder;
-import org.hibernate.search.mapper.pojo.bridge.runtime.PropertyBridgeWriteContext;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import java.util.Collection;
-import static org.apache.commons.lang3.StringUtils.isNotBlank;
-/**
-* Allows hibernate search to index individual concepts' properties
-*/
-public class TermConceptPropertyBinder implements PropertyBinder {
-public static final String CONCEPT_FIELD_PROPERTY_PREFIX = "PROP";
-private static final Logger ourLog = LoggerFactory.getLogger(TermConceptPropertyBinder.class);
-@Override
-public void bind(PropertyBindingContext thePropertyBindingContext) {
-thePropertyBindingContext.dependencies().use("myKey").use("myValue");
-IndexSchemaElement indexSchemaElement = thePropertyBindingContext.indexSchemaElement();
-//In order to support dynamic fields, we have to use field templates. We _must_ define the template at bootstrap time and cannot
-//create them adhoc. https://docs.jboss.org/hibernate/search/6.0/reference/en-US/html_single/#mapper-orm-bridge-index-field-dsl-dynamic
-//I _think_ im doing the right thing here by indicating that everything matching this template uses this analyzer.
-indexSchemaElement.fieldTemplate("propTemplate", f -> f.asString().analyzer("termConceptPropertyAnalyzer"))
-.matchingPathGlob(CONCEPT_FIELD_PROPERTY_PREFIX + "*")
-.multiValued();
-thePropertyBindingContext.bridge(new TermConceptPropertyBridge());
-}
-private class TermConceptPropertyBridge implements PropertyBridge {
-@Override
-public void write(DocumentElement theDocument, Object theObject, PropertyBridgeWriteContext thePropertyBridgeWriteContext) {
-Collection<TermConceptProperty> properties = (Collection<TermConceptProperty>) theObject;
-if (properties != null) {
-for (TermConceptProperty next : properties) {
-theDocument.addValue(CONCEPT_FIELD_PROPERTY_PREFIX + next.getKey(), next.getValue());
-ourLog.trace("Adding Prop: {}{} -- {}", CONCEPT_FIELD_PROPERTY_PREFIX, next.getKey(), next.getValue());
-if (next.getType() == TermConceptPropertyTypeEnum.CODING && isNotBlank(next.getDisplay())) {
-theDocument.addValue(CONCEPT_FIELD_PROPERTY_PREFIX + next.getKey(), next.getDisplay());
-ourLog.trace("Adding multivalue Prop: {}{} -- {}", CONCEPT_FIELD_PROPERTY_PREFIX, next.getKey(), next.getDisplay());
-}
-}
-}
-}
-}
-}


@@ -51,7 +51,6 @@ import ca.uhn.fhir.jpa.entity.TermConceptDesignation;
 import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink;
 import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink.RelationshipTypeEnum;
 import ca.uhn.fhir.jpa.entity.TermConceptProperty;
-import ca.uhn.fhir.jpa.entity.TermConceptPropertyBinder;
 import ca.uhn.fhir.jpa.entity.TermConceptPropertyTypeEnum;
 import ca.uhn.fhir.jpa.entity.TermValueSet;
 import ca.uhn.fhir.jpa.entity.TermValueSetConcept;
@@ -62,6 +61,7 @@ import ca.uhn.fhir.jpa.model.sched.HapiJob;
 import ca.uhn.fhir.jpa.model.sched.ISchedulerService;
 import ca.uhn.fhir.jpa.model.sched.ScheduledJobDefinition;
 import ca.uhn.fhir.jpa.model.util.JpaConstants;
+import ca.uhn.fhir.jpa.search.ElasticsearchNestedQueryBuilderUtil;
 import ca.uhn.fhir.jpa.search.builder.SearchBuilder;
 import ca.uhn.fhir.jpa.term.api.ITermCodeSystemStorageSvc;
 import ca.uhn.fhir.jpa.term.api.ITermConceptMappingSvc;
@@ -87,12 +87,15 @@ import com.github.benmanes.caffeine.cache.Caffeine;
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Stopwatch;
 import com.google.common.collect.ArrayListMultimap;
+import com.google.gson.JsonObject;
 import org.apache.commons.lang3.ObjectUtils;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.lang3.Validate;
 import org.apache.commons.lang3.time.DateUtils;
 import org.apache.lucene.index.Term;
+import org.apache.lucene.search.Query;
 import org.apache.lucene.search.RegexpQuery;
+import org.apache.lucene.search.TermQuery;
 import org.hibernate.search.backend.elasticsearch.ElasticsearchExtension;
 import org.hibernate.search.backend.lucene.LuceneExtension;
 import org.hibernate.search.engine.search.predicate.dsl.BooleanPredicateClausesStep;
@@ -159,6 +162,7 @@ import javax.persistence.criteria.JoinType;
 import javax.persistence.criteria.Predicate;
 import javax.persistence.criteria.Root;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.Date;
@@ -180,6 +184,10 @@ import java.util.function.Supplier;
 import java.util.stream.Collectors;
 import static ca.uhn.fhir.jpa.term.api.ITermLoaderSvc.LOINC_URI;
+import static java.lang.String.join;
+import static java.util.stream.Collectors.joining;
+import static java.util.stream.Collectors.toList;
+import static java.util.stream.Collectors.toSet;
 import static org.apache.commons.lang3.ObjectUtils.defaultIfNull;
 import static org.apache.commons.lang3.StringUtils.defaultString;
 import static org.apache.commons.lang3.StringUtils.isBlank;
@@ -199,6 +207,11 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc {
 private static Runnable myInvokeOnNextCallForUnitTest;
 private static boolean ourForceDisableHibernateSearchForUnitTest;
+private static final String IDX_PROPERTIES = "myProperties";
+private static final String IDX_PROP_KEY = IDX_PROPERTIES + ".myKey";
+private static final String IDX_PROP_VALUE_STRING = IDX_PROPERTIES + ".myValueString";
+private static final String IDX_PROP_DISPLAY_STRING = IDX_PROPERTIES + ".myDisplayString";
 private final Cache<String, TermCodeSystemVersion> myCodeSystemCurrentVersionCache = Caffeine.newBuilder().expireAfterWrite(1, TimeUnit.MINUTES).build();
 @Autowired
 protected DaoRegistry myDaoRegistry;
@@ -278,7 +291,7 @@
 .getParents()
 .stream()
 .map(t -> t.getParent().getId().toString())
-.collect(Collectors.joining(" "));
+.collect(joining(" "));
 }
 Collection<TermConceptDesignation> designations = theConcept.getDesignations();
@@ -1100,7 +1113,7 @@
 break;
 case "ancestor":
 isCodeSystemLoincOrThrowInvalidRequestException(theCodeSystemIdentifier, theFilter.getProperty());
-handleFilterLoincAncestor2(theCodeSystemIdentifier, theF, theB, theFilter);
+handleFilterLoincAncestor(theCodeSystemIdentifier, theF, theB, theFilter);
 break;
 case "descendant":
 isCodeSystemLoincOrThrowInvalidRequestException(theCodeSystemIdentifier, theFilter.getProperty());
@@ -1111,14 +1124,23 @@
 handleFilterLoincCopyright(theF, theB, theFilter);
 break;
 default:
+if (theFilter.getOp() == ValueSet.FilterOperator.REGEX) {
 handleFilterRegex(theF, theB, theFilter);
+} else {
+handleFilterPropertyDefault(theF, theB, theFilter);
+}
 break;
 }
 }
-private void handleFilterRegex(SearchPredicateFactory theF, BooleanPredicateClausesStep<?> theB, ValueSet.ConceptSetFilterComponent theFilter) {
-if (theFilter.getOp() == ValueSet.FilterOperator.REGEX) {
+private void handleFilterPropertyDefault(SearchPredicateFactory theF,
+BooleanPredicateClausesStep<?> theB, ValueSet.ConceptSetFilterComponent theFilter) {
+theB.must(getPropertyNameValueNestedPredicate(theF, theFilter.getProperty(), theFilter.getValue()));
+}
+private void handleFilterRegex(SearchPredicateFactory theF, BooleanPredicateClausesStep<?> theB, ValueSet.ConceptSetFilterComponent theFilter) {
 /*
 * We treat the regex filter as a match on the regex
 * anywhere in the property string. The spec does not
@@ -1137,24 +1159,31 @@
 value = value.substring(1);
 }
-Term term = new Term(TermConceptPropertyBinder.CONCEPT_FIELD_PROPERTY_PREFIX + theFilter.getProperty(), value);
 if (isFullTextSetToUseElastic()) {
-String regexpQuery = "{'regexp':{'" + term.field() + "':{'value':'" + term.text() + "'}}}";
-ourLog.debug("Build Elasticsearch Regexp Query: {}", regexpQuery);
-theB.must(theF.extension(ElasticsearchExtension.get()).fromJson(regexpQuery));
-} else {
-RegexpQuery query = new RegexpQuery(term);
-theB.must(theF.extension(LuceneExtension.get()).fromLuceneQuery(query));
-}
-} else {
-String value = theFilter.getValue();
-Term term = new Term(TermConceptPropertyBinder.CONCEPT_FIELD_PROPERTY_PREFIX + theFilter.getProperty(), value);
-theB.must(theF.match().field(term.field()).matching(term.text()));
+ElasticsearchNestedQueryBuilderUtil nestedQueryBuildUtil = new ElasticsearchNestedQueryBuilderUtil(
+"myProperties", "myKey", theFilter.getProperty(),
+"myValueString", value);
+JsonObject nestedQueryJO = nestedQueryBuildUtil.toGson();
+ourLog.debug("Build nested Elasticsearch query: {}", nestedQueryJO);
+theB.must(theF.extension(ElasticsearchExtension.get()).fromJson(nestedQueryJO));
+return;
 }
+// native Lucene configured
+Query termPropKeyQuery = new TermQuery(new Term(IDX_PROP_KEY, theFilter.getProperty()));
+Query regexpValueQuery = new RegexpQuery(new Term(IDX_PROP_VALUE_STRING, value));
+theB.must(theF.nested().objectField("myProperties").nest(
+theF.bool()
+.must(theF.extension(LuceneExtension.get()).fromLuceneQuery(termPropKeyQuery))
+.must(theF.extension(LuceneExtension.get()).fromLuceneQuery(regexpValueQuery))
+));
 }
 private void handleFilterLoincCopyright(SearchPredicateFactory theF, BooleanPredicateClausesStep<?> theB, ValueSet.ConceptSetFilterComponent theFilter) {
 if (theFilter.getOp() == ValueSet.FilterOperator.EQUAL) {
@@ -1162,11 +1191,11 @@
 switch (copyrightFilterValue) {
 case "3rdparty":
 logFilteringValueOnProperty(theFilter.getValue(), theFilter.getProperty());
-addFilterLoincCopyright3rdParty(theF, theB);
+addFilterLoincCopyright3rdParty(theF, theB, theFilter);
 break;
 case "loinc":
 logFilteringValueOnProperty(theFilter.getValue(), theFilter.getProperty());
-addFilterLoincCopyrightLoinc(theF, theB);
+addFilterLoincCopyrightLoinc(theF, theB, theFilter);
 break;
 default:
 throwInvalidRequestForValueOnProperty(theFilter.getValue(), theFilter.getProperty());
@@ -1177,19 +1206,25 @@
 }
 }
-private void addFilterLoincCopyrightLoinc(SearchPredicateFactory thePredicateFactory, BooleanPredicateClausesStep<?> theBooleanClause) {
-theBooleanClause.mustNot(thePredicateFactory.exists().field(TermConceptPropertyBinder.CONCEPT_FIELD_PROPERTY_PREFIX + "EXTERNAL_COPYRIGHT_NOTICE"));
+private void addFilterLoincCopyrightLoinc(SearchPredicateFactory theF,
+BooleanPredicateClausesStep<?> theB, ValueSet.ConceptSetFilterComponent theFilter) {
+theB.mustNot(theF.match().field(IDX_PROP_KEY).matching("EXTERNAL_COPYRIGHT_NOTICE"));
 }
-private void addFilterLoincCopyright3rdParty(SearchPredicateFactory thePredicateFactory, BooleanPredicateClausesStep<?> theBooleanClause) {
+private void addFilterLoincCopyright3rdParty(SearchPredicateFactory thePredicateFactory,
+BooleanPredicateClausesStep<?> theBooleanClause, ValueSet.ConceptSetFilterComponent theFilter) {
 //TODO GGG HS These used to be Term term = new Term(TermConceptPropertyBinder.CONCEPT_FIELD_PROPERTY_PREFIX + "EXTERNAL_COPYRIGHT_NOTICE", ".*");, which was lucene-specific.
 //TODO GGG HS ask diederik if this is equivalent.
 //This old .* regex is the same as an existence check on a field, which I've implemented here.
-theBooleanClause.must(thePredicateFactory.exists().field(TermConceptPropertyBinder.CONCEPT_FIELD_PROPERTY_PREFIX + "EXTERNAL_COPYRIGHT_NOTICE"));
+// theBooleanClause.must(thePredicateFactory.exists().field("EXTERNAL_COPYRIGHT_NOTICE"));
+theBooleanClause.must(thePredicateFactory.match().field(IDX_PROP_KEY).matching("EXTERNAL_COPYRIGHT_NOTICE"));
 }
 @SuppressWarnings("EnumSwitchStatementWhichMissesCases")
-private void handleFilterLoincAncestor2(String theSystem, SearchPredicateFactory f, BooleanPredicateClausesStep<?> b, ValueSet.ConceptSetFilterComponent theFilter) {
+private void handleFilterLoincAncestor(String theSystem, SearchPredicateFactory f, BooleanPredicateClausesStep<?> b, ValueSet.ConceptSetFilterComponent theFilter) {
 switch (theFilter.getOp()) {
 case EQUAL:
 addLoincFilterAncestorEqual(theSystem, f, b, theFilter);
@@ -1204,24 +1239,24 @@
 }
 private void addLoincFilterAncestorEqual(String theSystem, SearchPredicateFactory f, BooleanPredicateClausesStep<?> b, ValueSet.ConceptSetFilterComponent theFilter) {
-addLoincFilterAncestorEqual(theSystem, f, b, theFilter.getProperty(), theFilter.getValue());
+long parentPid = getAncestorCodePid(theSystem, theFilter.getProperty(), theFilter.getValue());
+b.must(f.match().field("myParentPids").matching(String.valueOf(parentPid)));
 }
-private void addLoincFilterAncestorEqual(String theSystem, SearchPredicateFactory f, BooleanPredicateClausesStep<?> b, String theProperty, String theValue) {
-List<Term> terms = getAncestorTerms(theSystem, theProperty, theValue);
-b.must(f.bool(innerB -> terms.forEach(term -> innerB.should(f.match().field(term.field()).matching(term.text())))));
-}
 private void addLoincFilterAncestorIn(String theSystem, SearchPredicateFactory f, BooleanPredicateClausesStep<?> b, ValueSet.ConceptSetFilterComponent theFilter) {
 String[] values = theFilter.getValue().split(",");
-List<Term> terms = new ArrayList<>();
+List<Long> ancestorCodePidList = new ArrayList<>();
 for (String value : values) {
-terms.addAll(getAncestorTerms(theSystem, theFilter.getProperty(), value));
+ancestorCodePidList.add(getAncestorCodePid(theSystem, theFilter.getProperty(), value));
 }
-b.must(f.bool(innerB -> terms.forEach(term -> innerB.should(f.match().field(term.field()).matching(term.text())))));
+b.must(f.bool(innerB -> ancestorCodePidList.forEach(
+ancestorPid -> innerB.should(f.match().field("myParentPids").matching(String.valueOf(ancestorPid)))
+)));
 }
 @SuppressWarnings("EnumSwitchStatementWhichMissesCases")
 private void handleFilterLoincParentChild(SearchPredicateFactory f, BooleanPredicateClausesStep<?> b, ValueSet.ConceptSetFilterComponent theFilter) {
 switch (theFilter.getOp()) {
@@ -1238,31 +1273,33 @@
 private void addLoincFilterParentChildIn(SearchPredicateFactory f, BooleanPredicateClausesStep<?> b, ValueSet.ConceptSetFilterComponent theFilter) {
 String[] values = theFilter.getValue().split(",");
-List<Term> terms = new ArrayList<>();
+b.minimumShouldMatchNumber(1);
 for (String value : values) {
 logFilteringValueOnProperty(value, theFilter.getProperty());
-terms.add(getPropertyTerm(theFilter.getProperty(), value));
+b.should(getPropertyNameValueNestedPredicate(f, theFilter.getProperty(), value));
 }
-//TODO GGG HS: Not sure if this is the right equivalent...seems to be no equivalent to `TermsQuery` in HS6.
-//Far as I'm aware, this is a single element of a MUST portion of a bool, which itself should contain a list of OR'ed options, e.g.
-// shape == round && color == (green || blue)
-b.must(f.bool(innerB -> terms.forEach(term -> innerB.should(f.match().field(term.field()).matching(term.text())))));
 }
 private void addLoincFilterParentChildEqual(SearchPredicateFactory f, BooleanPredicateClausesStep<?> b, String theProperty, String theValue) {
 logFilteringValueOnProperty(theValue, theProperty);
-//TODO GGG HS: Not sure if this is the right equivalent...seems to be no equivalent to `TermsQuery` in HS6.
-//b.must(new TermsQuery(getPropertyTerm(theProperty, theValue)));
-//According to the DSL migration reference (https://docs.jboss.org/hibernate/search/6.0/migration/html_single/#queries-reference),
-//Since this property is handled with a specific analyzer, I'm not sure a terms match here is actually correct. The analyzer is literally just a whitespace tokenizer here.
-b.must(f.match().field(TermConceptPropertyBinder.CONCEPT_FIELD_PROPERTY_PREFIX + theProperty).matching(theValue));
+b.must(getPropertyNameValueNestedPredicate(f, theProperty, theValue));
 }
+/**
+* A nested predicate is required for both predicates to be applied to same property, otherwise if properties
+* propAA:valueAA and propBB:valueBB are defined, a search for propAA:valueBB would be a match
+* @see "https://docs.jboss.org/hibernate/search/6.0/reference/en-US/html_single/#search-dsl-predicate-nested"
+*/
+private PredicateFinalStep getPropertyNameValueNestedPredicate(SearchPredicateFactory f, String theProperty, String theValue) {
+return f.nested().objectField(IDX_PROPERTIES).nest(
+f.bool()
+.must(f.match().field(IDX_PROP_KEY).matching(theProperty))
+.must(f.match().field(IDX_PROP_VALUE_STRING).field(IDX_PROP_DISPLAY_STRING).matching(theValue))
+);
+}
 private void handleFilterConceptAndCode(String theSystem, SearchPredicateFactory f, BooleanPredicateClausesStep<?> b, ValueSet.ConceptSetFilterComponent theFilter) {
 TermConcept code = findCode(theSystem, theFilter.getValue())
 .orElseThrow(() -> new InvalidRequestException("Invalid filter criteria - code does not exist: {" + Constants.codeSystemWithDefaultDescription(theSystem) + "}" + theFilter.getValue()));
@@ -1312,20 +1349,12 @@
 );
 }
-private Term getPropertyTerm(String theProperty, String theValue) {
-return new Term(TermConceptPropertyBinder.CONCEPT_FIELD_PROPERTY_PREFIX + theProperty, theValue);
-}
-private List<Term> getAncestorTerms(String theSystem, String theProperty, String theValue) {
-List<Term> retVal = new ArrayList<>();
+private long getAncestorCodePid(String theSystem, String theProperty, String theValue) {
 TermConcept code = findCode(theSystem, theValue)
 .orElseThrow(() -> new InvalidRequestException("Invalid filter criteria - code does not exist: {" + Constants.codeSystemWithDefaultDescription(theSystem) + "}" + theValue));
-retVal.add(new Term("myParentPids", "" + code.getId()));
 logFilteringValueOnProperty(theValue, theProperty);
-return retVal;
+return code.getId();
 }
 @SuppressWarnings("EnumSwitchStatementWhichMissesCases")
@@ -1342,56 +1371,117 @@
 }
 }
-private void addLoincFilterDescendantEqual(String theSystem, SearchPredicateFactory f, BooleanPredicateClausesStep<?> b, ValueSet.ConceptSetFilterComponent theFilter) {
-addLoincFilterDescendantEqual(theSystem, f, b, theFilter.getProperty(), theFilter.getValue());
-}
-private void addLoincFilterDescendantIn(String theSystem, SearchPredicateFactory f, BooleanPredicateClausesStep<?> b, ValueSet.ConceptSetFilterComponent theFilter) {
-String[] values = theFilter.getValue().split(",");
-List<Term> terms = new ArrayList<>();
-for (String value : values) {
-terms.addAll(getDescendantTerms(theSystem, theFilter.getProperty(), value));
-}
-searchByParentPids(f, b, terms);
-}
-private void addLoincFilterDescendantEqual(String theSystem, SearchPredicateFactory f, BooleanPredicateClausesStep<?> b, String theProperty, String theValue) {
-List<Term> terms = getDescendantTerms(theSystem, theProperty, theValue);
-searchByParentPids(f, b, terms);
-}
-private void searchByParentPids(SearchPredicateFactory f, BooleanPredicateClausesStep<?> b, List<Term> theTerms) {
-List<Long> parentPids = convertTermsToParentPids(theTerms);
+private void addLoincFilterDescendantEqual(String theSystem, SearchPredicateFactory f,
+BooleanPredicateClausesStep<?> b, ValueSet.ConceptSetFilterComponent theFilter) {
+List<Long> parentPids = getCodeParentPids(theSystem, theFilter.getProperty(), theFilter.getValue());
+if (parentPids.isEmpty()) {
+// Can't return empty must, because it wil match according to other predicates.
+// Some day there will be a 'matchNone' predicate (https://discourse.hibernate.org/t/fail-fast-predicate/6062)
+b.mustNot( f.matchAll() );
+return;
+}
 b.must(f.bool(innerB -> {
-parentPids.forEach(pid -> innerB.should(f.match().field(theTerms.get(0).field()).matching(pid)));
+innerB.minimumShouldMatchNumber(1);
+parentPids.forEach(pid -> innerB.should(f.match().field("myId").matching(pid)));
 }));
 }
-private List<Long> convertTermsToParentPids(List<Term> theTerms) {
-return theTerms.stream().map(Term::text).map(Long::valueOf).collect(Collectors.toList());
-}
-private List<Term> getDescendantTerms(String theSystem, String theProperty, String theValue) {
-List<Term> retVal = new ArrayList<>();
+/**
+* We are looking for codes which have codes indicated in theFilter.getValue() as descendants.
+* Strategy is to find codes which have their pId(s) in the list of the parentId(s) of all the TermConcept(s)
+* representing the codes in theFilter.getValue()
+*/
+private void addLoincFilterDescendantIn(String theSystem, SearchPredicateFactory f,
+BooleanPredicateClausesStep<?> b, ValueSet.ConceptSetFilterComponent theFilter) {
+String[] values = theFilter.getValue().split(",");
+if (values.length == 0) {
+throw new InvalidRequestException(Msg.code(2037) + "Invalid filter criteria - no codes specified");
+}
+List<Long> descendantCodePidList = getMultipleCodeParentPids(theSystem, theFilter.getProperty(), values);
+b.must(f.bool(innerB -> descendantCodePidList.forEach(
+pId -> innerB.should(f.match().field("myId").matching(pId))
+)));
+}
+/**
+* Returns the list of parentId(s) of the TermConcept representing theValue as a code
+*/
+private List<Long> getCodeParentPids(String theSystem, String theProperty, String theValue) {
 TermConcept code = findCode(theSystem, theValue)
-.orElseThrow(() -> new InvalidRequestException("Invalid filter criteria - code does not exist: {" + Constants.codeSystemWithDefaultDescription(theSystem) + "}" + theValue));
+.orElseThrow(() -> new InvalidRequestException("Invalid filter criteria - code does not exist: {" +
+Constants.codeSystemWithDefaultDescription(theSystem) + "}" + theValue));
 String[] parentPids = code.getParentPidsAsString().split(" ");
-for (String parentPid : parentPids) {
-if (!StringUtils.equals(parentPid, "NONE")) {
-retVal.add(new Term("myId", parentPid));
-}
-}
+List<Long> retVal = Arrays.stream(parentPids)
+.filter( pid -> !StringUtils.equals(pid, "NONE") )
+.map(Long::parseLong)
+.collect(Collectors.toList());
 logFilteringValueOnProperty(theValue, theProperty);
 return retVal;
 }
+/**
+* Returns the list of parentId(s) of the TermConcept representing theValue as a code
+*/
+private List<Long> getMultipleCodeParentPids(String theSystem, String theProperty, String[] theValues) {
+List<String> valuesList = Arrays.asList(theValues);
+List<TermConcept> termConcepts = findCodes(theSystem, valuesList);
+if (valuesList.size() != termConcepts.size()) {
+String exMsg = getTermConceptsFetchExceptionMsg(termConcepts, valuesList);
+throw new InvalidRequestException(Msg.code(2038) + "Invalid filter criteria - {" +
+Constants.codeSystemWithDefaultDescription(theSystem) + "}: " + exMsg);
+}
+List<Long> retVal = termConcepts.stream()
+.flatMap(tc -> Arrays.stream(tc.getParentPidsAsString().split(" ")))
+.filter( pid -> !StringUtils.equals(pid, "NONE") )
+.map(Long::parseLong)
+.collect(Collectors.toList());
+logFilteringValueOnProperties(valuesList, theProperty);
+return retVal;
+}
+/**
+* Generate message indicating for which of theValues a TermConcept was not found
+*/
+private String getTermConceptsFetchExceptionMsg(List<TermConcept> theTermConcepts, List<String> theValues) {
+// case: more TermConcept(s) retrieved than codes queried
+if (theTermConcepts.size() > theValues.size()) {
+return "Invalid filter criteria - More TermConcepts were found than indicated codes. Queried codes: [" +
+join(",", theValues + "]; Obtained TermConcept IDs, codes: [" +
+theTermConcepts.stream().map(tc -> tc.getId() + ", " + tc.getCode())
+.collect(joining("; "))+ "]");
+}
+// case: less TermConcept(s) retrieved than codes queried
+Set<String> matchedCodes = theTermConcepts.stream().map(TermConcept::getCode).collect(toSet());
+List<String> notMatchedValues = theValues.stream()
+.filter(v -> ! matchedCodes.contains (v)) .collect(toList());
+return "Invalid filter criteria - No TermConcept(s) were found for the requested codes: [" +
+join(",", notMatchedValues + "]");
+}
 private void logFilteringValueOnProperty(String theValue, String theProperty) {
 ourLog.debug(" * Filtering with value={} on property {}", theValue, theProperty);
 }
+private void logFilteringValueOnProperties(List<String> theValues, String theProperty) {
+ourLog.debug(" * Filtering with values={} on property {}", String.join(", ", theValues), theProperty);
+}
 private void throwInvalidRequestForOpOnProperty(ValueSet.FilterOperator theOp, String theProperty) {
 throw new InvalidRequestException(Msg.code(897) + "Don't know how to handle op=" + theOp + " on property " + theProperty);
 }
@@ -1635,6 +1725,16 @@
 });
 }
+@Transactional(propagation = Propagation.MANDATORY)
+public List<TermConcept> findCodes(String theCodeSystem, List<String> theCodeList) {
+TermCodeSystemVersion csv = getCurrentCodeSystemVersion(theCodeSystem);
+if (csv == null) { return Collections.emptyList(); }
+return myConceptDao.findByCodeSystemAndCodeList(csv, theCodeList);
+}
 @Nullable
 private TermCodeSystemVersion getCurrentCodeSystemVersion(String theCodeSystemIdentifier) {
 String version = getVersionFromIdentifier(theCodeSystemIdentifier);


@@ -74,6 +74,8 @@ public interface ITermReadSvc extends IValidationSupport {
 Optional<TermConcept> findCode(String theCodeSystem, String theCode);
+List<TermConcept> findCodes(String theCodeSystem, List<String> theCodes);
 Set<TermConcept> findCodesAbove(Long theCodeSystemResourcePid, Long theCodeSystemResourceVersionPid, String theCode);
 List<FhirVersionIndependentConcept> findCodesAbove(String theSystem, String theCode);


@@ -23,12 +23,14 @@ import org.testcontainers.elasticsearch.ElasticsearchContainer;
 import javax.annotation.PreDestroy;
 import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Properties;
 /**
-* Configurations for Hibernate Search: off, lucene, or elastic.
+* Configurations for Hibernate Search: off, lucene in-memory, lucene on file system or elastic.
 *
 * We use {@link DefaultLuceneHeap} by default in our JPA test configs.
 * Turn off by adding {@link NoFT} to the test Contexts.
@@ -44,10 +46,48 @@ public class TestHibernateSearchAddInConfig {
 void apply(Properties theJPAProperties);
 }
+/**
+* Lucene on file system. Useful for debugging
+* Uses temporary directory by default. Replace by permanent directory for debugging
+*/
+@Configuration
+public static class LuceneFilesystem {
+@Bean
+@Primary
+IHibernateSearchConfigurer hibernateSearchConfigurer() throws IOException {
+ourLog.warn("Hibernate Search: using lucene - filesystem");
+// replace by existing directory for debugging purposes
+Path tempDirPath = Files.createTempDirectory(null);
+String dirPath = tempDirPath.toString();
+Map<String, String> luceneProperties = new HashMap<>();
+luceneProperties.put(BackendSettings.backendKey(BackendSettings.TYPE), "lucene");
+luceneProperties.put(BackendSettings.backendKey(LuceneBackendSettings.ANALYSIS_CONFIGURER), HapiLuceneAnalysisConfigurer.class.getName());
+luceneProperties.put(BackendSettings.backendKey(LuceneIndexSettings.DIRECTORY_TYPE), "local-filesystem");
+luceneProperties.put(BackendSettings.backendKey(LuceneIndexSettings.DIRECTORY_ROOT), dirPath);
+ourLog.info("Using lucene root dir: {}", dirPath);
+luceneProperties.put(BackendSettings.backendKey(LuceneBackendSettings.LUCENE_VERSION), "LUCENE_CURRENT");
+// for lucene trace logging
+luceneProperties.put(BackendSettings.backendKey(LuceneIndexSettings.IO_WRITER_INFOSTREAM), "true");
+luceneProperties.put(HibernateOrmMapperSettings.ENABLED, "true");
+return (theProperties) ->
+theProperties.putAll(luceneProperties);
+}
+@Bean(name={"searchDao", "searchDaoDstu2", "searchDaoDstu3", "searchDaoR4", "searchDaoR5"})
+public IFulltextSearchSvc searchDao() {
+ourLog.info("Hibernate Search: FulltextSearchSvcImpl present");
+return new FulltextSearchSvcImpl();
+}
+}
 /**
 * Our default config - Lucene in-memory.
+*
+* Override by adding {@link NoFT} or {@link Elasticsearch} to your test class contexts.
 */
 @Configuration
 public static class DefaultLuceneHeap {


@@ -347,7 +347,7 @@ public class TerminologySvcImplDstu3Test extends BaseJpaDstu3Test {
 .addFilter()
 .setProperty("copyright")
 .setOp(ValueSet.FilterOperator.EQUAL)
-.setValue("3rdParty");
+.setValue("3rdParty"); // mixed case
 outcome = myTermSvc.expandValueSet(null, vs);
 codes = toCodesContains(outcome.getExpansion().getContains());
 assertThat(codes, containsInAnyOrder("50015-7", "43343-3", "43343-4"));
@@ -364,7 +364,7 @@
 .addFilter()
 .setProperty("copyright")
 .setOp(ValueSet.FilterOperator.EQUAL)
-.setValue("3rdparty");
+.setValue("3rdparty"); // lowercase
 outcome = myTermSvc.expandValueSet(null, vs);
 codes = toCodesContains(outcome.getExpansion().getContains());
 assertThat(codes, containsInAnyOrder("50015-7", "43343-3", "43343-4"));


@@ -0,0 +1,16 @@
+package ca.uhn.fhir.jpa.term;
+import ca.uhn.fhir.jpa.config.TestHibernateSearchAddInConfig;
+import ca.uhn.fhir.jpa.config.TestR4Config;
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.springframework.test.context.ContextConfiguration;
+import org.springframework.test.context.junit.jupiter.SpringExtension;
+/**
+* This class runs all parent class tests using Elasticsearch configuration
+*/
+@ExtendWith(SpringExtension.class)
+@ContextConfiguration(classes = {TestR4Config.class, TestHibernateSearchAddInConfig.Elasticsearch.class})
+public class ValeSetFreeTextExpansionR4ElasticIT extends AbstractValeSetFreeTextExpansionR4Test {
+}


@@ -0,0 +1,17 @@
+package ca.uhn.fhir.jpa.term;
+import ca.uhn.fhir.jpa.config.TestHibernateSearchAddInConfig;
+import ca.uhn.fhir.jpa.config.TestR4Config;
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.springframework.test.context.ContextConfiguration;
+import org.springframework.test.context.junit.jupiter.SpringExtension;
+/**
+* This class runs all parent class tests using Lucene configuration
+* There is also a LuceneFilesystem configuration available, for debugging purposes
+*/
+@ExtendWith(SpringExtension.class)
+@ContextConfiguration(classes = {TestR4Config.class, TestHibernateSearchAddInConfig.DefaultLuceneHeap.class})
+public class ValeSetFreeTextExpansionR4LuceneIT extends AbstractValeSetFreeTextExpansionR4Test {
+}


@@ -61,12 +61,6 @@ import static org.junit.jupiter.api.Assertions.assertNull;
 import static org.junit.jupiter.api.Assertions.assertSame;
 import static org.junit.jupiter.api.Assertions.assertTrue;
 import static org.junit.jupiter.api.Assertions.fail;
-import static org.mockito.ArgumentMatchers.anyCollection;
-import static org.mockito.ArgumentMatchers.anyString;
-import static org.mockito.ArgumentMatchers.nullable;
-import static org.mockito.Mockito.times;
-import static org.mockito.Mockito.verify;
-import static org.mockito.Mockito.when;
 public class ValueSetExpansionR4Test extends BaseTermR4Test {
 private static final Logger ourLog = LoggerFactory.getLogger(ValueSetExpansionR4Test.class);
@@ -978,53 +972,6 @@
 assertThat(ValueSetTestUtil.toCodes(expandedValueSet), is(equalTo(expandedConceptCodes.subList(1, 23))));
 }
-@Test
-public void testExpandValueSetInMemoryRespectsMaxSize() {
-createCodeSystem();
-// Add lots more codes
-CustomTerminologySet additions = new CustomTerminologySet();
-for (int i = 0; i < 100; i++) {
-additions.addRootConcept("CODE" + i, "Display " + i);
-}
-myTermCodeSystemStorageSvc.applyDeltaCodeSystemsAdd(CS_URL, additions);
-// Codes available exceeds the max
-myDaoConfig.setMaximumExpansionSize(50);
-ValueSet vs = new ValueSet();
-ValueSet.ConceptSetComponent include = vs.getCompose().addInclude();
-include.setSystem(CS_URL);
-try {
-ValueSet expansion = myTermSvc.expandValueSet(null, vs);
-fail("Expanded to " + expansion.getExpansion().getContains().size() + " but max was " + myDaoConfig.getMaximumExpansionSize());
-} catch (InternalErrorException e) {
-assertEquals(Msg.code(832) + "Expansion of ValueSet produced too many codes (maximum 50) - Operation aborted!", e.getMessage());
-}
-// Increase the max so it won't exceed
-myDaoConfig.setMaximumExpansionSize(150);
-vs = new ValueSet();
-include = vs.getCompose().addInclude();
-include.setSystem(CS_URL);
-ValueSet outcome = myTermSvc.expandValueSet(null, vs);
-assertEquals(109, outcome.getExpansion().getContains().size());
-}
-@Test
-public void testExpandValueSetWithValueSetCodeAccumulator() {
-createCodeSystem();
-when(myValueSetCodeAccumulator.getCapacityRemaining()).thenReturn(100);
-ValueSet vs = new ValueSet();
-ValueSet.ConceptSetComponent include = vs.getCompose().addInclude();
-include.setSystem(CS_URL);
-myTermSvc.expandValueSet(null, vs, myValueSetCodeAccumulator);
-verify(myValueSetCodeAccumulator, times(9)).includeConceptWithDesignations(anyString(), anyString(), nullable(String.class), anyCollection(), nullable(Long.class), nullable(String.class), nullable(String.class));
-}
 @Test
 public void testExpandValueSetPreservesExplicitOrder() {


@ -23,6 +23,7 @@ package ca.uhn.fhir.jpa.term.api;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry; import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao; import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.dao.data.ITermValueSetDao; import ca.uhn.fhir.jpa.dao.data.ITermValueSetDao;
import ca.uhn.fhir.jpa.entity.TermConcept;
import ca.uhn.fhir.jpa.model.entity.ResourceTable; import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.term.TermReadSvcR4; import ca.uhn.fhir.jpa.term.TermReadSvcR4;
import ca.uhn.fhir.jpa.term.TermReadSvcUtil; import ca.uhn.fhir.jpa.term.TermReadSvcUtil;
@ -41,7 +42,9 @@ import org.springframework.test.util.ReflectionTestUtils;
import javax.persistence.EntityManager; import javax.persistence.EntityManager;
import javax.persistence.NonUniqueResultException; import javax.persistence.NonUniqueResultException;
import java.util.Arrays;
import java.util.Collections; import java.util.Collections;
import java.util.List;
import java.util.Optional; import java.util.Optional;
import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertFalse;
@ -232,4 +235,54 @@ class ITermReadSvcTest {
} }
@Nested
public class TestGetMultipleCodeParentPids {
public static final String CODE_1 = "code-1";
public static final String CODE_2 = "code-2";
public static final String CODE_3 = "code-3";
public static final String CODE_4 = "code-4";
public static final String CODE_5 = "code-5";
@Mock TermConcept termConceptCode1;
@Mock TermConcept termConceptCode3;
@Mock TermConcept termConceptCode4;
@Test
public void morePropertiesThanValues() {
when(termConceptCode1.getCode()).thenReturn(CODE_1);
when(termConceptCode3.getCode()).thenReturn(CODE_3);
when(termConceptCode4.getCode()).thenReturn(CODE_4);
List<TermConcept> termConcepts = Lists.newArrayList(termConceptCode1, termConceptCode3, termConceptCode4);
List<String> values = Arrays.asList(CODE_1, CODE_2, CODE_3, CODE_4, CODE_5);
String msg = (String) ReflectionTestUtils.invokeMethod(
testedClass, "getTermConceptsFetchExceptionMsg", termConcepts, values);
assertTrue(msg.contains("No TermConcept(s) were found"));
assertFalse(msg.contains(CODE_1));
assertTrue(msg.contains(CODE_2));
assertFalse(msg.contains(CODE_3));
assertFalse(msg.contains(CODE_4));
assertTrue(msg.contains(CODE_5));
}
@Test
void moreValuesThanProperties() {
when(termConceptCode1.getCode()).thenReturn(CODE_1);
when(termConceptCode1.getId()).thenReturn(1L);
when(termConceptCode3.getCode()).thenReturn(CODE_3);
when(termConceptCode3.getId()).thenReturn(3L);
List<TermConcept> termConcepts = Lists.newArrayList(termConceptCode1, termConceptCode3);
List<String> values = Arrays.asList(CODE_3);
String msg = (String) ReflectionTestUtils.invokeMethod(
testedClass, "getTermConceptsFetchExceptionMsg", termConcepts, values);
assertTrue(msg.contains("More TermConcepts were found than indicated codes"));
assertFalse(msg.contains("Queried codes: [" + CODE_3 + "]"));
assertTrue(msg.contains("Obtained TermConcept IDs, codes: [1, code-1; 3, code-3]"));
}
}
}

View File

@ -0,0 +1,74 @@
package ca.uhn.fhir.jpa.search;
import com.google.gson.JsonArray;
import com.google.gson.JsonObject;
/**
* The whole purpose of this class is to ease construction of a non-trivial gson.JsonObject,
* which can't be done the easy way in this case (by parsing a JSON string), because there are
* valid regex strings which break gson, such as: ".*\\^Donor$"
*/
public class ElasticsearchNestedQueryBuilderUtil {
private final String myNestedObjName;
private final String myNestedKeyPropName;
private final String myNestedKeyPropValue;
private final String myNestedValuePropName;
private final String myNestedValuePropValue;
private final String myNestedPropertyKeyPath;
private final String myNestedPropertyValuePath;
private JsonObject builtJsonObj = new JsonObject();
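// The key property is matched with a "match" clause and the value property with a "regexp" clause,
// both addressed by their dotted paths (e.g. "myProperties.myKey" when the nested object is named "myProperties").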
public ElasticsearchNestedQueryBuilderUtil(String theNestedObjName, String theNestedKeyPropName,
String theNestedKeyPropValue, String theNestedValuePropName, String theNestedValuePropValue) {
myNestedObjName = theNestedObjName;
myNestedKeyPropName = theNestedKeyPropName;
myNestedKeyPropValue = theNestedKeyPropValue;
myNestedValuePropName = theNestedValuePropName;
myNestedValuePropValue = theNestedValuePropValue;
myNestedPropertyKeyPath = myNestedObjName + "." + myNestedKeyPropName;
myNestedPropertyValuePath = myNestedObjName + "." + myNestedValuePropName;
buildJsonObj();
}
private void buildJsonObj() {
JsonObject matchPropJO = new JsonObject();
matchPropJO.addProperty(myNestedObjName + "." + myNestedKeyPropName, myNestedKeyPropValue);
JsonObject matchJO = new JsonObject();
matchJO.add("match", matchPropJO);
JsonObject regexpPropJO = new JsonObject();
regexpPropJO.addProperty(myNestedObjName + "." + myNestedValuePropName, myNestedValuePropValue);
JsonObject regexpJO = new JsonObject();
regexpJO.add("regexp", regexpPropJO);
JsonArray mustPropJA = new JsonArray();
mustPropJA.add(matchJO);
mustPropJA.add(regexpJO);
JsonObject mustPropJO = new JsonObject();
mustPropJO.add("must", mustPropJA);
JsonObject boolJO = new JsonObject();
boolJO.add("bool", mustPropJO);
JsonObject nestedJO = new JsonObject();
nestedJO.addProperty("path", myNestedObjName);
nestedJO.add("query", boolJO);
builtJsonObj.add("nested", nestedJO);
}
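// For reference, with the argument values used in the unit test,
//   new ElasticsearchNestedQueryBuilderUtil("myProperties", "myKey", "propAAA", "myValueString", ".*\\^Donor$")
// builds the equivalent of:
//   { "nested": { "path": "myProperties", "query": { "bool": { "must": [
//     { "match":  { "myProperties.myKey": "propAAA" } },
//     { "regexp": { "myProperties.myValueString": ".*\\^Donor$" } } ] } } } }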
public JsonObject toGson() { return builtJsonObj; }
public String getNestedPropertyKeyPath() { return myNestedPropertyKeyPath; }
public String getNestedPropertyValuePath() { return myNestedPropertyValuePath; }
}

View File

@ -0,0 +1,87 @@
package ca.uhn.fhir.jpa.search;
import com.google.gson.Gson;
import com.google.gson.JsonArray;
import com.google.gson.JsonObject;
import com.google.gson.JsonSyntaxException;
import com.google.gson.stream.MalformedJsonException;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
class ElasticsearchNestedQueryBuilderUtilTest {
private static final String NESTED_KEY_PROP_NAME = "myKey";
private static final String NESTED_VALUE_PROP_NAME = "myValueString";
private static final String NESTED_OBJECT_NAME = "myProperties";
// a valid regex string which breaks gson
private static final String GSON_FAILING_REGEX = ".*\\^Donor$";
private ElasticsearchNestedQueryBuilderUtil testedUtil;
@Test
void testBuildUsingTestedClass() {
String propertyValue = "propAAA";
testedUtil = new ElasticsearchNestedQueryBuilderUtil(
NESTED_OBJECT_NAME, NESTED_KEY_PROP_NAME, propertyValue, NESTED_VALUE_PROP_NAME, GSON_FAILING_REGEX);
assertFalse(testedUtil.toGson().isJsonNull());
JsonObject generatedJson = testedUtil.toGson();
JsonObject nestedJO = generatedJson.getAsJsonObject("nested");
assertEquals(NESTED_OBJECT_NAME, nestedJO.get("path").getAsString());
JsonObject queryJO = nestedJO.getAsJsonObject("query");
JsonObject boolJO = queryJO.getAsJsonObject("bool");
JsonArray mustJA = boolJO.getAsJsonArray("must");
JsonObject matchPropKeyJE = (JsonObject) mustJA.get(0);
JsonObject propKeyJO = matchPropKeyJE.getAsJsonObject("match");
assertEquals(propertyValue, propKeyJO.get(testedUtil.getNestedPropertyKeyPath()).getAsString());
JsonObject regexpPropValueJE = (JsonObject) mustJA.get(1);
JsonObject propValueJO = regexpPropValueJE.getAsJsonObject("regexp");
assertEquals(GSON_FAILING_REGEX, propValueJO.get(testedUtil.getNestedPropertyValuePath()).getAsString());
}
/**
* This test demonstrates that some valid regex strings break the gson library.
* If one day this test fails, it means the library has added support for these kinds of strings,
* so the tested class could be removed and the code using it could simply build a gson.JsonObject from a JSON string.
*/
@Test
void testShowGsonLibFailing() {
String workingRegex = ".*[abc]?jk";
String workingNestedQuery =
"{'nested': { 'path': 'myProperties', 'query': { 'bool': { 'must': [" +
"{'match': {'myProperties.myKey': 'propAAA' }}," +
"{'regexp': {'myProperties.myValueString': '" + workingRegex + "'}}" +
"]}}}}";
JsonObject jsonObj = new Gson().fromJson(workingNestedQuery, JsonObject.class);
assertFalse(jsonObj.isJsonNull());
// same json structure fails with some valid regex strings
String nestedQuery =
"{'nested': { 'path': 'myProperties', 'query': { 'bool': { 'must': [" +
"{'match': {'myProperties.myKey': 'propAAA' }}," +
"{'regexp': {'myProperties.myValueString': '" + GSON_FAILING_REGEX + "'}}" +
"]}}}}";
JsonSyntaxException thrown = assertThrows(
JsonSyntaxException.class,
() -> new Gson().fromJson(nestedQuery, JsonObject.class));
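// The regex's backslash ends up as "\^" inside the JSON string, which is not a valid JSON escape
// sequence, so gson is expected to fail with a MalformedJsonException wrapped in a JsonSyntaxException.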
assertTrue(thrown.getCause() instanceof MalformedJsonException);
}
}