Work on Lucene searching

jamesagnew 2015-10-07 09:01:32 -04:00
parent 3050d4776c
commit ad868038a8
50 changed files with 529 additions and 110 deletions

View File

@ -147,6 +147,8 @@ public class Constants {
public static final String TAG_SUBSETTED_SYSTEM = "http://hl7.org/fhir/v3/ObservationValue";
public static final String URL_TOKEN_HISTORY = "_history";
public static final String URL_TOKEN_METADATA = "metadata";
public static final String PARAM_CONTENT = "_content";
public static final String PARAM_TEXT = "_text";
static {
Map<String, EncodingEnum> valToEncoding = new HashMap<String, EncodingEnum>();
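
These two constants back the standard FHIR _content and _text fulltext search parameters that the rest of this commit wires through the JPA server. A minimal sketch of how they end up in a SearchParameterMap, mirroring the generated resource-provider change at the end of this commit (theFtContent and theFtText stand for the StringAndListParam arguments declared there):

SearchParameterMap paramMap = new SearchParameterMap();
paramMap.add(Constants.PARAM_CONTENT, theFtContent); // populated from ?_content=...
paramMap.add(Constants.PARAM_TEXT, theFtText);       // populated from ?_text=...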

View File

@ -1,6 +1,7 @@
target/
/bin
nohup.out
lucene_indexes/
# Created by https://www.gitignore.io

View File

@ -45,6 +45,8 @@ import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Predicate;
import javax.persistence.criteria.Root;
import javax.xml.stream.events.Characters;
import javax.xml.stream.events.XMLEvent;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.Validate;
@ -250,8 +252,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
try {
resourceDefinition = getContext().getResourceDefinition(typeString);
} catch (DataFormatException e) {
throw new InvalidRequestException(
"Invalid resource reference found at path[" + nextPathsUnsplit + "] - Resource type is unknown or not supported on this server - " + nextValue.getReference().getValue());
throw new InvalidRequestException("Invalid resource reference found at path[" + nextPathsUnsplit + "] - Resource type is unknown or not supported on this server - " + nextValue.getReference().getValue());
}
Class<? extends IBaseResource> type = resourceDefinition.getImplementingClass();
@ -287,8 +288,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
}
if (!typeString.equals(target.getResourceType())) {
throw new UnprocessableEntityException("Resource contains reference to " + nextValue.getReference().getValue() + " but resource with ID " + nextValue.getReference().getIdPart()
+ " is actually of type " + target.getResourceType());
throw new UnprocessableEntityException("Resource contains reference to " + nextValue.getReference().getValue() + " but resource with ID " + nextValue.getReference().getIdPart() + " is actually of type " + target.getResourceType());
}
/*
@ -716,9 +716,11 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
}
/**
* This method is called when an update to an existing resource detects that the resource supplied for update is missing a tag/profile/security label that the currently persisted resource holds.
* This method is called when an update to an existing resource detects that the resource supplied for update is
* missing a tag/profile/security label that the currently persisted resource holds.
* <p>
* The default implementation removes any profile declarations, but leaves tags and security labels in place. Subclasses may choose to override and change this behaviour.
* The default implementation removes any profile declarations, but leaves tags and security labels in place.
* Subclasses may choose to override and change this behaviour.
* </p>
*
* @param theEntity
@ -726,7 +728,8 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
* @param theTag
* The tag
* @return Returns <code>true</code> if the tag should be removed
* @see <a href="http://hl7.org/fhir/2015Sep/resource.html#1.11.3.7">Updates to Tags, Profiles, and Security Labels</a> for a description of the logic that the default behaviour follows.
* @see <a href="http://hl7.org/fhir/2015Sep/resource.html#1.11.3.7">Updates to Tags, Profiles, and Security
* Labels</a> for a description of the logic that the default behaviour follows.
*/
protected boolean shouldDroppedTagBeRemovedOnUpdate(ResourceTable theEntity, ResourceTag theTag) {
if (theTag.getTag().getTagType() == TagTypeEnum.PROFILE) {
@ -763,13 +766,13 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
RuntimeResourceDefinition resourceDef = getContext().getResourceDefinition(theResourceType);
SearchParameterMap paramMap = translateMatchUrl(theMatchUrl, resourceDef);
if (paramMap.isEmpty()) {
throw new InvalidRequestException("Invalid match URL[" + theMatchUrl + "] - URL has no search parameters");
}
IFhirResourceDao<R> dao = getDao(theResourceType);
Set<Long> ids = dao.searchForIdsWithAndOr(paramMap);
Set<Long> ids = dao.searchForIdsWithAndOr(paramMap, new HashSet<Long>());
return ids;
}
@ -845,7 +848,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
if (RESOURCE_META_PARAMS.containsKey(nextParamName)) {
if (isNotBlank(paramList.get(0).getQualifier()) && paramList.get(0).getQualifier().startsWith(".")) {
throw new InvalidRequestException("Invalid parameter chain: " + nextParamName + paramList.get(0).getQualifier());
throw new InvalidRequestException("Invalid parameter chain: " + nextParamName + paramList.get(0).getQualifier());
}
IQueryParameterAnd<?> type = newInstanceAnd(nextParamName);
type.setValuesAsQueryTokens((paramList));
@ -857,7 +860,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
if (paramDef == null) {
throw new InvalidRequestException("Failed to parse match URL[" + theMatchUrl + "] - Resource type " + resourceDef.getName() + " does not have a parameter with name: " + nextParamName);
}
IQueryParameterAnd<?> param = MethodUtil.parseQueryParams(paramDef, nextParamName, paramList);
paramMap.add(nextParamName, param);
}
@ -1100,7 +1103,8 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
}
} else if (theForHistoryOperation) {
/*
* If the create and update times match, this was when the resource was created so we should mark it as a POST. Otherwise, it's a PUT.
* If the create and update times match, this was when the resource was created so we should mark it as a POST.
* Otherwise, it's a PUT.
*/
Date published = theEntity.getPublished().getValue();
Date updated = theEntity.getUpdated().getValue();
@ -1194,8 +1198,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
}
@SuppressWarnings("unchecked")
protected ResourceTable updateEntity(final IResource theResource, ResourceTable theEntity, boolean theUpdateHistory, Date theDeletedTimestampOrNull, boolean thePerformIndexing,
boolean theUpdateVersion, Date theUpdateTime) {
protected ResourceTable updateEntity(final IResource theResource, ResourceTable theEntity, boolean theUpdateHistory, Date theDeletedTimestampOrNull, boolean thePerformIndexing, boolean theUpdateVersion, Date theUpdateTime) {
/*
* This should be the very first thing..
@ -1204,8 +1207,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
validateResourceForStorage((T) theResource, theEntity);
String resourceType = myContext.getResourceDefinition(theResource).getName();
if (isNotBlank(theEntity.getResourceType()) && !theEntity.getResourceType().equals(resourceType)) {
throw new UnprocessableEntityException(
"Existing resource ID[" + theEntity.getIdDt().toUnqualifiedVersionless() + "] is of type[" + theEntity.getResourceType() + "] - Cannot update with [" + resourceType + "]");
throw new UnprocessableEntityException("Existing resource ID[" + theEntity.getIdDt().toUnqualifiedVersionless() + "] is of type[" + theEntity.getResourceType() + "] - Cannot update with [" + resourceType + "]");
}
}
@ -1261,6 +1263,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
links = Collections.emptySet();
theEntity.setDeleted(theDeletedTimestampOrNull);
theEntity.setUpdated(theDeletedTimestampOrNull);
theEntity.setNarrativeTextParsedIntoWords(null);
} else {
@ -1309,6 +1312,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
theEntity.setResourceLinks(links);
theEntity.setHasLinks(links.isEmpty() == false);
theEntity.setIndexStatus(INDEX_STATUS_INDEXED);
theEntity.setNarrativeTextParsedIntoWords(parseNarrativeTextIntoWords(theResource));
} else {
@ -1425,7 +1429,8 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
}
/**
* Subclasses may override to provide behaviour. Called when a resource has been inserted into the database for the first time.
* Subclasses may override to provide behaviour. Called when a resource has been inserted into the database for the
* first time.
*
* @param theEntity
* The resource
@ -1437,7 +1442,8 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
}
/**
* Subclasses may override to provide behaviour. Called when a resource has been inserted into the database for the first time.
* Subclasses may override to provide behaviour. Called when a resource has been inserted into the database for the
* first time.
*
* @param theEntity
* The resource
@ -1449,8 +1455,9 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
}
/**
* This method is invoked immediately before storing a new resource, or an update to an existing resource to allow the DAO to ensure that it is valid for persistence. By default, checks for the
* "subsetted" tag and rejects resources which have it. Subclasses should call the superclass implementation to preserve this check.
* This method is invoked immediately before storing a new resource, or an update to an existing resource to allow
* the DAO to ensure that it is valid for persistence. By default, checks for the "subsetted" tag and rejects
* resources which have it. Subclasses should call the superclass implementation to preserve this check.
*
* @param theResource
* The resource that is about to be persisted
@ -1481,4 +1488,18 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
return new String(out).toUpperCase();
}
private static String parseNarrativeTextIntoWords(IResource theResource) {
StringBuilder b = new StringBuilder();
List<XMLEvent> xmlEvents = theResource.getText().getDiv().getValue();
if (xmlEvents != null) {
for (XMLEvent next : xmlEvents) {
if (next.isCharacters()) {
Characters characters = next.asCharacters();
b.append(characters.getData()).append(" ");
}
}
}
return b.toString();
}
}
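
The new parseNarrativeTextIntoWords() helper walks the StAX events of the narrative <div> and keeps only character data, so markup and attribute names never reach the index. An illustrative sketch of the expected behaviour (an assumption, confirmed only indirectly by the narrative-search test added later in this commit):

Patient patient = new Patient();
patient.getText().setDiv("<div>AAAS<p>FOO</p> CCC </div>");
// parseNarrativeTextIntoWords(patient) concatenates only the character events,
// so the result contains "AAAS", "FOO" and "CCC" but never the tag names
// "div" or "p" - which is why the "Tag Contents" case in the new narrative
// search test expects no matches when searching _text for "div".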

View File

@ -158,6 +158,9 @@ public abstract class BaseHapiFhirResourceDao<T extends IResource> extends BaseH
@Autowired
private DaoConfig myDaoConfig;
@Autowired(required=false)
private ISearchDao mySearchDao;
private String myResourceName;
private Class<T> myResourceType;
private String mySecondaryPrimaryKeyParamName;
@ -2147,7 +2150,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IResource> extends BaseH
StopWatch w = new StopWatch();
final InstantDt now = InstantDt.withCurrentTime();
Set<Long> loadPids;
Collection<Long> loadPids;
if (theParams.getEverythingMode() != null) {
CriteriaBuilder builder = myEntityManager.getCriteriaBuilder();
@ -2185,12 +2188,24 @@ public abstract class BaseHapiFhirResourceDao<T extends IResource> extends BaseH
if (loadPids.isEmpty()) {
return new SimpleBundleProvider();
}
} else {
loadPids = searchForIdsWithAndOr(theParams);
} else {
List<Long> searchResultPids;
if (mySearchDao == null) {
searchResultPids = null;
} else {
searchResultPids = mySearchDao.search(getResourceName(), theParams);
}
if (theParams.isEmpty()) {
loadPids = searchResultPids;
} else {
loadPids = searchForIdsWithAndOr(theParams, searchResultPids);
}
if (loadPids.isEmpty()) {
return new SimpleBundleProvider();
}
}
// // Load _include and _revinclude before filter and sort in everything mode
@ -2279,15 +2294,15 @@ public abstract class BaseHapiFhirResourceDao<T extends IResource> extends BaseH
return retVal;
}
private List<Long> filterResourceIdsByLastUpdated(Set<Long> loadPids, final DateRangeParam lu) {
private List<Long> filterResourceIdsByLastUpdated(Collection<Long> thePids, final DateRangeParam theLastUpdated) {
CriteriaBuilder builder = myEntityManager.getCriteriaBuilder();
CriteriaQuery<Long> cq = builder.createQuery(Long.class);
Root<ResourceTable> from = cq.from(ResourceTable.class);
cq.select(from.get("myId").as(Long.class));
Predicate predicateIds = (from.get("myId").in(loadPids));
Predicate predicateLower = lu.getLowerBoundAsInstant() != null ? builder.greaterThanOrEqualTo(from.<Date> get("myUpdated"), lu.getLowerBoundAsInstant()) : null;
Predicate predicateUpper = lu.getUpperBoundAsInstant() != null ? builder.lessThanOrEqualTo(from.<Date> get("myUpdated"), lu.getUpperBoundAsInstant()) : null;
Predicate predicateIds = (from.get("myId").in(thePids));
Predicate predicateLower = theLastUpdated.getLowerBoundAsInstant() != null ? builder.greaterThanOrEqualTo(from.<Date> get("myUpdated"), theLastUpdated.getLowerBoundAsInstant()) : null;
Predicate predicateUpper = theLastUpdated.getUpperBoundAsInstant() != null ? builder.lessThanOrEqualTo(from.<Date> get("myUpdated"), theLastUpdated.getUpperBoundAsInstant()) : null;
if (predicateLower != null && predicateUpper != null) {
cq.where(predicateIds, predicateLower, predicateUpper);
} else if (predicateLower != null) {
@ -2313,20 +2328,20 @@ public abstract class BaseHapiFhirResourceDao<T extends IResource> extends BaseH
return query;
}
private List<Long> processSort(final SearchParameterMap theParams, Set<Long> theLoadPids) {
private List<Long> processSort(final SearchParameterMap theParams, Collection<Long> theLoadPids) {
final List<Long> pids;
Set<Long> loadPids = theLoadPids;
// Set<Long> loadPids = theLoadPids;
if (theParams.getSort() != null && isNotBlank(theParams.getSort().getParamName())) {
List<Order> orders = new ArrayList<Order>();
List<Predicate> predicates = new ArrayList<Predicate>();
CriteriaBuilder builder = myEntityManager.getCriteriaBuilder();
CriteriaQuery<Tuple> cq = builder.createTupleQuery();
Root<ResourceTable> from = cq.from(ResourceTable.class);
predicates.add(from.get("myId").in(loadPids));
predicates.add(from.get("myId").in(theLoadPids));
createSort(builder, from, theParams.getSort(), orders, predicates);
if (orders.size() > 0) {
Set<Long> originalPids = loadPids;
loadPids = new LinkedHashSet<Long>();
Collection<Long> originalPids = theLoadPids;
LinkedHashSet<Long> loadPids = new LinkedHashSet<Long>();
cq.multiselect(from.get("myId").as(Long.class));
cq.where(predicates.toArray(new Predicate[0]));
cq.orderBy(orders);
@ -2349,10 +2364,20 @@ public abstract class BaseHapiFhirResourceDao<T extends IResource> extends BaseH
}
} else {
pids = new ArrayList<Long>(loadPids);
pids = toList(theLoadPids);
}
} else {
pids = new ArrayList<Long>(loadPids);
pids = toList(theLoadPids);
}
return pids;
}
private List<Long> toList(Collection<Long> theLoadPids) {
final List<Long> pids;
if (theLoadPids instanceof List) {
pids = (List<Long>) theLoadPids;
} else {
pids = new ArrayList<Long>(theLoadPids);
}
return pids;
}
@ -2368,7 +2393,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IResource> extends BaseH
for (Entry<String, IQueryParameterType> nextEntry : theParams.entrySet()) {
map.add(nextEntry.getKey(), (nextEntry.getValue()));
}
return searchForIdsWithAndOr(map);
return searchForIdsWithAndOr(map, null);
}
@Override
@ -2377,7 +2402,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IResource> extends BaseH
}
@Override
public Set<Long> searchForIdsWithAndOr(SearchParameterMap theParams) {
public Set<Long> searchForIdsWithAndOr(SearchParameterMap theParams, Collection<Long> theInitialPids) {
SearchParameterMap params = theParams;
if (params == null) {
params = new SearchParameterMap();
@ -2386,6 +2411,9 @@ public abstract class BaseHapiFhirResourceDao<T extends IResource> extends BaseH
RuntimeResourceDefinition resourceDef = getContext().getResourceDefinition(myResourceType);
Set<Long> pids = new HashSet<Long>();
if (theInitialPids != null) {
pids.addAll(theInitialPids);
}
for (Entry<String, List<List<? extends IQueryParameterType>>> nextParamEntry : params.entrySet()) {
String nextParamName = nextParamEntry.getKey();

View File

@ -0,0 +1,98 @@
package ca.uhn.fhir.jpa.dao;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.persistence.PersistenceContextType;
import org.apache.commons.lang3.StringUtils;
import org.apache.lucene.search.Query;
import org.hibernate.search.jpa.FullTextEntityManager;
import org.hibernate.search.jpa.FullTextQuery;
import org.hibernate.search.query.dsl.BooleanJunction;
import org.hibernate.search.query.dsl.QueryBuilder;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.springframework.transaction.annotation.Transactional;
import ca.uhn.fhir.jpa.entity.ResourceTable;
import ca.uhn.fhir.model.api.IQueryParameterType;
import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.server.Constants;
public class FhirSearchDao extends BaseHapiFhirDao<IBaseResource> implements ISearchDao {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirSearchDao.class);
@PersistenceContext(type = PersistenceContextType.TRANSACTION)
private EntityManager myEntityManager;
@Transactional()
@Override
public List<Long> search(String theResourceName, SearchParameterMap theParams) {
FullTextEntityManager em = org.hibernate.search.jpa.Search.getFullTextEntityManager(myEntityManager);
QueryBuilder qb = em
.getSearchFactory()
.buildQueryBuilder()
.forEntity(ResourceTable.class).get();
BooleanJunction<?> bool = qb.bool();
List<List<? extends IQueryParameterType>> contentAndTerms = theParams.remove(Constants.PARAM_CONTENT);
addTextSearch(qb, bool, contentAndTerms, "myParamsString.myValueComplete");
List<List<? extends IQueryParameterType>> textAndTerms = theParams.remove(Constants.PARAM_TEXT);
addTextSearch(qb, bool, textAndTerms, "myNarrativeText");
if (bool.isEmpty()) {
return null;
}
if (isNotBlank(theResourceName)) {
bool.must(qb.keyword().onField("myResourceType").matching(theResourceName).createQuery());
}
Query luceneQuery = bool.createQuery();
// wrap Lucene query in a javax.persistence.Query
FullTextQuery jpaQuery = em.createFullTextQuery(luceneQuery, ResourceTable.class);
jpaQuery.setProjection("myId");
// execute search
List<?> result = jpaQuery.getResultList();
ArrayList<Long> retVal = new ArrayList<Long>();
for (Object object : result) {
Object[] nextArray = (Object[]) object;
retVal.add((Long)nextArray[0]);
}
return retVal;
}
private void addTextSearch(QueryBuilder qb, BooleanJunction<?> bool, List<List<? extends IQueryParameterType>> contentAndTerms, String field) {
if (contentAndTerms == null) {
return;
}
for (List<? extends IQueryParameterType> nextAnd : contentAndTerms) {
Set<String> terms = new HashSet<String>();
for (IQueryParameterType nextOr : nextAnd) {
StringParam nextOrString = (StringParam) nextOr;
String nextValueTrimmed = StringUtils.defaultString(nextOrString.getValue()).trim();
if (isNotBlank(nextValueTrimmed)) {
terms.add(nextValueTrimmed);
}
}
if (terms.isEmpty() == false) {
String joinedTerms = StringUtils.join(terms, ' ');
bool.must(qb.keyword().onField(field).matching(joinedTerms).createQuery());
}
}
}
}
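
FhirSearchDao removes the _content and _text entries from the incoming SearchParameterMap, turns each AND group of OR terms into a Hibernate Search keyword query (on myParamsString.myValueComplete for _content and on myNarrativeText for _text), optionally restricts myResourceType, and projects the matching resource PIDs. A minimal usage sketch, assuming the DAO has been injected as ISearchDao via the Spring config change below:

SearchParameterMap params = new SearchParameterMap();
StringAndListParam content = new StringAndListParam();
content.addAnd(new StringOrListParam().addOr(new StringParam("cancer")));
content.addAnd(new StringOrListParam().addOr(new StringParam("remission")));
params.add(Constants.PARAM_CONTENT, content);
// Returns the PIDs of Patient resources matching both terms; the DAO returns
// null when neither _content nor _text is present in the map.
List<Long> pids = mySearchDao.search("Patient", params);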

View File

@ -1,5 +1,7 @@
package ca.uhn.fhir.jpa.dao;
import java.util.Collection;
/*
* #%L
* HAPI FHIR JPA Server
@ -121,7 +123,7 @@ public interface IFhirResourceDao<T extends IBaseResource> extends IDao {
Set<Long> searchForIds(String theParameterName, IQueryParameterType theValue);
Set<Long> searchForIdsWithAndOr(SearchParameterMap theParams);
Set<Long> searchForIdsWithAndOr(SearchParameterMap theParams, Collection<Long> theInitialPids);
DaoMethodOutcome update(T theResource);

View File

@ -1,11 +1,9 @@
package ca.uhn.fhir.jpa.dao;
import ca.uhn.fhir.rest.server.IBundleProvider;
import java.util.List;
public interface ISearchDao {
public static final String FULL_TEXT_PARAM_NAME = "fullTextSearch";
IBundleProvider search(SearchParameterMap theParams);
List<Long> search(String theResourceName, SearchParameterMap theParams);
}

View File

@ -1,53 +0,0 @@
package ca.uhn.fhir.jpa.dao;
import java.util.List;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.persistence.PersistenceContextType;
import org.hibernate.search.jpa.FullTextEntityManager;
import org.hibernate.search.jpa.FullTextQuery;
import org.hibernate.search.query.dsl.QueryBuilder;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.springframework.transaction.annotation.Transactional;
import ca.uhn.fhir.jpa.entity.ResourceIndexedSearchParamString;
import ca.uhn.fhir.rest.server.IBundleProvider;
public class SearchDao extends BaseHapiFhirDao<IBaseResource> implements ISearchDao {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(SearchDao.class);
@PersistenceContext(type = PersistenceContextType.TRANSACTION)
private EntityManager myEntityManager;
@Transactional()
@Override
public IBundleProvider search(SearchParameterMap theParams) {
FullTextEntityManager em = org.hibernate.search.jpa.Search.getFullTextEntityManager(myEntityManager);
for (String nextParamName : theParams.keySet()) {
if (nextParamName.equals(FULL_TEXT_PARAM_NAME)) {
QueryBuilder qb = em.getSearchFactory().buildQueryBuilder().forEntity(ResourceIndexedSearchParamString.class).get();
org.apache.lucene.search.Query luceneQuery = qb
.keyword()
.onFields("myValueComplete")
.matching("AAAS")
.createQuery();
// wrap Lucene query in a javax.persistence.Query
FullTextQuery jpaQuery = em.createFullTextQuery(luceneQuery, ResourceIndexedSearchParamString.class);
// execute search
List<?> result = jpaQuery.getResultList();
for (Object object : result) {
ourLog.info(""+ object);
}
}
}
return null;
}
}

View File

@ -32,7 +32,6 @@ import org.hibernate.search.annotations.Field;
import org.hibernate.search.annotations.Indexed;
//@formatter:off
@Indexed
@Entity
@Table(name = "HFJ_SPIDX_COORDS" /* , indexes = { @Index(name = "IDX_SP_TOKEN", columnList = "SP_SYSTEM,SP_VALUE") } */)
@org.hibernate.annotations.Table(appliesTo = "HFJ_SPIDX_COORDS", indexes = {

View File

@ -36,7 +36,6 @@ import org.hibernate.search.annotations.Field;
import org.hibernate.search.annotations.Indexed;
//@formatter:off
@Indexed
@Entity
@Table(name = "HFJ_SPIDX_DATE" /*, indexes= {@Index(name="IDX_SP_DATE", columnList= "SP_VALUE_LOW,SP_VALUE_HIGH")}*/)
@org.hibernate.annotations.Table(appliesTo = "HFJ_SPIDX_DATE", indexes= {

View File

@ -31,10 +31,13 @@ import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import org.hibernate.search.annotations.Field;
import org.hibernate.search.annotations.FieldBridge;
import org.hibernate.search.annotations.Indexed;
import org.hibernate.search.annotations.NumericField;
import ca.uhn.fhir.jpa.util.BigDecimalNumericFieldBridge;
//@formatter:off
@Indexed
@Entity
@Table(name = "HFJ_SPIDX_NUMBER" /*, indexes= {@Index(name="IDX_SP_NUMBER", columnList="SP_VALUE")}*/ )
@org.hibernate.annotations.Table(appliesTo = "HFJ_SPIDX_NUMBER", indexes= {
@ -47,6 +50,8 @@ public class ResourceIndexedSearchParamNumber extends BaseResourceIndexedSearchP
@Column(name = "SP_VALUE", nullable = true)
@Field
@NumericField
@FieldBridge(impl = BigDecimalNumericFieldBridge.class)
public BigDecimal myValue;
public ResourceIndexedSearchParamNumber() {

View File

@ -31,10 +31,13 @@ import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import org.hibernate.search.annotations.Field;
import org.hibernate.search.annotations.FieldBridge;
import org.hibernate.search.annotations.Indexed;
import org.hibernate.search.annotations.NumericField;
import ca.uhn.fhir.jpa.util.BigDecimalNumericFieldBridge;
//@formatter:off
@Indexed
@Entity
@Table(name = "HFJ_SPIDX_QUANTITY" /*, indexes= {@Index(name="IDX_SP_NUMBER", columnList="SP_VALUE")}*/ )
@org.hibernate.annotations.Table(appliesTo = "HFJ_SPIDX_QUANTITY", indexes= {
@ -57,6 +60,8 @@ public class ResourceIndexedSearchParamQuantity extends BaseResourceIndexedSearc
@Column(name = "SP_VALUE", nullable = true)
@Field
@NumericField
@FieldBridge(impl = BigDecimalNumericFieldBridge.class)
public BigDecimal myValue;
public ResourceIndexedSearchParamQuantity() {

View File

@ -32,7 +32,6 @@ import org.apache.commons.lang3.builder.ToStringStyle;
import org.hibernate.search.annotations.Field;
import org.hibernate.search.annotations.Indexed;
@Indexed
@Entity
@Table(name = "HFJ_SPIDX_STRING"/* , indexes= {@Index(name="IDX_SP_STRING", columnList="SP_VALUE_NORMALIZED")} */)
@org.hibernate.annotations.Table(appliesTo = "HFJ_SPIDX_STRING", indexes = {

View File

@ -32,7 +32,6 @@ import org.apache.commons.lang3.builder.ToStringStyle;
import org.hibernate.search.annotations.Field;
import org.hibernate.search.annotations.Indexed;
@Indexed
@Entity
@Table(name = "HFJ_SPIDX_TOKEN" /* , indexes = { @Index(name = "IDX_SP_TOKEN", columnList = "SP_SYSTEM,SP_VALUE") } */)
@org.hibernate.annotations.Table(appliesTo = "HFJ_SPIDX_TOKEN", indexes = { @org.hibernate.annotations.Index(name = "IDX_SP_TOKEN", columnNames = { "RES_TYPE", "SP_NAME", "SP_SYSTEM", "SP_VALUE" }),

View File

@ -33,7 +33,6 @@ import org.hibernate.search.annotations.Field;
import org.hibernate.search.annotations.Indexed;
//@formatter:off
@Indexed
@Entity
@Table(name = "HFJ_SPIDX_URI" /* , indexes = { @Index(name = "IDX_SP_TOKEN", columnList = "SP_SYSTEM,SP_VALUE") } */)
@org.hibernate.annotations.Table(appliesTo = "HFJ_SPIDX_URI", indexes = {

View File

@ -35,14 +35,21 @@ import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Index;
import javax.persistence.Lob;
import javax.persistence.OneToMany;
import javax.persistence.Table;
import org.hibernate.search.annotations.Field;
import org.hibernate.search.annotations.Indexed;
import org.hibernate.search.annotations.IndexedEmbedded;
import ca.uhn.fhir.jpa.search.IndexNonDeletedInterceptor;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.rest.server.Constants;
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
//@formatter:off
@Indexed(interceptor=IndexNonDeletedInterceptor.class)
@Entity
@Table(name = "HFJ_RESOURCE", uniqueConstraints = {}, indexes= {
@Index(name = "IDX_RES_DATE", columnList="RES_UPDATED"),
@ -76,43 +83,58 @@ public class ResourceTable extends BaseHasResource implements Serializable {
@Column(name = "RES_LANGUAGE", length = MAX_LANGUAGE_LENGTH, nullable = true)
private String myLanguage;
/**
* Holds the narrative text only - Used for Fulltext searching but not directly stored in the DB
*/
@Column(name = "SP_NARRATIVE_TEXT")
@Lob
@Field()
private String myNarrativeText;
@OneToMany(mappedBy = "myResource", cascade = {}, fetch = FetchType.LAZY, orphanRemoval = false)
@IndexedEmbedded
private Collection<ResourceIndexedSearchParamCoords> myParamsCoords;
@Column(name = "SP_COORDS_PRESENT")
private boolean myParamsCoordsPopulated;
@OneToMany(mappedBy = "myResource", cascade = {}, fetch = FetchType.LAZY, orphanRemoval = false)
@IndexedEmbedded
private Collection<ResourceIndexedSearchParamDate> myParamsDate;
@Column(name = "SP_DATE_PRESENT")
private boolean myParamsDatePopulated;
@OneToMany(mappedBy = "myResource", cascade = {}, fetch = FetchType.LAZY, orphanRemoval = false)
@IndexedEmbedded
private Collection<ResourceIndexedSearchParamNumber> myParamsNumber;
@Column(name = "SP_NUMBER_PRESENT")
private boolean myParamsNumberPopulated;
@OneToMany(mappedBy = "myResource", cascade = {}, fetch = FetchType.LAZY, orphanRemoval = false)
@IndexedEmbedded
private Collection<ResourceIndexedSearchParamQuantity> myParamsQuantity;
@Column(name = "SP_QUANTITY_PRESENT")
private boolean myParamsQuantityPopulated;
@OneToMany(mappedBy = "myResource", cascade = {}, fetch = FetchType.LAZY, orphanRemoval = false)
@IndexedEmbedded
private Collection<ResourceIndexedSearchParamString> myParamsString;
@Column(name = "SP_STRING_PRESENT")
private boolean myParamsStringPopulated;
@OneToMany(mappedBy = "myResource", cascade = {}, fetch = FetchType.LAZY, orphanRemoval = false)
@IndexedEmbedded
private Collection<ResourceIndexedSearchParamToken> myParamsToken;
@Column(name = "SP_TOKEN_PRESENT")
private boolean myParamsTokenPopulated;
@OneToMany(mappedBy = "myResource", cascade = {}, fetch = FetchType.LAZY, orphanRemoval = false)
@IndexedEmbedded
private Collection<ResourceIndexedSearchParamUri> myParamsUri;
@Column(name = "SP_URI_PRESENT")
@ -125,6 +147,7 @@ public class ResourceTable extends BaseHasResource implements Serializable {
private Collection<ResourceLink> myResourceLinks;
@Column(name = "RES_TYPE", length = RESTYPE_LEN)
@Field
private String myResourceType;
@OneToMany(mappedBy = "myResource", cascade = CascadeType.ALL, fetch = FetchType.LAZY, orphanRemoval = true)
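
With @IndexedEmbedded on these collections the search-parameter rows are folded into the ResourceTable Lucene document, and the new @Lob myNarrativeText column (filled in by parseNarrativeTextIntoWords() above) is indexed via @Field. That is what lets FhirSearchDao target the embedded paths directly, for example (illustrative, mirroring the field names used earlier in this commit):

qb.keyword().onField("myParamsString.myValueComplete").matching("cancer").createQuery();
qb.keyword().onField("myNarrativeText").matching("cancer").createQuery();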
@ -297,6 +320,10 @@ public class ResourceTable extends BaseHasResource implements Serializable {
myLanguage = theLanguage;
}
public void setNarrativeTextParsedIntoWords(String theNarrativeText) {
myNarrativeText = theNarrativeText;
}
public void setParamsCoords(Collection<ResourceIndexedSearchParamCoords> theParamsCoords) {
if (!isParamsTokenPopulated() && theParamsCoords.isEmpty()) {
return;

View File

@ -0,0 +1,38 @@
package ca.uhn.fhir.jpa.search;
import org.hibernate.search.indexes.interceptor.EntityIndexingInterceptor;
import org.hibernate.search.indexes.interceptor.IndexingOverride;
import ca.uhn.fhir.jpa.entity.ResourceTable;
/**
* Only index non-deleted resources
*/
public class IndexNonDeletedInterceptor implements EntityIndexingInterceptor<ResourceTable> {
@Override
public IndexingOverride onAdd(ResourceTable entity) {
if (entity.getDeleted() == null) {
return IndexingOverride.APPLY_DEFAULT;
}
return IndexingOverride.SKIP;
}
@Override
public IndexingOverride onUpdate(ResourceTable entity) {
if (entity.getDeleted() == null) {
return IndexingOverride.UPDATE;
}
return IndexingOverride.REMOVE;
}
@Override
public IndexingOverride onDelete(ResourceTable entity) {
return IndexingOverride.APPLY_DEFAULT;
}
@Override
public IndexingOverride onCollectionUpdate(ResourceTable entity) {
return onUpdate(entity);
}
}

View File

@ -0,0 +1,42 @@
package ca.uhn.fhir.jpa.util;
import java.math.BigDecimal;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.IndexableField;
import org.hibernate.search.bridge.LuceneOptions;
import org.hibernate.search.bridge.TwoWayFieldBridge;
public class BigDecimalNumericFieldBridge implements TwoWayFieldBridge {
@Override
public void set(String name, Object value, Document document, LuceneOptions luceneOptions) {
if (value == null) {
if (luceneOptions.indexNullAs() != null) {
luceneOptions.addFieldToDocument(name, luceneOptions.indexNullAs(), document);
}
} else {
BigDecimal bdValue = (BigDecimal)value;
applyToLuceneOptions(luceneOptions, name, bdValue.doubleValue(), document);
}
}
@Override
public final String objectToString(final Object object) {
return object == null ? null : object.toString();
}
@Override
public Object get(final String name, final Document document) {
final IndexableField field = document.getField(name);
if (field != null) {
Double doubleVal = (Double)field.numericValue();
return new BigDecimal(doubleVal);
} else {
return null;
}
}
protected void applyToLuceneOptions(LuceneOptions luceneOptions, String name, Number value, Document document) {
luceneOptions.addNumericFieldToDocument(name, value, document);
}
}
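
The bridge exists because Hibernate Search has no built-in numeric encoding for BigDecimal: set() writes the value as a double-backed numeric field and get() rebuilds a BigDecimal from that double on projection. A conceptual round trip (illustrative only; precision beyond what a double can hold is not preserved in the index):

BigDecimal original = new BigDecimal("1.25");
double indexedValue = original.doubleValue();        // what set(...) stores in the document
BigDecimal projected = new BigDecimal(indexedValue); // what get(...) returns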

View File

@ -16,6 +16,6 @@
<tx:annotation-driven transaction-manager="myTxManagerDstu2" />
<bean id="mySearchDaoDstu2" class="ca.uhn.fhir.jpa.dao.SearchDao" />
<bean id="mySearchDaoDstu2" class="ca.uhn.fhir.jpa.dao.FhirSearchDao" />
</beans>

View File

@ -168,7 +168,7 @@ public abstract class BaseJpaDstu2Test extends BaseJpaTest {
}
protected <T extends IBaseResource> T loadResourceFromClasspath(Class<T> type, String resourceName) throws IOException {
InputStream stream = FhirResourceDaoDstu2SearchTest.class.getResourceAsStream(resourceName);
InputStream stream = FhirResourceDaoDstu2SearchNoFtTest.class.getResourceAsStream(resourceName);
if (stream == null) {
fail("Unable to load resource: " + resourceName);
}

View File

@ -94,8 +94,8 @@ import ca.uhn.fhir.rest.server.IBundleProvider;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
@SuppressWarnings("unchecked")
public class FhirResourceDaoDstu2SearchTest extends BaseJpaDstu2Test {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirResourceDaoDstu2SearchTest.class);
public class FhirResourceDaoDstu2SearchFtTest extends BaseJpaDstu2Test {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirResourceDaoDstu2SearchFtTest.class);
@Test
public void testSearchWithEmptySort() {

View File

@ -0,0 +1,53 @@
package ca.uhn.fhir.jpa.dao;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.hl7.fhir.instance.model.api.IIdType;
import org.junit.Test;
import ca.uhn.fhir.model.api.IQueryParameterType;
import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum;
import ca.uhn.fhir.model.dstu2.resource.Observation;
import ca.uhn.fhir.model.dstu2.resource.Patient;
import ca.uhn.fhir.model.primitive.StringDt;
import ca.uhn.fhir.rest.server.Constants;
@SuppressWarnings("unchecked")
public class FhirResourceDaoDstu2SearchNoFtTest extends BaseJpaDstu2Test {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirResourceDaoDstu2SearchNoFtTest.class);
@Test
public void testSearchWithChainedParams() {
String methodName = "testSearchWithChainedParams";
IIdType pId1;
{
Patient patient = new Patient();
patient.addName().addGiven("methodName");
patient.addAddress().addLine("My fulltext address");
pId1 = myPatientDao.create(patient).getId();
}
Observation obs = new Observation();
obs.getSubject().setReference(pId1);
obs.setValue(new StringDt("This is the fulltext of the observation"));
IIdType oId1 = myObservationDao.create(obs).getId().toUnqualifiedVersionless();
obs = new Observation();
obs.getSubject().setReference(pId1);
obs.setValue(new StringDt("Another fulltext"));
IIdType oId2 = myObservationDao.create(obs).getId().toUnqualifiedVersionless();
SearchParameterMap params = new SearchParameterMap();
params.add(Constants.PARAM_CONTENT, new StringDt("fulltext"));
List<IIdType> patients = toUnqualifiedVersionlessIds(myPatientDao.search(params));
assertThat(patients, containsInAnyOrder(oId1, oId2));
}
}

View File

@ -1,13 +1,26 @@
package ca.uhn.fhir.jpa.dao;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.empty;
import static org.junit.Assert.assertThat;
import java.util.List;
import org.hibernate.search.jpa.FullTextEntityManager;
import org.hibernate.search.jpa.Search;
import org.junit.Before;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.transaction.annotation.Transactional;
import ca.uhn.fhir.jpa.entity.ResourceTable;
import ca.uhn.fhir.model.dstu2.resource.Organization;
import ca.uhn.fhir.model.dstu2.resource.Patient;
import ca.uhn.fhir.rest.param.StringAndListParam;
import ca.uhn.fhir.rest.param.StringOrListParam;
import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.server.Constants;
@ContextConfiguration(locations = { "classpath:fhir-spring-search-config-dstu2.xml" })
public class FhirSearchDaoDstu2Test extends BaseJpaDstu2Test {
@ -15,25 +28,159 @@ public class FhirSearchDaoDstu2Test extends BaseJpaDstu2Test {
@Autowired
private ISearchDao mySearchDao;
@Before
@Transactional
public void beforeFlushFT() {
FullTextEntityManager ftem = Search.getFullTextEntityManager(myEntityManager);
ftem.purgeAll(ResourceTable.class);
ftem.flushToIndexes();
}
@Test
public void testStringSearch() {
public void testContentSearch() {
Long id1;
{
Patient patient = new Patient();
patient.addIdentifier().setSystem("urn:system").setValue("001");
patient.addName().addGiven("testSearchStringParamWithNonNormalized_h\u00F6ra");
patient.addName().addFamily("AAAS");
myPatientDao.create(patient);
patient.addName().addFamily("CCC");
id1 = myPatientDao.create(patient).getId().toUnqualifiedVersionless().getIdPartAsLong();
}
Long id2;
{
Patient patient = new Patient();
patient.addIdentifier().setSystem("urn:system").setValue("002");
patient.addName().addGiven("testSearchStringParamWithNonNormalized_HORA");
myPatientDao.create(patient);
patient.addName().addFamily("AAAB");
patient.addName().addFamily("CCC");
id2 = myPatientDao.create(patient).getId().toUnqualifiedVersionless().getIdPartAsLong();
}
Long id3;
{
Organization org = new Organization();
org.setName("DDD");
id3 = myOrganizationDao.create(org).getId().toUnqualifiedVersionless().getIdPartAsLong();
}
SearchParameterMap map = new SearchParameterMap();
map.add(ISearchDao.FULL_TEXT_PARAM_NAME, new StringAndListParam().addAnd(new StringOrListParam().addOr(new StringParam("AAA"))));
mySearchDao.search(map);
String resourceName = "Patient";
// One term
{
StringAndListParam content = new StringAndListParam();
content.addAnd(new StringOrListParam().addOr(new StringParam("AAAS")));
map.add(Constants.PARAM_CONTENT, content);
List<Long> found = mySearchDao.search(resourceName, map);
assertThat(found, containsInAnyOrder(id1));
}
// OR
{
StringAndListParam content = new StringAndListParam();
content.addAnd(new StringOrListParam().addOr(new StringParam("AAAS")).addOr(new StringParam("AAAB")));
map.add(Constants.PARAM_CONTENT, content);
List<Long> found = mySearchDao.search(resourceName, map);
assertThat(found, containsInAnyOrder(id1, id2));
}
// AND
{
StringAndListParam content = new StringAndListParam();
content.addAnd(new StringOrListParam().addOr(new StringParam("AAAS")));
content.addAnd(new StringOrListParam().addOr(new StringParam("CCC")));
map.add(Constants.PARAM_CONTENT, content);
List<Long> found = mySearchDao.search(resourceName, map);
assertThat(found, containsInAnyOrder(id1));
}
// AND OR
{
StringAndListParam content = new StringAndListParam();
content.addAnd(new StringOrListParam().addOr(new StringParam("AAAB")).addOr(new StringParam("AAAS")));
content.addAnd(new StringOrListParam().addOr(new StringParam("CCC")));
map.add(Constants.PARAM_CONTENT, content);
List<Long> found = mySearchDao.search(resourceName, map);
assertThat(found, containsInAnyOrder(id1, id2));
}
// All Resource Types
{
StringAndListParam content = new StringAndListParam();
content.addAnd(new StringOrListParam().addOr(new StringParam("CCC")).addOr(new StringParam("DDD")));
map.add(Constants.PARAM_CONTENT, content);
List<Long> found = mySearchDao.search(null, map);
assertThat(found, containsInAnyOrder(id1, id2, id3));
}
}
@Test
public void testNarrativeSearch() {
Long id1;
{
Patient patient = new Patient();
patient.getText().setDiv("<div>AAAS<p>FOO</p> CCC </div>");
id1 = myPatientDao.create(patient).getId().toUnqualifiedVersionless().getIdPartAsLong();
}
Long id2;
{
Patient patient = new Patient();
patient.getText().setDiv("<div>AAAB<p>FOO</p> CCC </div>");
id2 = myPatientDao.create(patient).getId().toUnqualifiedVersionless().getIdPartAsLong();
}
SearchParameterMap map = new SearchParameterMap();
String resourceName = "Patient";
// One term
{
StringAndListParam content = new StringAndListParam();
content.addAnd(new StringOrListParam().addOr(new StringParam("AAAS")));
map.add(Constants.PARAM_TEXT, content);
List<Long> found = mySearchDao.search(resourceName, map);
assertThat(found, containsInAnyOrder(id1));
}
// OR
{
StringAndListParam content = new StringAndListParam();
content.addAnd(new StringOrListParam().addOr(new StringParam("AAAS")).addOr(new StringParam("AAAB")));
map.add(Constants.PARAM_TEXT, content);
List<Long> found = mySearchDao.search(resourceName, map);
assertThat(found, containsInAnyOrder(id1, id2));
}
// AND
{
StringAndListParam content = new StringAndListParam();
content.addAnd(new StringOrListParam().addOr(new StringParam("AAAS")));
content.addAnd(new StringOrListParam().addOr(new StringParam("CCC")));
map.add(Constants.PARAM_TEXT, content);
List<Long> found = mySearchDao.search(resourceName, map);
assertThat(found, containsInAnyOrder(id1));
}
// AND OR
{
StringAndListParam content = new StringAndListParam();
content.addAnd(new StringOrListParam().addOr(new StringParam("AAAB")).addOr(new StringParam("AAAS")));
content.addAnd(new StringOrListParam().addOr(new StringParam("CCC")));
map.add(Constants.PARAM_TEXT, content);
List<Long> found = mySearchDao.search(resourceName, map);
assertThat(found, containsInAnyOrder(id1, id2));
}
// Tag Contents
{
StringAndListParam content = new StringAndListParam();
content.addAnd(new StringOrListParam().addOr(new StringParam("div")));
map.add(Constants.PARAM_TEXT, content);
List<Long> found = mySearchDao.search(resourceName, map);
assertThat(found, empty());
}
}
}

View File

@ -50,6 +50,14 @@ public class ${className}ResourceProvider extends
@OptionalParam(name="_language")
StringAndListParam theResourceLanguage,
@Description(shortDefinition="Search the contents of the resource's data using a fulltext search")
@OptionalParam(name=ca.uhn.fhir.rest.server.Constants.PARAM_CONTENT)
StringAndListParam theFtContent,
@Description(shortDefinition="Search the contents of the resource's narrative using a fulltext search")
@OptionalParam(name=ca.uhn.fhir.rest.server.Constants.PARAM_TEXT)
StringAndListParam theFtText,
@Description(shortDefinition="Search for resources which have the given tag")
@OptionalParam(name=ca.uhn.fhir.rest.server.Constants.PARAM_TAG)
TokenAndListParam theSearchForTag,
@ -128,6 +136,8 @@ public class ${className}ResourceProvider extends
SearchParameterMap paramMap = new SearchParameterMap();
paramMap.add("_id", theId);
paramMap.add("_language", theResourceLanguage);
paramMap.add(ca.uhn.fhir.rest.server.Constants.PARAM_CONTENT, theFtContent);
paramMap.add(ca.uhn.fhir.rest.server.Constants.PARAM_TEXT, theFtText);
paramMap.add(ca.uhn.fhir.rest.server.Constants.PARAM_TAG, theSearchForTag);
paramMap.add(ca.uhn.fhir.rest.server.Constants.PARAM_SECURITY, theSearchForSecurity);
paramMap.add(ca.uhn.fhir.rest.server.Constants.PARAM_PROFILE, theSearchForProfile);