Mirror of https://github.com/hapifhir/hapi-fhir.git

Fulltext searching works

commit 3fc7a16735
parent 23b35661ea
@@ -110,7 +110,7 @@ public class OperationParameter implements IParameter {
 myMax = 1;
 }
 
-myAllowGet = IPrimitiveType.class.isAssignableFrom(myParameterType);
+myAllowGet = IPrimitiveType.class.isAssignableFrom(myParameterType) || String.class.equals(myParameterType);
 
 /*
 * The parameter can be of type string for validation methods - This is a bit
@@ -173,6 +173,12 @@ public class OperationParameter implements IParameter {
 DateRangeParam dateRangeParam = new DateRangeParam();
 dateRangeParam.setValuesAsQueryTokens(parameters);
 matchingParamValues.add(dateRangeParam);
+} else if (String.class.isAssignableFrom(myParameterType)) {
+
+for (String next : paramValues) {
+matchingParamValues.add(next);
+}
+
 } else {
 for (String nextValue : paramValues) {
 FhirContext ctx = theRequest.getServer().getFhirContext();
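
For illustration (not part of this commit): the change above lets an @OperationParam be bound directly to a plain Java String, which the new $suggest-keywords provider later in this diff relies on. A minimal sketch of such an operation method, using only APIs that appear elsewhere in this commit; the class name and the "$echo" operation name are made up for the example.

import ca.uhn.fhir.model.dstu2.resource.Parameters;
import ca.uhn.fhir.model.primitive.StringDt;
import ca.uhn.fhir.rest.annotation.Operation;
import ca.uhn.fhir.rest.annotation.OperationParam;

public class ExampleStringParamProvider {

    // Hypothetical operation used only for this sketch.
    @Operation(name = "$echo", idempotent = true)
    public Parameters echo(@OperationParam(name = "text", min = 1, max = 1) String theText) {
        // With the change above, a String parameter is accepted, including for GET invocations.
        Parameters retVal = new Parameters();
        retVal.addParameter().setName("text").setValue(new StringDt(theText));
        return retVal;
    }
}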
@@ -78,7 +78,6 @@
 <dependency>
 <groupId>org.javassist</groupId>
 <artifactId>javassist</artifactId>
-<version>3.20.0-GA</version>
 </dependency>
 
 <dependency>
@@ -239,6 +238,14 @@
 <groupId>org.hibernate</groupId>
 <artifactId>hibernate-search-orm</artifactId>
 </dependency>
+<dependency>
+<groupId>org.apache.lucene</groupId>
+<artifactId>lucene-highlighter</artifactId>
+</dependency>
+<dependency>
+<groupId>org.apache.lucene</groupId>
+<artifactId>lucene-analyzers-phonetic</artifactId>
+</dependency>
 
 <!-- Misc -->
 <dependency>
@@ -31,6 +31,7 @@ import java.util.Comparator;
 import java.util.Date;
 import java.util.HashMap;
 import java.util.HashSet;
+import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -1189,6 +1190,12 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
 return translateForcedIdToPid(theId, myEntityManager);
 }
 
+public static void validateResourceType(BaseHasResource theEntity, String theResourceName) {
+if (!theResourceName.equals(theEntity.getResourceType())) {
+throw new ResourceNotFoundException("Resource with ID " + theEntity.getIdDt().getIdPart() + " exists but it is not of type " + theResourceName + ", found resource of type " + theEntity.getResourceType());
+}
+}
+
 static Long translateForcedIdToPid(IIdType theId, EntityManager entityManager) {
 if (isValidPid(theId)) {
 return theId.getIdPartAsLong();
@@ -1271,9 +1278,9 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
 if (theEntity.isParamsCoordsPopulated()) {
 paramsCoords.addAll(theEntity.getParamsCoords());
 }
-Collection<ResourceLink> resourceLinks = new ArrayList<ResourceLink>();
+Collection<ResourceLink> existingResourceLinks = new ArrayList<ResourceLink>();
 if (theEntity.isHasLinks()) {
-resourceLinks.addAll(theEntity.getResourceLinks());
+existingResourceLinks.addAll(theEntity.getResourceLinks());
 }
 
 Set<ResourceIndexedSearchParamString> stringParams = null;
@@ -1326,6 +1333,19 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
 }
 
 links = extractResourceLinks(theEntity, theResource);
+
+/*
+ * If the existing resource already has links and those match links we still want,
+ * use them instead of removing them and re adding them
+ */
+for (Iterator<ResourceLink> existingLinkIter = existingResourceLinks.iterator(); existingLinkIter.hasNext(); ) {
+ResourceLink nextExisting = existingLinkIter.next();
+if (links.remove(nextExisting)) {
+existingLinkIter.remove();
+links.add(nextExisting);
+}
+}
+
 populateResourceIntoEntity(theResource, theEntity);
 
 theEntity.setUpdated(theUpdateTime);
@@ -1431,7 +1451,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
 }
 
 // Store resource links
-for (ResourceLink next : resourceLinks) {
+for (ResourceLink next : existingResourceLinks) {
 myEntityManager.remove(next);
 }
 for (ResourceLink next : links) {
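
Note (added for clarity, not part of this commit): the reuse loop above depends on ResourceLink equality; this commit also switches ResourceLink.equals()/hashCode() from myTargetResource to myTargetResourcePid (see the ResourceLink hunks below), so an unchanged link compares equal to its freshly extracted counterpart and is kept rather than deleted and re-inserted.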
@@ -1045,9 +1045,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IResource> extends BaseH
 }
 
 private void validateResourceType(BaseHasResource entity) {
-if (!myResourceName.equals(entity.getResourceType())) {
-throw new ResourceNotFoundException("Resource with ID " + entity.getIdDt().getIdPart() + " exists but it is not of type " + myResourceName + ", found resource of type " + entity.getResourceType());
-}
+validateResourceType(entity, myResourceName);
 }
 
 private void validateResourceTypeAndThrowIllegalArgumentException(IIdType theId) {
@@ -23,7 +23,9 @@ package ca.uhn.fhir.jpa.dao;
 import static org.apache.commons.lang3.StringUtils.isNotBlank;
 
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.HashSet;
+import java.util.Iterator;
 import java.util.List;
 import java.util.Set;
 
@@ -32,23 +34,37 @@ import javax.persistence.PersistenceContext;
 import javax.persistence.PersistenceContextType;
 
 import org.apache.commons.lang3.StringUtils;
+import org.apache.commons.lang3.Validate;
+import org.apache.commons.lang3.builder.HashCodeBuilder;
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.analysis.TokenStream;
+import org.apache.lucene.analysis.core.KeywordAnalyzer;
 import org.apache.lucene.search.Query;
+import org.apache.lucene.search.highlight.Formatter;
+import org.apache.lucene.search.highlight.Highlighter;
+import org.apache.lucene.search.highlight.QueryScorer;
+import org.apache.lucene.search.highlight.Scorer;
+import org.apache.lucene.search.highlight.SimpleHTMLFormatter;
+import org.apache.lucene.search.highlight.TokenGroup;
 import org.hibernate.search.jpa.FullTextEntityManager;
 import org.hibernate.search.jpa.FullTextQuery;
 import org.hibernate.search.query.dsl.BooleanJunction;
 import org.hibernate.search.query.dsl.QueryBuilder;
 import org.hl7.fhir.instance.model.api.IBaseResource;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.beans.factory.annotation.Qualifier;
 import org.springframework.transaction.annotation.Transactional;
 
-import ca.uhn.fhir.context.FhirContext;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Sets;
+
+import ca.uhn.fhir.jpa.dao.FhirSearchDao.MySuggestionFormatter;
 import ca.uhn.fhir.jpa.entity.ResourceTable;
 import ca.uhn.fhir.model.api.IQueryParameterType;
 import ca.uhn.fhir.model.dstu.resource.BaseResource;
 import ca.uhn.fhir.model.primitive.IdDt;
 import ca.uhn.fhir.rest.param.StringParam;
 import ca.uhn.fhir.rest.server.Constants;
+import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
+import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
 
 public class FhirSearchDao extends BaseHapiFhirDao<IBaseResource> implements ISearchDao {
 private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirSearchDao.class);
@@ -56,61 +72,6 @@ public class FhirSearchDao extends BaseHapiFhirDao<IBaseResource> implements ISe
 @PersistenceContext(type = PersistenceContextType.TRANSACTION)
 private EntityManager myEntityManager;
 
-@Transactional()
-@Override
-public List<Long> search(String theResourceName, SearchParameterMap theParams) {
-return doSearch(theResourceName, theParams, null);
-}
-
-private List<Long> doSearch(String theResourceName, SearchParameterMap theParams, Long theReferencingPid) {
-FullTextEntityManager em = org.hibernate.search.jpa.Search.getFullTextEntityManager(myEntityManager);
-
-QueryBuilder qb = em
-.getSearchFactory()
-.buildQueryBuilder()
-.forEntity(ResourceTable.class).get();
-
-BooleanJunction<?> bool = qb.bool();
-
-List<List<? extends IQueryParameterType>> contentAndTerms = theParams.remove(Constants.PARAM_CONTENT);
-addTextSearch(qb, bool, contentAndTerms, "myContentText");
-
-List<List<? extends IQueryParameterType>> textAndTerms = theParams.remove(Constants.PARAM_TEXT);
-addTextSearch(qb, bool, textAndTerms, "myNarrativeText");
-
-if (theReferencingPid != null) {
-bool.must(qb.keyword().onField("myResourceLinks.myTargetResourcePid").matching(theReferencingPid).createQuery());
-}
-
-if (bool.isEmpty()) {
-return null;
-}
-
-if (isNotBlank(theResourceName)) {
-bool.must(qb.keyword().onField("myResourceType").matching(theResourceName).createQuery());
-}
-
-Query luceneQuery = bool.createQuery();
-
-// wrap Lucene query in a javax.persistence.Query
-FullTextQuery jpaQuery = em.createFullTextQuery(luceneQuery, ResourceTable.class);
-jpaQuery.setProjection("myId");
-
-// execute search
-List<?> result = jpaQuery.getResultList();
-
-ArrayList<Long> retVal = new ArrayList<Long>();
-for (Object object : result) {
-Object[] nextArray = (Object[]) object;
-Long next = (Long)nextArray[0];
-if (next != null) {
-retVal.add(next);
-}
-}
-
-return retVal;
-}
-
 private void addTextSearch(QueryBuilder qb, BooleanJunction<?> bool, List<List<? extends IQueryParameterType>> contentAndTerms, String field) {
 if (contentAndTerms == null) {
 return;
@@ -131,9 +92,55 @@ public class FhirSearchDao extends BaseHapiFhirDao<IBaseResource> implements ISe
 }
 }
 
+private List<Long> doSearch(String theResourceName, SearchParameterMap theParams, Long theReferencingPid) {
+FullTextEntityManager em = org.hibernate.search.jpa.Search.getFullTextEntityManager(myEntityManager);
+
+QueryBuilder qb = em.getSearchFactory().buildQueryBuilder().forEntity(ResourceTable.class).get();
+
+BooleanJunction<?> bool = qb.bool();
+
+List<List<? extends IQueryParameterType>> contentAndTerms = theParams.remove(Constants.PARAM_CONTENT);
+addTextSearch(qb, bool, contentAndTerms, "myContentText");
+
+List<List<? extends IQueryParameterType>> textAndTerms = theParams.remove(Constants.PARAM_TEXT);
+addTextSearch(qb, bool, textAndTerms, "myNarrativeText");
+
+if (theReferencingPid != null) {
+bool.must(qb.keyword().onField("myResourceLinks.myTargetResourcePid").matching(theReferencingPid).createQuery());
+}
+
+if (bool.isEmpty()) {
+return null;
+}
+
+if (isNotBlank(theResourceName)) {
+bool.must(qb.keyword().onField("myResourceType").matching(theResourceName).createQuery());
+}
+
+Query luceneQuery = bool.createQuery();
+
+// wrap Lucene query in a javax.persistence.Query
+FullTextQuery jpaQuery = em.createFullTextQuery(luceneQuery, ResourceTable.class);
+jpaQuery.setProjection("myId");
+
+// execute search
+List<?> result = jpaQuery.getResultList();
+
+ArrayList<Long> retVal = new ArrayList<Long>();
+for (Object object : result) {
+Object[] nextArray = (Object[]) object;
+Long next = (Long) nextArray[0];
+if (next != null) {
+retVal.add(next);
+}
+}
+
+return retVal;
+}
+
 @Override
 public List<Long> everything(String theResourceName, SearchParameterMap theParams) {
 
 Long pid = null;
 if (theParams.get(BaseResource.SP_RES_ID) != null) {
 StringParam idParm = (StringParam) theParams.get(BaseResource.SP_RES_ID).get(0).get(0);
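
For illustration (not part of this commit): doSearch() above is driven by the _content and _text search parameters, which it pulls out of the SearchParameterMap and turns into Hibernate Search queries against the myContentText / myNarrativeText index fields. A minimal caller sketch, assuming an injected ISearchDao (mySearchDao) and assuming SearchParameterMap exposes an add(name, parameter) overload as used elsewhere in the JPA module; the resource type and search text are example values.

// Hypothetical caller of the new fulltext path.
SearchParameterMap params = new SearchParameterMap();
params.add(Constants.PARAM_CONTENT, new StringParam("fulltext address"));
List<Long> matchingPids = mySearchDao.search("Patient", params);
// matchingPids holds the resource PIDs projected from the Lucene index ("myId" above).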
@@ -148,6 +155,133 @@ public class FhirSearchDao extends BaseHapiFhirDao<IBaseResource> implements ISe
 return retVal;
 }
 
+@Transactional()
+@Override
+public List<Long> search(String theResourceName, SearchParameterMap theParams) {
+return doSearch(theResourceName, theParams, null);
+}
+
+@Override
+public List<Suggestion> suggestKeywords(String theContext, String theSearchParam, String theText) {
+Validate.notBlank(theContext, "theContext must be provided");
+Validate.notBlank(theSearchParam, "theSearchParam must be provided");
+Validate.notBlank(theText, "theSearchParam must be provided");
+
+long start = System.currentTimeMillis();
+
+String[] contextParts = StringUtils.split(theContext, '/');
+if (contextParts.length != 3 || "Patient".equals(contextParts[0]) == false || "$everything".equals(contextParts[2]) == false) {
+throw new InvalidRequestException("Invalid context: " + theContext);
+}
+IdDt contextId = new IdDt(contextParts[0], contextParts[1]);
+Long pid = BaseHapiFhirDao.translateForcedIdToPid(contextId, myEntityManager);
+
+FullTextEntityManager em = org.hibernate.search.jpa.Search.getFullTextEntityManager(myEntityManager);
+
+QueryBuilder qb = em.getSearchFactory().buildQueryBuilder().forEntity(ResourceTable.class).get();
+
+//@formatter:off
+Query textQuery = qb
+.phrase()
+.withSlop(2)
+.onField("myContentText").boostedTo(4.0f)
+.andField("myContentTextEdgeNGram").boostedTo(2.0f)
+.andField("myContentTextNGram").boostedTo(1.0f)
+.andField("myContentTextPhonetic").boostedTo(0.5f)
+.sentence(theText.toLowerCase()).createQuery();
+
+Query query = qb.bool()
+.must(qb.keyword().onField("myResourceLinks.myTargetResourcePid").matching(pid).createQuery())
+.must(textQuery)
+.createQuery();
+//@formatter:on
+
+FullTextQuery ftq = em.createFullTextQuery(query, ResourceTable.class);
+ftq.setProjection("myContentText");
+ftq.setMaxResults(20);
+
+List<?> resultList = ftq.getResultList();
+List<Suggestion> suggestions = Lists.newArrayList();
+for (Object next : resultList) {
+Object[] nextAsArray = (Object[]) next;
+String nextValue = (String) nextAsArray[0];
+
+try {
+MySuggestionFormatter formatter = new MySuggestionFormatter(suggestions);
+
+Scorer scorer = new QueryScorer(textQuery);
+Highlighter highlighter = new Highlighter(formatter, scorer);
+
+Analyzer analyzer = em.getSearchFactory().getAnalyzer(ResourceTable.class);
+highlighter.getBestFragment(analyzer.tokenStream("myContentText", nextValue), nextValue);
+highlighter.getBestFragment(analyzer.tokenStream("myContentTextNGram", nextValue), nextValue);
+highlighter.getBestFragment(analyzer.tokenStream("myContentTextEdgeNGram", nextValue), nextValue);
+highlighter.getBestFragment(analyzer.tokenStream("myContentTextPhonetic", nextValue), nextValue);
+} catch (Exception e) {
+throw new InternalErrorException(e);
+}
+
+}
+
+Collections.sort(suggestions);
+
+Set<String> terms = Sets.newHashSet();
+for (Iterator<Suggestion> iter = suggestions.iterator(); iter.hasNext(); ) {
+if (!terms.add(iter.next().getTerm())) {
+iter.remove();
+}
+}
+
+long delay = System.currentTimeMillis()- start;
+ourLog.info("Provided {} suggestions for term {} in {} ms", new Object[] {terms.size(), theText, delay});
+
+return suggestions;
+}
+
+public static class Suggestion implements Comparable<Suggestion> {
+public Suggestion(String theTerm, float theScore) {
+myTerm = theTerm;
+myScore = theScore;
+}
+
+public String getTerm() {
+return myTerm;
+}
+
+public float getScore() {
+return myScore;
+}
+
+private String myTerm;
+private float myScore;
+
+@Override
+public int compareTo(Suggestion theO) {
+return Float.compare(theO.myScore, myScore);
+}
+
+@Override
+public String toString() {
+return "Suggestion[myTerm=" + myTerm + ", myScore=" + myScore + "]";
+}
+}
+
+public class MySuggestionFormatter implements Formatter {
+
+private List<Suggestion> mySuggestions;
+
+public MySuggestionFormatter(List<Suggestion> theSuggestions) {
+mySuggestions = theSuggestions;
+}
+
+@Override
+public String highlightTerm(String theOriginalText, TokenGroup theTokenGroup) {
+if (theTokenGroup.getTotalScore() > 0) {
+mySuggestions.add(new Suggestion(theOriginalText, theTokenGroup.getTotalScore()));
+}
+return null;
+}
+
+}
+
 }
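
For illustration (not part of this commit): suggestKeywords() above scopes a boosted phrase query to resources linked to the given patient, then re-runs the Lucene highlighter over each stored myContentText value so that MySuggestionFormatter can collect scored fragments; theSearchParam is required but, as implemented here, the query always runs against the content-text fields. A minimal direct-DAO usage sketch mirroring the tests later in this diff; the patient ID is an example and mySearchDao/ourLog are assumed to be in scope.

// Context must look like "Patient/<id>/$everything", as validated above.
List<Suggestion> suggestions = mySearchDao.suggestKeywords("Patient/123/$everything", "_content", "zxc");
for (Suggestion next : suggestions) {
    ourLog.info("suggested term {} with score {}", next.getTerm(), next.getScore());
}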
@@ -22,8 +22,12 @@ package ca.uhn.fhir.jpa.dao;
 
 import java.util.List;
 
+import ca.uhn.fhir.jpa.dao.FhirSearchDao.Suggestion;
+
 public interface ISearchDao {
 
+List<Suggestion> suggestKeywords(String theContext, String theSearchParam, String theText);
+
 List<Long> search(String theResourceName, SearchParameterMap theParams);
 
 List<Long> everything(String theResourceName, SearchParameterMap theParams);
@@ -36,7 +36,6 @@ import javax.persistence.Table;
 import org.apache.commons.lang3.Validate;
 import org.apache.commons.lang3.builder.EqualsBuilder;
 import org.apache.commons.lang3.builder.HashCodeBuilder;
-import org.hibernate.search.annotations.ContainedIn;
 import org.hibernate.search.annotations.Field;
 
 @Entity
@@ -72,9 +71,9 @@ public class ResourceLink implements Serializable {
 @Column(name = "TARGET_RESOURCE_ID", insertable = false, updatable = false, nullable = false)
 @Field()
 private Long myTargetResourcePid;
 
 public ResourceLink() {
-// nothing
+super();
 }
 
 public ResourceLink(String theSourcePath, ResourceTable theSourceResource, ResourceTable theTargetResource) {
@@ -101,7 +100,7 @@ public class ResourceLink implements Serializable {
 EqualsBuilder b = new EqualsBuilder();
 b.append(mySourcePath, obj.mySourcePath);
 b.append(mySourceResource, obj.mySourceResource);
-b.append(myTargetResource, obj.myTargetResource);
+b.append(myTargetResourcePid, obj.myTargetResourcePid);
 return b.isEquals();
 }
 
@@ -130,7 +129,7 @@ public class ResourceLink implements Serializable {
 HashCodeBuilder b = new HashCodeBuilder();
 b.append(mySourcePath);
 b.append(mySourceResource);
-b.append(myTargetResource);
+b.append(myTargetResourcePid);
 return b.toHashCode();
 }
 
@@ -29,7 +29,6 @@ import java.util.Set;
 
 import javax.persistence.CascadeType;
 import javax.persistence.Column;
-import javax.persistence.Embedded;
 import javax.persistence.Entity;
 import javax.persistence.FetchType;
 import javax.persistence.GeneratedValue;
@@ -42,9 +41,29 @@ import javax.persistence.Transient;
 
 import org.apache.commons.lang3.builder.ToStringBuilder;
 import org.apache.commons.lang3.builder.ToStringStyle;
+import org.apache.lucene.analysis.core.KeywordTokenizerFactory;
+import org.apache.lucene.analysis.core.LowerCaseFilterFactory;
+import org.apache.lucene.analysis.core.StopFilterFactory;
+import org.apache.lucene.analysis.miscellaneous.WordDelimiterFilterFactory;
+import org.apache.lucene.analysis.ngram.EdgeNGramFilterFactory;
+import org.apache.lucene.analysis.ngram.NGramFilterFactory;
+import org.apache.lucene.analysis.pattern.PatternReplaceFilterFactory;
+import org.apache.lucene.analysis.phonetic.PhoneticFilterFactory;
+import org.apache.lucene.analysis.snowball.SnowballPorterFilterFactory;
+import org.apache.lucene.analysis.standard.StandardFilterFactory;
+import org.apache.lucene.analysis.standard.StandardTokenizerFactory;
+import org.hibernate.search.annotations.Analyze;
+import org.hibernate.search.annotations.Analyzer;
+import org.hibernate.search.annotations.AnalyzerDef;
+import org.hibernate.search.annotations.AnalyzerDefs;
 import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Fields;
 import org.hibernate.search.annotations.Indexed;
 import org.hibernate.search.annotations.IndexedEmbedded;
+import org.hibernate.search.annotations.Parameter;
+import org.hibernate.search.annotations.Store;
+import org.hibernate.search.annotations.TokenFilterDef;
+import org.hibernate.search.annotations.TokenizerDef;
 
 import ca.uhn.fhir.jpa.search.IndexNonDeletedInterceptor;
 import ca.uhn.fhir.model.primitive.IdDt;
@@ -60,6 +79,62 @@ import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
 @Index(name = "IDX_RES_PROFILE", columnList="RES_PROFILE"),
 @Index(name = "IDX_INDEXSTATUS", columnList="SP_INDEX_STATUS")
 })
+@AnalyzerDefs({
+@AnalyzerDef(name = "autocompleteEdgeAnalyzer",
+tokenizer = @TokenizerDef(factory = KeywordTokenizerFactory.class),
+filters = {
+@TokenFilterDef(factory = PatternReplaceFilterFactory.class, params = {
+@Parameter(name = "pattern",value = "([^a-zA-Z0-9\\.])"),
+@Parameter(name = "replacement", value = " "),
+@Parameter(name = "replace", value = "all")
+}),
+@TokenFilterDef(factory = LowerCaseFilterFactory.class),
+@TokenFilterDef(factory = StopFilterFactory.class),
+@TokenFilterDef(factory = EdgeNGramFilterFactory.class, params = {
+@Parameter(name = "minGramSize", value = "3"),
+@Parameter(name = "maxGramSize", value = "50")
+})
+}),
+@AnalyzerDef(name = "autocompletePhoneticAnalyzer",
+tokenizer = @TokenizerDef(factory=StandardTokenizerFactory.class),
+filters = {
+@TokenFilterDef(factory=StandardFilterFactory.class),
+@TokenFilterDef(factory=StopFilterFactory.class),
+@TokenFilterDef(factory=PhoneticFilterFactory.class, params = {
+@Parameter(name="encoder", value="DoubleMetaphone")
+}),
+@TokenFilterDef(factory=SnowballPorterFilterFactory.class, params = {
+@Parameter(name="language", value="English")
+})
+}),
+@AnalyzerDef(name = "autocompleteNGramAnalyzer",
+tokenizer = @TokenizerDef(factory = StandardTokenizerFactory.class),
+filters = {
+@TokenFilterDef(factory = WordDelimiterFilterFactory.class),
+@TokenFilterDef(factory = LowerCaseFilterFactory.class),
+@TokenFilterDef(factory = NGramFilterFactory.class, params = {
+@Parameter(name = "minGramSize", value = "3"),
+@Parameter(name = "maxGramSize", value = "20")
+}),
+// @TokenFilterDef(factory = PatternReplaceFilterFactory.class, params = {
+// @Parameter(name = "pattern",value = "([^a-zA-Z0-9\\.])"),
+// @Parameter(name = "replacement", value = " "),
+// @Parameter(name = "replace", value = "all")
+// })
+}),
+@AnalyzerDef(name = "standardAnalyzer",
+tokenizer = @TokenizerDef(factory = StandardTokenizerFactory.class),
+filters = {
+// @TokenFilterDef(factory = WordDelimiterFilterFactory.class),
+@TokenFilterDef(factory = LowerCaseFilterFactory.class),
+// @TokenFilterDef(factory = PatternReplaceFilterFactory.class, params = {
+// @Parameter(name = "pattern", value = "([^a-zA-Z0-9\\.])"),
+// @Parameter(name = "replacement", value = " "),
+// @Parameter(name = "replace", value = "all")
+// })
+}) // Def
+}
+)
 //@formatter:on
 public class ResourceTable extends BaseHasResource implements Serializable {
 private static final int MAX_LANGUAGE_LENGTH = 20;
@@ -72,8 +147,15 @@ public class ResourceTable extends BaseHasResource implements Serializable {
 /**
 * Holds the narrative text only - Used for Fulltext searching but not directly stored in the DB
 */
+//@formatter:off
 @Transient()
-@Field()
+@Fields({
+@Field(name = "myContentText", index = org.hibernate.search.annotations.Index.YES, store = Store.YES, analyze = Analyze.YES, analyzer = @Analyzer(definition = "standardAnalyzer")),
+@Field(name = "myContentTextEdgeNGram", index = org.hibernate.search.annotations.Index.YES, store = Store.NO, analyze = Analyze.YES, analyzer = @Analyzer(definition = "autocompleteEdgeAnalyzer")),
+@Field(name = "myContentTextNGram", index = org.hibernate.search.annotations.Index.YES, store = Store.NO, analyze = Analyze.YES, analyzer = @Analyzer(definition = "autocompleteNGramAnalyzer")),
+@Field(name = "myContentTextPhonetic", index = org.hibernate.search.annotations.Index.YES, store = Store.NO, analyze = Analyze.YES, analyzer = @Analyzer(definition = "autocompletePhoneticAnalyzer"))
+})
+//@formatter:on
 private String myContentText;
 
 @Column(name = "SP_HAS_LINKS")
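
Note (added for clarity, not part of this commit): the four projections of myContentText above pair one-to-one with the analyzer definitions in this file and with the boosts applied by the phrase query in FhirSearchDao.suggestKeywords earlier in this diff:

    myContentText           -> standardAnalyzer             (stored, boost 4.0)
    myContentTextEdgeNGram  -> autocompleteEdgeAnalyzer     (boost 2.0)
    myContentTextNGram      -> autocompleteNGramAnalyzer    (boost 1.0)
    myContentTextPhonetic   -> autocompletePhoneticAnalyzer (boost 0.5)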
@@ -1,5 +1,9 @@
 package ca.uhn.fhir.jpa.provider;
 
+import static org.apache.commons.lang3.StringUtils.isBlank;
+
+import java.util.List;
+
 /*
 * #%L
 * HAPI FHIR JPA Server
@@ -29,11 +33,15 @@ import javax.servlet.http.HttpServletRequest;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Qualifier;
 
+import ca.uhn.fhir.jpa.dao.FhirSearchDao.Suggestion;
 import ca.uhn.fhir.jpa.dao.IFhirSystemDao;
+import ca.uhn.fhir.jpa.dao.ISearchDao;
 import ca.uhn.fhir.model.api.annotation.Description;
 import ca.uhn.fhir.model.dstu2.composite.MetaDt;
 import ca.uhn.fhir.model.dstu2.resource.Bundle;
 import ca.uhn.fhir.model.dstu2.resource.Parameters;
+import ca.uhn.fhir.model.dstu2.resource.Parameters.Parameter;
+import ca.uhn.fhir.model.primitive.DecimalDt;
 import ca.uhn.fhir.model.primitive.IntegerDt;
 import ca.uhn.fhir.model.primitive.StringDt;
 import ca.uhn.fhir.rest.annotation.Operation;
@@ -41,12 +49,16 @@ import ca.uhn.fhir.rest.annotation.OperationParam;
 import ca.uhn.fhir.rest.annotation.Transaction;
 import ca.uhn.fhir.rest.annotation.TransactionParam;
 import ca.uhn.fhir.rest.method.RequestDetails;
+import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
 
 public class JpaSystemProviderDstu2 extends BaseJpaSystemProvider<Bundle> {
 
 @Autowired()
 @Qualifier("mySystemDaoDstu2")
 private IFhirSystemDao<Bundle> mySystemDao;
 
+@Autowired
+private ISearchDao mySearchDao;
+
 //@formatter:off
 // This is generated by hand:
@@ -176,12 +188,43 @@ public class JpaSystemProviderDstu2 extends BaseJpaSystemProvider<Bundle> {
 @OperationParam(name="return", type=MetaDt.class)
 })
 //@formatter:on
-public Parameters operation() {
+public Parameters meta() {
 Parameters parameters = new Parameters();
 parameters.addParameter().setName("return").setValue(getDao().metaGetOperation());
 return parameters;
 }
 
+@Operation(name="$suggest-keywords", idempotent=true)
+public Parameters suggestKeywords(
+@OperationParam(name="context", min=1, max=1) String theContext,
+@OperationParam(name="searchParam", min=1, max=1) String theSearchParam,
+@OperationParam(name="text", min=1, max=1) String theText
+) {
+
+if (isBlank(theContext)) {
+throw new InvalidRequestException("Parameter 'context' must be provided");
+}
+if (isBlank(theSearchParam)) {
+throw new InvalidRequestException("Parameter 'searchParam' must be provided");
+}
+if (isBlank(theText)) {
+throw new InvalidRequestException("Parameter 'text' must be provided");
+}
+
+List<Suggestion> keywords = mySearchDao.suggestKeywords(theContext, theSearchParam, theText);
+
+Parameters retVal = new Parameters();
+for (Suggestion next : keywords) {
+//@formatter:off
+retVal.addParameter()
+.addPart(new Parameter().setName("keyword").setValue(new StringDt(next.getTerm())))
+.addPart(new Parameter().setName("score").setValue(new DecimalDt(next.getScore())));
+//@formatter:on
+}
+
+return retVal;
+}
+
 @Transaction
 public Bundle transaction(RequestDetails theRequestDetails, @TransactionParam Bundle theResources) {
 startRequest(theRequestDetails);
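
For illustration (not part of this commit): the new $suggest-keywords operation is a system-level GET taking context, searchParam and text, and returns a Parameters resource whose entries carry keyword/score parts. A client-side sketch in the same HttpClient style as SystemProviderDstu2Test later in this diff; the base URL and the httpClient (CloseableHttpClient) and ctx (FhirContext) variables are assumed for the example, and imports are omitted.

// Hypothetical server base URL; mirrors the request built in SystemProviderDstu2Test.
HttpGet get = new HttpGet("http://localhost:8080/fhir/$suggest-keywords"
        + "?context=Patient/123/$everything&searchParam=_content&text=zxc&_format=xml");
CloseableHttpResponse response = httpClient.execute(get);
try {
    String body = IOUtils.toString(response.getEntity().getContent());
    // Each returned parameter has a "keyword" part (StringDt) and a "score" part (DecimalDt).
    Parameters result = ctx.newXmlParser().parseResource(Parameters.class, body);
} finally {
    response.close();
}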
@@ -7,7 +7,6 @@ import java.io.IOException;
 import java.io.InputStream;
 
 import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
 
 import org.apache.commons.io.IOUtils;
 import org.hibernate.search.jpa.FullTextEntityManager;
@@ -77,7 +76,8 @@ public abstract class BaseJpaDstu2Test extends BaseJpaTest {
 
 @Autowired
 protected ApplicationContext myAppCtx;
+@Autowired
+protected ISearchDao mySearchDao;
 @Autowired
 @Qualifier("myConceptMapDaoDstu2")
 protected IFhirResourceDao<ConceptMap> myConceptMapDao;
@@ -4,8 +4,7 @@ import static org.hamcrest.Matchers.contains;
 import static org.hamcrest.Matchers.containsInAnyOrder;
 import static org.hamcrest.Matchers.empty;
 import static org.hamcrest.Matchers.not;
-import static org.junit.Assert.assertNotEquals;
-import static org.junit.Assert.assertThat;
+import static org.junit.Assert.*;
 import static org.mockito.Mockito.mock;
 
 import java.util.List;
@@ -15,6 +14,7 @@ import javax.servlet.http.HttpServletRequest;
 import org.hl7.fhir.instance.model.api.IIdType;
 import org.junit.Test;
 
+import ca.uhn.fhir.jpa.dao.FhirSearchDao.Suggestion;
 import ca.uhn.fhir.model.dstu2.resource.Device;
 import ca.uhn.fhir.model.dstu2.resource.Observation;
 import ca.uhn.fhir.model.dstu2.resource.Patient;
@@ -27,6 +27,72 @@ import ca.uhn.fhir.rest.server.Constants;
 public class FhirResourceDaoDstu2SearchFtTest extends BaseJpaDstu2Test {
 
 private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirResourceDaoDstu2SearchFtTest.class);
+
+@Test
+public void testSuggest() {
+Patient patient = new Patient();
+patient.addName().addFamily("testSuggest");
+IIdType ptId = myPatientDao.create(patient).getId().toUnqualifiedVersionless();
+
+Observation obs = new Observation();
+obs.getSubject().setReference(ptId);
+obs.getCode().setText("ZXCVBNM ASDFGHJKL QWERTYUIOPASDFGHJKL");
+myObservationDao.create(obs);
+
+obs = new Observation();
+obs.getSubject().setReference(ptId);
+obs.getCode().setText("MNBVCXZ");
+myObservationDao.create(obs);
+
+obs = new Observation();
+obs.getSubject().setReference(ptId);
+obs.getCode().setText("ZXC HELLO");
+myObservationDao.create(obs);
+
+/*
+ * These shouldn't match since they're for another patient
+ */
+patient = new Patient();
+patient.addName().addFamily("testSuggest2");
+IIdType ptId2 = myPatientDao.create(patient).getId().toUnqualifiedVersionless();
+
+Observation obs2 = new Observation();
+obs2.getSubject().setReference(ptId2);
+obs2.getCode().setText("ZXCVBNMZZ");
+myObservationDao.create(obs2);
+
+List<Suggestion> output = mySearchDao.suggestKeywords("Patient/" + ptId.getIdPart() + "/$everything", "_content", "ZXCVBNM");
+ourLog.info("Found: " + output);
+assertEquals(4, output.size());
+assertEquals("ZXCVBNM", output.get(0).getTerm());
+assertEquals("ZXCVBNM ASDFGHJKL QWERTYUIOPASDFGHJKL", output.get(1).getTerm());
+assertEquals("ZXC", output.get(2).getTerm());
+assertEquals("ZXC HELLO", output.get(3).getTerm());
+
+output = mySearchDao.suggestKeywords("Patient/" + ptId.getIdPart() + "/$everything", "_content", "ZXC");
+ourLog.info("Found: " + output);
+assertEquals(4, output.size());
+assertEquals("ZXC", output.get(0).getTerm());
+assertEquals("ZXC HELLO", output.get(1).getTerm());
+assertEquals("ZXCVBNM", output.get(2).getTerm());
+assertEquals("ZXCVBNM ASDFGHJKL QWERTYUIOPASDFGHJKL", output.get(3).getTerm());
+
+output = mySearchDao.suggestKeywords("Patient/" + ptId.getIdPart() + "/$everything", "_content", "HELO");
+ourLog.info("Found: " + output);
+assertEquals(1, output.size());
+assertEquals("HELLO", output.get(0).getTerm());
+
+output = mySearchDao.suggestKeywords("Patient/" + ptId.getIdPart() + "/$everything", "_content", "Z");
+ourLog.info("Found: " + output);
+assertEquals(0, output.size());
+
+output = mySearchDao.suggestKeywords("Patient/" + ptId.getIdPart() + "/$everything", "_content", "ZX");
+ourLog.info("Found: " + output);
+assertEquals(1, output.size());
+assertEquals("ZXC", output.get(0).getTerm());
+
+}
+
 
 @Test
 public void testSearchAndReindex() {
@@ -320,7 +386,7 @@ public class FhirResourceDaoDstu2SearchFtTest extends BaseJpaDstu2Test {
 IIdType pId1;
 {
 Patient patient = new Patient();
-patient.addName().addGiven("methodName");
+patient.addName().addGiven(methodName);
 patient.addAddress().addLine("My fulltext address");
 pId1 = myPatientDao.create(patient).getId().toUnqualifiedVersionless();
 }
@@ -6,15 +6,9 @@ import static org.junit.Assert.assertThat;
 
 import java.util.List;
 
-import org.hibernate.search.jpa.FullTextEntityManager;
-import org.hibernate.search.jpa.Search;
-import org.junit.Before;
 import org.junit.Test;
 import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.test.context.ContextConfiguration;
-import org.springframework.transaction.annotation.Transactional;
 
-import ca.uhn.fhir.jpa.entity.ResourceTable;
 import ca.uhn.fhir.model.dstu2.resource.Organization;
 import ca.uhn.fhir.model.dstu2.resource.Patient;
 import ca.uhn.fhir.rest.param.StringAndListParam;
@@ -27,14 +21,6 @@ public class FhirSearchDaoDstu2Test extends BaseJpaDstu2Test {
 @Autowired
 private ISearchDao mySearchDao;
 
-@Before
-@Transactional
-public void beforeFlushFT() {
-FullTextEntityManager ftem = Search.getFullTextEntityManager(myEntityManager);
-ftem.purgeAll(ResourceTable.class);
-ftem.flushToIndexes();
-}
-
 @Test
 public void testContentSearch() {
 Long id1;
@@ -21,9 +21,12 @@ import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
 import org.eclipse.jetty.server.Server;
 import org.eclipse.jetty.servlet.ServletContextHandler;
 import org.eclipse.jetty.servlet.ServletHolder;
+import org.hl7.fhir.instance.model.api.IIdType;
 import org.junit.AfterClass;
 import org.junit.Before;
 import org.junit.Test;
+import org.springframework.transaction.annotation.Propagation;
+import org.springframework.transaction.annotation.Transactional;
 
 import ca.uhn.fhir.context.FhirContext;
 import ca.uhn.fhir.jpa.dao.BaseJpaDstu2Test;
@@ -32,12 +35,16 @@ import ca.uhn.fhir.jpa.rp.dstu2.OrganizationResourceProvider;
 import ca.uhn.fhir.jpa.rp.dstu2.PatientResourceProvider;
 import ca.uhn.fhir.jpa.testutil.RandomServerPortProvider;
 import ca.uhn.fhir.model.dstu2.resource.Bundle;
+import ca.uhn.fhir.model.dstu2.resource.Observation;
 import ca.uhn.fhir.model.dstu2.resource.OperationDefinition;
 import ca.uhn.fhir.model.dstu2.resource.OperationOutcome;
+import ca.uhn.fhir.model.dstu2.resource.Parameters;
 import ca.uhn.fhir.model.dstu2.resource.Patient;
 import ca.uhn.fhir.model.dstu2.valueset.BundleTypeEnum;
 import ca.uhn.fhir.model.dstu2.valueset.HTTPVerbEnum;
+import ca.uhn.fhir.model.primitive.DecimalDt;
 import ca.uhn.fhir.model.primitive.IdDt;
+import ca.uhn.fhir.model.primitive.StringDt;
 import ca.uhn.fhir.rest.client.IGenericClient;
 import ca.uhn.fhir.rest.server.EncodingEnum;
 import ca.uhn.fhir.rest.server.FifoMemoryPagingProvider;
@@ -177,6 +184,90 @@ public class SystemProviderDstu2Test extends BaseJpaDstu2Test {
 }
 }
 
+@Transactional(propagation=Propagation.NEVER)
+@Test
+public void testSuggestKeywords() throws Exception {
+
+Patient patient = new Patient();
+patient.addName().addFamily("testSuggest");
+IIdType ptId = myPatientDao.create(patient).getId().toUnqualifiedVersionless();
+
+Observation obs = new Observation();
+obs.getCode().setText("ZXCVBNM ASDFGHJKL QWERTYUIOPASDFGHJKL");
+obs.getSubject().setReference(ptId);
+IIdType obsId = myObservationDao.create(obs).getId().toUnqualifiedVersionless();
+
+obs = new Observation();
+obs.setId(obsId);
+obs.getSubject().setReference(ptId);
+obs.getCode().setText("ZXCVBNM ASDFGHJKL QWERTYUIOPASDFGHJKL");
+myObservationDao.update(obs);
+
+HttpGet get = new HttpGet(ourServerBase + "/$suggest-keywords?context=Patient/" + ptId.getIdPart() + "/$everything&searchParam=_content&text=zxc&_pretty=true&_format=xml");
+CloseableHttpResponse http = ourHttpClient.execute(get);
+try {
+assertEquals(200, http.getStatusLine().getStatusCode());
+String output = IOUtils.toString(http.getEntity().getContent());
+ourLog.info(output);
+
+Parameters parameters = ourCtx.newXmlParser().parseResource(Parameters.class, output);
+assertEquals(2, parameters.getParameter().size());
+assertEquals("keyword", parameters.getParameter().get(0).getPart().get(0).getName());
+assertEquals(new StringDt("ZXCVBNM"), parameters.getParameter().get(0).getPart().get(0).getValue());
+assertEquals("score", parameters.getParameter().get(0).getPart().get(1).getName());
+assertEquals(new DecimalDt("1.0"), parameters.getParameter().get(0).getPart().get(1).getValue());
+
+} finally {
+http.close();
+}
+}
+
+@Test
+public void testSuggestKeywordsInvalid() throws Exception {
+Patient patient = new Patient();
+patient.addName().addFamily("testSuggest");
+IIdType ptId = myPatientDao.create(patient).getId().toUnqualifiedVersionless();
+
+Observation obs = new Observation();
+obs.getSubject().setReference(ptId);
+obs.getCode().setText("ZXCVBNM ASDFGHJKL QWERTYUIOPASDFGHJKL");
+myObservationDao.create(obs);
+
+HttpGet get = new HttpGet(ourServerBase + "/$suggest-keywords");
+CloseableHttpResponse http = ourHttpClient.execute(get);
+try {
+assertEquals(400, http.getStatusLine().getStatusCode());
+String output = IOUtils.toString(http.getEntity().getContent());
+ourLog.info(output);
+assertThat(output, containsString("Parameter 'context' must be provided"));
+} finally {
+http.close();
+}
+
+get = new HttpGet(ourServerBase + "/$suggest-keywords?context=Patient/" + ptId.getIdPart() + "/$everything");
+http = ourHttpClient.execute(get);
+try {
+assertEquals(400, http.getStatusLine().getStatusCode());
+String output = IOUtils.toString(http.getEntity().getContent());
+ourLog.info(output);
+assertThat(output, containsString("Parameter 'searchParam' must be provided"));
+} finally {
+http.close();
+}
+
+get = new HttpGet(ourServerBase + "/$suggest-keywords?context=Patient/" + ptId.getIdPart() + "/$everything&searchParam=aa");
+http = ourHttpClient.execute(get);
+try {
+assertEquals(400, http.getStatusLine().getStatusCode());
+String output = IOUtils.toString(http.getEntity().getContent());
+ourLog.info(output);
+assertThat(output, containsString("Parameter 'text' must be provided"));
+} finally {
+http.close();
+}
+
+}
+
 @Test
 public void testGetOperationDefinition() {
 OperationDefinition op = ourClient.read(OperationDefinition.class, "get-resource-counts");
pom.xml (10 changed lines)

@@ -367,6 +367,16 @@
 <artifactId>httpcore</artifactId>
 <version>4.4</version>
 </dependency>
+<dependency>
+<groupId>org.apache.lucene</groupId>
+<artifactId>lucene-highlighter</artifactId>
+<version>5.3.0</version>
+</dependency>
+<dependency>
+<groupId>org.apache.lucene</groupId>
+<artifactId>lucene-analyzers-phonetic</artifactId>
+<version>5.3.0</version>
+</dependency>
 <dependency>
 <groupId>org.apache.maven.doxia</groupId>
 <artifactId>doxia-module-markdown</artifactId>