From 8ed9ca83af72c3f6f172539963ccbd2ef589d1ec Mon Sep 17 00:00:00 2001
From: jamesagnew
Date: Thu, 3 Dec 2015 21:52:40 -0500
Subject: [PATCH] Fix build

---
 .../ca/uhn/fhir/rest/param/TokenParam.java    | 13 ++-
 .../ca/uhn/fhir/jpa/dao/FhirSearchDao.java    | 92 +++++++++++++------
 .../BaseResourceIndexedSearchParam.java       |  5 +-
 .../ResourceIndexedSearchParamString.java     | 88 +++++++++---------
 .../FhirResourceDaoDstu21SearchFtTest.java    | 27 ++++++
 5 files changed, 148 insertions(+), 77 deletions(-)

diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/TokenParam.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/TokenParam.java
index c137982b48c..00ce7966e1a 100644
--- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/TokenParam.java
+++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/TokenParam.java
@@ -38,7 +38,11 @@ public class TokenParam extends BaseParam implements IQueryParameterType {
 	private boolean myText;
 	private String myValue;
 
+	/**
+	 * Constructor
+	 */
 	public TokenParam() {
+		super();
 	}
 
 	/**
@@ -143,16 +147,19 @@ public class TokenParam extends BaseParam implements IQueryParameterType {
 		return myText;
 	}
 
-	public void setSystem(String theSystem) {
+	public TokenParam setSystem(String theSystem) {
 		mySystem = theSystem;
+		return this;
 	}
 
-	public void setText(boolean theText) {
+	public TokenParam setText(boolean theText) {
 		myText = theText;
+		return this;
 	}
 
-	public void setValue(String theValue) {
+	public TokenParam setValue(String theValue) {
 		myValue = theValue;
+		return this;
 	}
 
 	@Override
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FhirSearchDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FhirSearchDao.java
index ad0ea58713d..f2dda1ff35f 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FhirSearchDao.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FhirSearchDao.java
@@ -91,26 +91,58 @@ public class FhirSearchDao extends BaseHapiFhirDao implements ISe
 	private List<Long> doSearch(String theResourceName, SearchParameterMap theParams, Long theReferencingPid) {
 		FullTextEntityManager em = org.hibernate.search.jpa.Search.getFullTextEntityManager(myEntityManager);
+
+		List<Long> pids = null;
 
 		/*
 		 * Handle textual params
 		 */
 		for (String nextParamName : theParams.keySet()) {
 			for (List<? extends IQueryParameterType> nextAndList : theParams.get(nextParamName)) {
-				for (Iterator<? extends IQueryParameterType> orIterator = nextAndList.iterator(); orIterator.hasNext(); ) {
+				for (Iterator<? extends IQueryParameterType> orIterator = nextAndList.iterator(); orIterator.hasNext();) {
 					IQueryParameterType nextParam = orIterator.next();
-					if (nextParam instanceof TokenParam) {
-						TokenParam nextTokenParam = (TokenParam)nextParam;
+					if (nextParam instanceof TokenParam && false) {
+						TokenParam nextTokenParam = (TokenParam) nextParam;
 						if (nextTokenParam.isText()) {
+							orIterator.remove();
 							QueryBuilder qb = em.getSearchFactory().buildQueryBuilder().forEntity(ResourceIndexedSearchParamString.class).get();
 							BooleanJunction<?> bool = qb.bool();
-							bool.must(qb.keyword().onField("myParamsString").matching(nextTokenParam.getValue()).createQuery());
+							bool.must(qb.keyword().onField("myParamName").matching(nextParamName).createQuery());
+							if (isNotBlank(theResourceName)) {
+								bool.must(qb.keyword().onField("myResourceType").matching(theResourceName).createQuery());
+							}
+
+							//@formatter:off
+							Query textQuery = qb
+								.phrase()
+								.withSlop(2)
+								.onField("myValueText").boostedTo(4.0f)
+								.andField("myValueTextEdgeNGram").boostedTo(2.0f)
+								.andField("myValueTextNGram").boostedTo(1.0f)
+								.sentence(nextTokenParam.getValue().toLowerCase()).createQuery();
+							bool.must(textQuery);
+							//@formatter:on
+
+							FullTextQuery ftq = em.createFullTextQuery(bool.createQuery(), ResourceTable.class);
+							ftq.setProjection("myResourcePid");
+
+							List<?> resultList = ftq.getResultList();
+							pids = new ArrayList<Long>();
+							for (Object next : resultList) {
+								Object[] nextAsArray = (Object[]) next;
+								Long nextValue = (Long) nextAsArray[0];
+								pids.add(nextValue);
+							}
 						}
 					}
 				}
 			}
 		}
+
+		if (pids != null && pids.isEmpty()) {
+			return pids;
+		}
 
 		QueryBuilder qb = em.getSearchFactory().buildQueryBuilder().forEntity(ResourceTable.class).get();
 		BooleanJunction<?> bool = qb.bool();
@@ -122,7 +154,7 @@ public class FhirSearchDao extends BaseHapiFhirDao implements ISe
 		addTextSearch(qb, bool, contentAndTerms, "myContentText");
 
 		/*
-		 * Handle _text parameter (resource narrative content) 
+		 * Handle _text parameter (resource narrative content)
 		 */
 		List<List<? extends IQueryParameterType>> textAndTerms = theParams.remove(Constants.PARAM_TEXT);
 		addTextSearch(qb, bool, textAndTerms, "myNarrativeText");
@@ -132,7 +164,7 @@ public class FhirSearchDao extends BaseHapiFhirDao implements ISe
 		}
 
 		if (bool.isEmpty()) {
-			return null;
+			return pids;
 		}
 
 		if (isNotBlank(theResourceName)) {
@@ -148,11 +180,13 @@ public class FhirSearchDao extends BaseHapiFhirDao implements ISe
 
 		// execute search
 		List<?> result = jpaQuery.getResultList();
+		HashSet<Long> pidsSet = pids != null ? new HashSet<Long>(pids) : null;
+
 		ArrayList<Long> retVal = new ArrayList<Long>();
 		for (Object object : result) {
 			Object[] nextArray = (Object[]) object;
 			Long next = (Long) nextArray[0];
-			if (next != null) {
+			if (next != null && (pidsSet == null || pidsSet.contains(next))) {
 				retVal.add(next);
 			}
 		}
@@ -188,16 +222,16 @@ public class FhirSearchDao extends BaseHapiFhirDao implements ISe
 		Validate.notBlank(theContext, "theContext must be provided");
 		Validate.notBlank(theSearchParam, "theSearchParam must be provided");
 		Validate.notBlank(theText, "theSearchParam must be provided");
-		
+
 		long start = System.currentTimeMillis();
-		
+
 		String[] contextParts = StringUtils.split(theContext, '/');
 		if (contextParts.length != 3 || "Patient".equals(contextParts[0]) == false || "$everything".equals(contextParts[2]) == false) {
 			throw new InvalidRequestException("Invalid context: " + theContext);
 		}
 		IdDt contextId = new IdDt(contextParts[0], contextParts[1]);
 		Long pid = BaseHapiFhirDao.translateForcedIdToPid(contextId, myEntityManager);
-		
+
 		FullTextEntityManager em = org.hibernate.search.jpa.Search.getFullTextEntityManager(myEntityManager);
 
 		QueryBuilder qb = em.getSearchFactory().buildQueryBuilder().forEntity(ResourceTable.class).get();
@@ -236,7 +270,7 @@ public class FhirSearchDao extends BaseHapiFhirDao implements ISe
 				formatter.setAnalyzer("myContentTextPhonetic");
 				highlighter.getBestFragments(analyzer.tokenStream("myContentTextPhonetic", nextValue), nextValue, 10);
-				
+
 				formatter.setAnalyzer("myContentTextNGram");
 				highlighter.getBestFragments(analyzer.tokenStream("myContentTextNGram", nextValue), nextValue, 10);
@@ -244,14 +278,14 @@ public class FhirSearchDao extends BaseHapiFhirDao implements ISe
 				formatter.setAnalyzer("myContentTextEdgeNGram");
 				highlighter.getBestFragments(analyzer.tokenStream("myContentTextEdgeNGram", nextValue), nextValue, 10);
 
-//				formatter.setAnalyzer("myContentText");
-//				highlighter.getBestFragments(analyzer.tokenStream("myContentText", nextValue), nextValue, 10);
-//				formatter.setAnalyzer("myContentTextNGram");
-//				highlighter.getBestFragments(analyzer.tokenStream("myContentTextNGram", nextValue), nextValue, 10);
formatter.setAnalyzer("myContentTextEdgeNGram"); -// highlighter.getBestFragments(analyzer.tokenStream("myContentTextEdgeNGram", nextValue), nextValue, 10); -// formatter.setAnalyzer("myContentTextPhonetic"); -// highlighter.getBestFragments(analyzer.tokenStream("myContentTextPhonetic", nextValue), nextValue, 10); + // formatter.setAnalyzer("myContentText"); + // highlighter.getBestFragments(analyzer.tokenStream("myContentText", nextValue), nextValue, 10); + // formatter.setAnalyzer("myContentTextNGram"); + // highlighter.getBestFragments(analyzer.tokenStream("myContentTextNGram", nextValue), nextValue, 10); + // formatter.setAnalyzer("myContentTextEdgeNGram"); + // highlighter.getBestFragments(analyzer.tokenStream("myContentTextEdgeNGram", nextValue), nextValue, 10); + // formatter.setAnalyzer("myContentTextPhonetic"); + // highlighter.getBestFragments(analyzer.tokenStream("myContentTextPhonetic", nextValue), nextValue, 10); } catch (Exception e) { throw new InternalErrorException(e); } @@ -259,18 +293,18 @@ public class FhirSearchDao extends BaseHapiFhirDao implements ISe } Collections.sort(suggestions); - + Set terms = Sets.newHashSet(); - for (Iterator iter = suggestions.iterator(); iter.hasNext(); ) { + for (Iterator iter = suggestions.iterator(); iter.hasNext();) { String nextTerm = iter.next().getTerm().toLowerCase(); if (!terms.add(nextTerm)) { iter.remove(); } } - - long delay = System.currentTimeMillis()- start; - ourLog.info("Provided {} suggestions for term {} in {} ms", new Object[] {terms.size(), theText, delay}); - + + long delay = System.currentTimeMillis() - start; + ourLog.info("Provided {} suggestions for term {} in {} ms", new Object[] { terms.size(), theText, delay }); + return suggestions; } @@ -318,12 +352,12 @@ public class FhirSearchDao extends BaseHapiFhirDao implements ISe public void setFindPhrasesWith() { myPartialMatchPhrases = new ArrayList(); myPartialMatchScores = new ArrayList(); - + for (Suggestion next : mySuggestions) { myPartialMatchPhrases.add(' ' + next.myTerm); myPartialMatchScores.add(next.myScore); } - + myPartialMatchPhrases.add(myOriginalSearch); myPartialMatchScores.add(1.0f); } @@ -334,7 +368,7 @@ public class FhirSearchDao extends BaseHapiFhirDao implements ISe @Override public String highlightTerm(String theOriginalText, TokenGroup theTokenGroup) { - ourLog.debug("{} Found {} with score {}", new Object[] {myAnalyzer, theOriginalText, theTokenGroup.getTotalScore()}); + ourLog.debug("{} Found {} with score {}", new Object[] { myAnalyzer, theOriginalText, theTokenGroup.getTotalScore() }); if (theTokenGroup.getTotalScore() > 0) { float score = theTokenGroup.getTotalScore(); if (theOriginalText.equalsIgnoreCase(myOriginalSearch)) { @@ -350,7 +384,7 @@ public class FhirSearchDao extends BaseHapiFhirDao implements ISe } } } - + return null; } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BaseResourceIndexedSearchParam.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BaseResourceIndexedSearchParam.java index 34e227e93b2..43c5333ac96 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BaseResourceIndexedSearchParam.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BaseResourceIndexedSearchParam.java @@ -32,6 +32,7 @@ import javax.persistence.MappedSuperclass; import org.hibernate.search.annotations.ContainedIn; import org.hibernate.search.annotations.Field; +import org.hibernate.search.annotations.Store; @MappedSuperclass public abstract class 
 public abstract class BaseResourceIndexedSearchParam implements Serializable {
@@ -54,11 +55,11 @@ public abstract class BaseResourceIndexedSearchParam implements Serializable {
 	@ContainedIn
 	private ResourceTable myResource;
 
-	@Field
+	@Field(store=Store.YES)
 	@Column(name = "RES_ID", insertable = false, updatable = false)
 	private Long myResourcePid;
 
-	@Field
+	@Field(store=Store.YES)
 	@Column(name = "RES_TYPE", nullable=false)
 	private String myResourceType;
 
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceIndexedSearchParamString.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceIndexedSearchParamString.java
index 19bc17075de..1a8f7cf6f05 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceIndexedSearchParamString.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceIndexedSearchParamString.java
@@ -49,6 +49,7 @@ import org.hibernate.search.annotations.AnalyzerDefs;
 import org.hibernate.search.annotations.ContainedIn;
 import org.hibernate.search.annotations.Field;
 import org.hibernate.search.annotations.Fields;
+import org.hibernate.search.annotations.Indexed;
 import org.hibernate.search.annotations.Parameter;
 import org.hibernate.search.annotations.Store;
 import org.hibernate.search.annotations.TokenFilterDef;
@@ -61,49 +62,50 @@ import org.hibernate.search.annotations.TokenizerDef;
 @org.hibernate.annotations.Table(appliesTo = "HFJ_SPIDX_STRING", indexes = {
 		@org.hibernate.annotations.Index(name = "IDX_SP_STRING", columnNames = { "RES_TYPE", "SP_NAME", "SP_VALUE_NORMALIZED" }) })
-@AnalyzerDefs({
-	@AnalyzerDef(name = "autocompleteEdgeAnalyzer",
-		tokenizer = @TokenizerDef(factory = PatternTokenizerFactory.class, params= {
-			@Parameter(name="pattern", value="(.*)"),
-			@Parameter(name="group", value="1")
-		}),
-		filters = {
-			@TokenFilterDef(factory = LowerCaseFilterFactory.class),
-			@TokenFilterDef(factory = StopFilterFactory.class),
-			@TokenFilterDef(factory = EdgeNGramFilterFactory.class, params = {
-				@Parameter(name = "minGramSize", value = "3"),
-				@Parameter(name = "maxGramSize", value = "50")
-			}),
-		}),
-	@AnalyzerDef(name = "autocompletePhoneticAnalyzer",
-		tokenizer = @TokenizerDef(factory=StandardTokenizerFactory.class),
-		filters = {
-			@TokenFilterDef(factory=StandardFilterFactory.class),
-			@TokenFilterDef(factory=StopFilterFactory.class),
-			@TokenFilterDef(factory=PhoneticFilterFactory.class, params = {
-				@Parameter(name="encoder", value="DoubleMetaphone")
-			}),
-			@TokenFilterDef(factory=SnowballPorterFilterFactory.class, params = {
-				@Parameter(name="language", value="English")
-			})
-		}),
-	@AnalyzerDef(name = "autocompleteNGramAnalyzer",
-		tokenizer = @TokenizerDef(factory = StandardTokenizerFactory.class),
-		filters = {
-			@TokenFilterDef(factory = WordDelimiterFilterFactory.class),
-			@TokenFilterDef(factory = LowerCaseFilterFactory.class),
-			@TokenFilterDef(factory = NGramFilterFactory.class, params = {
-				@Parameter(name = "minGramSize", value = "3"),
-				@Parameter(name = "maxGramSize", value = "20")
-			}),
-		}),
-	@AnalyzerDef(name = "standardAnalyzer",
-		tokenizer = @TokenizerDef(factory = StandardTokenizerFactory.class),
-		filters = {
-			@TokenFilterDef(factory = LowerCaseFilterFactory.class),
-		}) // Def
-	}
-)
+@Indexed()
+//@AnalyzerDefs({
+//	@AnalyzerDef(name = "autocompleteEdgeAnalyzer",
+//		tokenizer = @TokenizerDef(factory = PatternTokenizerFactory.class, params= {
+//			@Parameter(name="pattern", value="(.*)"),
+//			@Parameter(name="group", value="1")
+//		}),
+//		filters = {
+//			@TokenFilterDef(factory = LowerCaseFilterFactory.class),
+//			@TokenFilterDef(factory = StopFilterFactory.class),
+//			@TokenFilterDef(factory = EdgeNGramFilterFactory.class, params = {
+//				@Parameter(name = "minGramSize", value = "3"),
+//				@Parameter(name = "maxGramSize", value = "50")
+//			}),
+//		}),
+//	@AnalyzerDef(name = "autocompletePhoneticAnalyzer",
+//		tokenizer = @TokenizerDef(factory=StandardTokenizerFactory.class),
+//		filters = {
+//			@TokenFilterDef(factory=StandardFilterFactory.class),
+//			@TokenFilterDef(factory=StopFilterFactory.class),
+//			@TokenFilterDef(factory=PhoneticFilterFactory.class, params = {
+//				@Parameter(name="encoder", value="DoubleMetaphone")
+//			}),
+//			@TokenFilterDef(factory=SnowballPorterFilterFactory.class, params = {
+//				@Parameter(name="language", value="English")
+//			})
+//		}),
+//	@AnalyzerDef(name = "autocompleteNGramAnalyzer",
+//		tokenizer = @TokenizerDef(factory = StandardTokenizerFactory.class),
+//		filters = {
+//			@TokenFilterDef(factory = WordDelimiterFilterFactory.class),
+//			@TokenFilterDef(factory = LowerCaseFilterFactory.class),
+//			@TokenFilterDef(factory = NGramFilterFactory.class, params = {
+//				@Parameter(name = "minGramSize", value = "3"),
+//				@Parameter(name = "maxGramSize", value = "20")
+//			}),
+//		}),
+//	@AnalyzerDef(name = "standardAnalyzer",
+//		tokenizer = @TokenizerDef(factory = StandardTokenizerFactory.class),
+//		filters = {
+//			@TokenFilterDef(factory = LowerCaseFilterFactory.class),
+//		}) // Def
+//	}
+//)
 //@formatter:on
 public class ResourceIndexedSearchParamString extends BaseResourceIndexedSearchParam {
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu21/FhirResourceDaoDstu21SearchFtTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu21/FhirResourceDaoDstu21SearchFtTest.java
index 2c28d69de9b..51a975d1131 100644
--- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu21/FhirResourceDaoDstu21SearchFtTest.java
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu21/FhirResourceDaoDstu21SearchFtTest.java
@@ -14,25 +14,52 @@ import java.util.List;
 import javax.servlet.http.HttpServletRequest;
 
 import org.hl7.fhir.instance.model.api.IIdType;
+import org.junit.Ignore;
 import org.junit.Test;
 
 import ca.uhn.fhir.jpa.dao.FhirSearchDao.Suggestion;
 import ca.uhn.fhir.jpa.dao.SearchParameterMap;
+import ca.uhn.fhir.model.dstu21.composite.QuantityDt;
 import ca.uhn.fhir.model.dstu21.resource.Device;
 import ca.uhn.fhir.model.dstu21.resource.Media;
 import ca.uhn.fhir.model.dstu21.resource.Observation;
 import ca.uhn.fhir.model.dstu21.resource.Patient;
+import ca.uhn.fhir.model.dstu21.valueset.ObservationStatusEnum;
 import ca.uhn.fhir.model.primitive.Base64BinaryDt;
 import ca.uhn.fhir.model.primitive.StringDt;
 import ca.uhn.fhir.rest.param.StringAndListParam;
 import ca.uhn.fhir.rest.param.StringOrListParam;
 import ca.uhn.fhir.rest.param.StringParam;
+import ca.uhn.fhir.rest.param.TokenParam;
 import ca.uhn.fhir.rest.server.Constants;
 
 public class FhirResourceDaoDstu21SearchFtTest extends BaseJpaDstu21Test {
 
 	private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirResourceDaoDstu21SearchFtTest.class);
 
+	@Test
+	@Ignore
+	public void testCodeTextSearch() {
+		Observation obs1 = new Observation();
+		obs1.getCode().setText("Systolic Blood Pressure");
+		obs1.setStatus(ObservationStatusEnum.FINAL);
+		obs1.setValue(new QuantityDt(123));
+		IIdType id1 = myObservationDao.create(obs1).getId().toUnqualifiedVersionless();
+
+		Observation obs2 = new Observation();
+		obs2.getCode().setText("Diastolic Blood Pressure");
+		obs2.setStatus(ObservationStatusEnum.FINAL);
+		obs2.setValue(new QuantityDt(81));
+		IIdType id2 = myObservationDao.create(obs2).getId().toUnqualifiedVersionless();
+
+		SearchParameterMap map;
+
+		map = new SearchParameterMap();
+		map.add(Observation.SP_CODE, new TokenParam(null, "Systolic").setText(true));
+		assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(map)), containsInAnyOrder(id1, id2));
+
+	}
+
 	@Test
 	public void testSuggestIgnoresBase64Content() {
 		Patient patient = new Patient();
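
A note on intended usage: the fluent setters added to TokenParam are what let the :text qualifier be attached inline while a search is being built, and they feed the new Hibernate Search branch in FhirSearchDao.doSearch() (currently gated off by the "&& false" guard while the build is stabilized, with testCodeTextSearch() left @Ignore'd to match). A minimal sketch of the intended call path, assuming the Spring-wired DSTU2.1 fixtures used in the test above (myObservationDao comes from BaseJpaDstu21Test, and IBundleProvider is the regular DAO search return type):

	// Build an Observation search equivalent to ?code:text=blood,
	// using the fluent TokenParam API added in this patch
	SearchParameterMap map = new SearchParameterMap();
	map.add(Observation.SP_CODE, new TokenParam(null, "blood").setText(true));

	// Once enabled, doSearch() projects the stored myResourcePid field out of
	// the Lucene index for matching indexed params, then intersects those PIDs
	// with the results of the main full-text query before returning them
	IBundleProvider results = myObservationDao.search(map);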