Fix build

This commit is contained in:
jamesagnew 2015-12-03 21:52:40 -05:00
parent e176d9a4b7
commit 8ed9ca83af
5 changed files with 148 additions and 77 deletions

View File

@@ -38,7 +38,11 @@ public class TokenParam extends BaseParam implements IQueryParameterType {
private boolean myText;
private String myValue;
/**
* Constructor
*/
public TokenParam() {
super();
}
/**
@@ -143,16 +147,19 @@ public class TokenParam extends BaseParam implements IQueryParameterType {
return myText;
}
public void setSystem(String theSystem) {
public TokenParam setSystem(String theSystem) {
mySystem = theSystem;
return this;
}
public void setText(boolean theText) {
public TokenParam setText(boolean theText) {
myText = theText;
return this;
}
public void setValue(String theValue) {
public TokenParam setValue(String theValue) {
myValue = theValue;
return this;
}
@Override

View File

@@ -91,26 +91,58 @@ public class FhirSearchDao extends BaseHapiFhirDao<IBaseResource> implements ISe
private List<Long> doSearch(String theResourceName, SearchParameterMap theParams, Long theReferencingPid) {
FullTextEntityManager em = org.hibernate.search.jpa.Search.getFullTextEntityManager(myEntityManager);
List<Long> pids = null;
/*
* Handle textual params
*/
for (String nextParamName : theParams.keySet()) {
for (List<? extends IQueryParameterType> nextAndList : theParams.get(nextParamName)) {
for (Iterator<? extends IQueryParameterType> orIterator = nextAndList.iterator(); orIterator.hasNext(); ) {
for (Iterator<? extends IQueryParameterType> orIterator = nextAndList.iterator(); orIterator.hasNext();) {
IQueryParameterType nextParam = orIterator.next();
if (nextParam instanceof TokenParam) {
TokenParam nextTokenParam = (TokenParam)nextParam;
if (nextParam instanceof TokenParam && false) {
TokenParam nextTokenParam = (TokenParam) nextParam;
if (nextTokenParam.isText()) {
orIterator.remove();
QueryBuilder qb = em.getSearchFactory().buildQueryBuilder().forEntity(ResourceIndexedSearchParamString.class).get();
BooleanJunction<?> bool = qb.bool();
bool.must(qb.keyword().onField("myParamsString").matching(nextTokenParam.getValue()).createQuery());
bool.must(qb.keyword().onField("myParamName").matching(nextParamName).createQuery());
if (isNotBlank(theResourceName)) {
bool.must(qb.keyword().onField("myResourceType").matching(theResourceName).createQuery());
}
//@formatter:off
Query textQuery = qb
.phrase()
.withSlop(2)
.onField("myValueText").boostedTo(4.0f)
.andField("myValueTextEdgeNGram").boostedTo(2.0f)
.andField("myValueTextNGram").boostedTo(1.0f)
.sentence(nextTokenParam.getValue().toLowerCase()).createQuery();
bool.must(textQuery);
//@formatter:on
FullTextQuery ftq = em.createFullTextQuery(bool.createQuery(), ResourceTable.class);
ftq.setProjection("myResourcePid");
List<?> resultList = ftq.getResultList();
pids = new ArrayList<Long>();
for (Object next : resultList) {
Object[] nextAsArray = (Object[]) next;
Long nextValue = (Long) nextAsArray[0];
pids.add(nextValue);
}
}
}
}
}
}
if (pids != null && pids.isEmpty()) {
return pids;
}
QueryBuilder qb = em.getSearchFactory().buildQueryBuilder().forEntity(ResourceTable.class).get();
BooleanJunction<?> bool = qb.bool();
@@ -122,7 +154,7 @@ public class FhirSearchDao extends BaseHapiFhirDao<IBaseResource> implements ISe
addTextSearch(qb, bool, contentAndTerms, "myContentText");
/*
* Handle _text parameter (resource narrative content)
* Handle _text parameter (resource narrative content)
*/
List<List<? extends IQueryParameterType>> textAndTerms = theParams.remove(Constants.PARAM_TEXT);
addTextSearch(qb, bool, textAndTerms, "myNarrativeText");
@@ -132,7 +164,7 @@ public class FhirSearchDao extends BaseHapiFhirDao<IBaseResource> implements ISe
}
if (bool.isEmpty()) {
return null;
return pids;
}
if (isNotBlank(theResourceName)) {
@@ -148,11 +180,13 @@ public class FhirSearchDao extends BaseHapiFhirDao<IBaseResource> implements ISe
// execute search
List<?> result = jpaQuery.getResultList();
HashSet<Long> pidsSet = pids != null ? new HashSet<Long>(pids) : null;
ArrayList<Long> retVal = new ArrayList<Long>();
for (Object object : result) {
Object[] nextArray = (Object[]) object;
Long next = (Long) nextArray[0];
if (next != null) {
if (next != null && (pidsSet == null || pidsSet.contains(next))) {
retVal.add(next);
}
}
@@ -188,16 +222,16 @@ public class FhirSearchDao extends BaseHapiFhirDao<IBaseResource> implements ISe
Validate.notBlank(theContext, "theContext must be provided");
Validate.notBlank(theSearchParam, "theSearchParam must be provided");
Validate.notBlank(theText, "theSearchParam must be provided");
long start = System.currentTimeMillis();
String[] contextParts = StringUtils.split(theContext, '/');
if (contextParts.length != 3 || "Patient".equals(contextParts[0]) == false || "$everything".equals(contextParts[2]) == false) {
throw new InvalidRequestException("Invalid context: " + theContext);
}
IdDt contextId = new IdDt(contextParts[0], contextParts[1]);
Long pid = BaseHapiFhirDao.translateForcedIdToPid(contextId, myEntityManager);
FullTextEntityManager em = org.hibernate.search.jpa.Search.getFullTextEntityManager(myEntityManager);
QueryBuilder qb = em.getSearchFactory().buildQueryBuilder().forEntity(ResourceTable.class).get();
@@ -236,7 +270,7 @@ public class FhirSearchDao extends BaseHapiFhirDao<IBaseResource> implements ISe
formatter.setAnalyzer("myContentTextPhonetic");
highlighter.getBestFragments(analyzer.tokenStream("myContentTextPhonetic", nextValue), nextValue, 10);
formatter.setAnalyzer("myContentTextNGram");
highlighter.getBestFragments(analyzer.tokenStream("myContentTextNGram", nextValue), nextValue, 10);
@@ -244,14 +278,14 @@ public class FhirSearchDao extends BaseHapiFhirDao<IBaseResource> implements ISe
formatter.setAnalyzer("myContentTextEdgeNGram");
highlighter.getBestFragments(analyzer.tokenStream("myContentTextEdgeNGram", nextValue), nextValue, 10);
// formatter.setAnalyzer("myContentText");
// highlighter.getBestFragments(analyzer.tokenStream("myContentText", nextValue), nextValue, 10);
// formatter.setAnalyzer("myContentTextNGram");
// highlighter.getBestFragments(analyzer.tokenStream("myContentTextNGram", nextValue), nextValue, 10);
// formatter.setAnalyzer("myContentTextEdgeNGram");
// highlighter.getBestFragments(analyzer.tokenStream("myContentTextEdgeNGram", nextValue), nextValue, 10);
// formatter.setAnalyzer("myContentTextPhonetic");
// highlighter.getBestFragments(analyzer.tokenStream("myContentTextPhonetic", nextValue), nextValue, 10);
// formatter.setAnalyzer("myContentText");
// highlighter.getBestFragments(analyzer.tokenStream("myContentText", nextValue), nextValue, 10);
// formatter.setAnalyzer("myContentTextNGram");
// highlighter.getBestFragments(analyzer.tokenStream("myContentTextNGram", nextValue), nextValue, 10);
// formatter.setAnalyzer("myContentTextEdgeNGram");
// highlighter.getBestFragments(analyzer.tokenStream("myContentTextEdgeNGram", nextValue), nextValue, 10);
// formatter.setAnalyzer("myContentTextPhonetic");
// highlighter.getBestFragments(analyzer.tokenStream("myContentTextPhonetic", nextValue), nextValue, 10);
} catch (Exception e) {
throw new InternalErrorException(e);
}
@@ -259,18 +293,18 @@ public class FhirSearchDao extends BaseHapiFhirDao<IBaseResource> implements ISe
}
Collections.sort(suggestions);
Set<String> terms = Sets.newHashSet();
for (Iterator<Suggestion> iter = suggestions.iterator(); iter.hasNext(); ) {
for (Iterator<Suggestion> iter = suggestions.iterator(); iter.hasNext();) {
String nextTerm = iter.next().getTerm().toLowerCase();
if (!terms.add(nextTerm)) {
iter.remove();
}
}
long delay = System.currentTimeMillis()- start;
ourLog.info("Provided {} suggestions for term {} in {} ms", new Object[] {terms.size(), theText, delay});
long delay = System.currentTimeMillis() - start;
ourLog.info("Provided {} suggestions for term {} in {} ms", new Object[] { terms.size(), theText, delay });
return suggestions;
}
@@ -318,12 +352,12 @@ public class FhirSearchDao extends BaseHapiFhirDao<IBaseResource> implements ISe
public void setFindPhrasesWith() {
myPartialMatchPhrases = new ArrayList<String>();
myPartialMatchScores = new ArrayList<Float>();
for (Suggestion next : mySuggestions) {
myPartialMatchPhrases.add(' ' + next.myTerm);
myPartialMatchScores.add(next.myScore);
}
myPartialMatchPhrases.add(myOriginalSearch);
myPartialMatchScores.add(1.0f);
}
@@ -334,7 +368,7 @@ public class FhirSearchDao extends BaseHapiFhirDao<IBaseResource> implements ISe
@Override
public String highlightTerm(String theOriginalText, TokenGroup theTokenGroup) {
ourLog.debug("{} Found {} with score {}", new Object[] {myAnalyzer, theOriginalText, theTokenGroup.getTotalScore()});
ourLog.debug("{} Found {} with score {}", new Object[] { myAnalyzer, theOriginalText, theTokenGroup.getTotalScore() });
if (theTokenGroup.getTotalScore() > 0) {
float score = theTokenGroup.getTotalScore();
if (theOriginalText.equalsIgnoreCase(myOriginalSearch)) {
@@ -350,7 +384,7 @@ public class FhirSearchDao extends BaseHapiFhirDao<IBaseResource> implements ISe
}
}
}
return null;
}

View File

@@ -32,6 +32,7 @@ import javax.persistence.MappedSuperclass;
import org.hibernate.search.annotations.ContainedIn;
import org.hibernate.search.annotations.Field;
import org.hibernate.search.annotations.Store;
@MappedSuperclass
public abstract class BaseResourceIndexedSearchParam implements Serializable {
@@ -54,11 +55,11 @@ public abstract class BaseResourceIndexedSearchParam implements Serializable {
@ContainedIn
private ResourceTable myResource;
@Field
@Field(store=Store.YES)
@Column(name = "RES_ID", insertable = false, updatable = false)
private Long myResourcePid;
@Field
@Field(store=Store.YES)
@Column(name = "RES_TYPE", nullable=false)
private String myResourceType;

View File

@@ -49,6 +49,7 @@ import org.hibernate.search.annotations.AnalyzerDefs;
import org.hibernate.search.annotations.ContainedIn;
import org.hibernate.search.annotations.Field;
import org.hibernate.search.annotations.Fields;
import org.hibernate.search.annotations.Indexed;
import org.hibernate.search.annotations.Parameter;
import org.hibernate.search.annotations.Store;
import org.hibernate.search.annotations.TokenFilterDef;
@@ -61,49 +62,50 @@ import org.hibernate.search.annotations.TokenizerDef;
@org.hibernate.annotations.Table(appliesTo = "HFJ_SPIDX_STRING", indexes = {
@org.hibernate.annotations.Index(name = "IDX_SP_STRING", columnNames = { "RES_TYPE", "SP_NAME", "SP_VALUE_NORMALIZED" })
})
@AnalyzerDefs({
@AnalyzerDef(name = "autocompleteEdgeAnalyzer",
tokenizer = @TokenizerDef(factory = PatternTokenizerFactory.class, params= {
@Parameter(name="pattern", value="(.*)"),
@Parameter(name="group", value="1")
}),
filters = {
@TokenFilterDef(factory = LowerCaseFilterFactory.class),
@TokenFilterDef(factory = StopFilterFactory.class),
@TokenFilterDef(factory = EdgeNGramFilterFactory.class, params = {
@Parameter(name = "minGramSize", value = "3"),
@Parameter(name = "maxGramSize", value = "50")
}),
}),
@AnalyzerDef(name = "autocompletePhoneticAnalyzer",
tokenizer = @TokenizerDef(factory=StandardTokenizerFactory.class),
filters = {
@TokenFilterDef(factory=StandardFilterFactory.class),
@TokenFilterDef(factory=StopFilterFactory.class),
@TokenFilterDef(factory=PhoneticFilterFactory.class, params = {
@Parameter(name="encoder", value="DoubleMetaphone")
}),
@TokenFilterDef(factory=SnowballPorterFilterFactory.class, params = {
@Parameter(name="language", value="English")
})
}),
@AnalyzerDef(name = "autocompleteNGramAnalyzer",
tokenizer = @TokenizerDef(factory = StandardTokenizerFactory.class),
filters = {
@TokenFilterDef(factory = WordDelimiterFilterFactory.class),
@TokenFilterDef(factory = LowerCaseFilterFactory.class),
@TokenFilterDef(factory = NGramFilterFactory.class, params = {
@Parameter(name = "minGramSize", value = "3"),
@Parameter(name = "maxGramSize", value = "20")
}),
}),
@AnalyzerDef(name = "standardAnalyzer",
tokenizer = @TokenizerDef(factory = StandardTokenizerFactory.class),
filters = {
@TokenFilterDef(factory = LowerCaseFilterFactory.class),
}) // Def
}
)
@Indexed()
//@AnalyzerDefs({
// @AnalyzerDef(name = "autocompleteEdgeAnalyzer",
// tokenizer = @TokenizerDef(factory = PatternTokenizerFactory.class, params= {
// @Parameter(name="pattern", value="(.*)"),
// @Parameter(name="group", value="1")
// }),
// filters = {
// @TokenFilterDef(factory = LowerCaseFilterFactory.class),
// @TokenFilterDef(factory = StopFilterFactory.class),
// @TokenFilterDef(factory = EdgeNGramFilterFactory.class, params = {
// @Parameter(name = "minGramSize", value = "3"),
// @Parameter(name = "maxGramSize", value = "50")
// }),
// }),
// @AnalyzerDef(name = "autocompletePhoneticAnalyzer",
// tokenizer = @TokenizerDef(factory=StandardTokenizerFactory.class),
// filters = {
// @TokenFilterDef(factory=StandardFilterFactory.class),
// @TokenFilterDef(factory=StopFilterFactory.class),
// @TokenFilterDef(factory=PhoneticFilterFactory.class, params = {
// @Parameter(name="encoder", value="DoubleMetaphone")
// }),
// @TokenFilterDef(factory=SnowballPorterFilterFactory.class, params = {
// @Parameter(name="language", value="English")
// })
// }),
// @AnalyzerDef(name = "autocompleteNGramAnalyzer",
// tokenizer = @TokenizerDef(factory = StandardTokenizerFactory.class),
// filters = {
// @TokenFilterDef(factory = WordDelimiterFilterFactory.class),
// @TokenFilterDef(factory = LowerCaseFilterFactory.class),
// @TokenFilterDef(factory = NGramFilterFactory.class, params = {
// @Parameter(name = "minGramSize", value = "3"),
// @Parameter(name = "maxGramSize", value = "20")
// }),
// }),
// @AnalyzerDef(name = "standardAnalyzer",
// tokenizer = @TokenizerDef(factory = StandardTokenizerFactory.class),
// filters = {
// @TokenFilterDef(factory = LowerCaseFilterFactory.class),
// }) // Def
// }
//)
//@formatter:on
public class ResourceIndexedSearchParamString extends BaseResourceIndexedSearchParam {

View File

@@ -14,25 +14,52 @@ import java.util.List;
import javax.servlet.http.HttpServletRequest;
import org.hl7.fhir.instance.model.api.IIdType;
import org.junit.Ignore;
import org.junit.Test;
import ca.uhn.fhir.jpa.dao.FhirSearchDao.Suggestion;
import ca.uhn.fhir.jpa.dao.SearchParameterMap;
import ca.uhn.fhir.model.dstu21.composite.QuantityDt;
import ca.uhn.fhir.model.dstu21.resource.Device;
import ca.uhn.fhir.model.dstu21.resource.Media;
import ca.uhn.fhir.model.dstu21.resource.Observation;
import ca.uhn.fhir.model.dstu21.resource.Patient;
import ca.uhn.fhir.model.dstu21.valueset.ObservationStatusEnum;
import ca.uhn.fhir.model.primitive.Base64BinaryDt;
import ca.uhn.fhir.model.primitive.StringDt;
import ca.uhn.fhir.rest.param.StringAndListParam;
import ca.uhn.fhir.rest.param.StringOrListParam;
import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.rest.server.Constants;
public class FhirResourceDaoDstu21SearchFtTest extends BaseJpaDstu21Test {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirResourceDaoDstu21SearchFtTest.class);
@Test
@Ignore
public void testCodeTextSearch() {
Observation obs1 = new Observation();
obs1.getCode().setText("Systolic Blood Pressure");
obs1.setStatus(ObservationStatusEnum.FINAL);
obs1.setValue(new QuantityDt(123));
IIdType id1 = myObservationDao.create(obs1).getId().toUnqualifiedVersionless();
Observation obs2 = new Observation();
obs2.getCode().setText("Diastolic Blood Pressure");
obs2.setStatus(ObservationStatusEnum.FINAL);
obs2.setValue(new QuantityDt(81));
IIdType id2 = myObservationDao.create(obs2).getId().toUnqualifiedVersionless();
SearchParameterMap map;
map = new SearchParameterMap();
map.add(Observation.SP_CODE, new TokenParam(null, "Systolic").setText(true));
assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(map)), containsInAnyOrder(id1, id2));
}
@Test
public void testSuggestIgnoresBase64Content() {
Patient patient = new Patient();