Fix build

parent e176d9a4b7
commit 8ed9ca83af
@@ -38,7 +38,11 @@ public class TokenParam extends BaseParam implements IQueryParameterType {
	private boolean myText;
	private String myValue;

	/**
	 * Constructor
	 */
	public TokenParam() {
		super();
	}

	/**

@@ -143,16 +147,19 @@ public class TokenParam extends BaseParam implements IQueryParameterType {
		return myText;
	}

-	public void setSystem(String theSystem) {
+	public TokenParam setSystem(String theSystem) {
		mySystem = theSystem;
+		return this;
	}

-	public void setText(boolean theText) {
+	public TokenParam setText(boolean theText) {
		myText = theText;
+		return this;
	}

-	public void setValue(String theValue) {
+	public TokenParam setValue(String theValue) {
		myValue = theValue;
+		return this;
	}

	@Override
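Side note on the TokenParam change above: returning the parameter itself from each setter lets a caller configure a :text token search in a single expression, which is exactly how the new DSTU2.1 test at the bottom of this commit uses it. A minimal caller-side sketch (the wrapper class name is made up for illustration; the imports mirror the test file in this commit):

import ca.uhn.fhir.jpa.dao.SearchParameterMap;
import ca.uhn.fhir.model.dstu21.resource.Observation;
import ca.uhn.fhir.rest.param.TokenParam;

public class TokenParamFluentUsageSketch {

	// Build a search on Observation.code that matches against the text portion of
	// the token, relying on the fluent setText(true) introduced in this commit.
	public SearchParameterMap buildCodeTextSearch(String theText) {
		SearchParameterMap map = new SearchParameterMap();
		map.add(Observation.SP_CODE, new TokenParam(null, theText).setText(true));
		return map;
	}
}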
@@ -92,6 +92,8 @@ public class FhirSearchDao extends BaseHapiFhirDao<IBaseResource> implements ISe
	private List<Long> doSearch(String theResourceName, SearchParameterMap theParams, Long theReferencingPid) {
		FullTextEntityManager em = org.hibernate.search.jpa.Search.getFullTextEntityManager(myEntityManager);

+		List<Long> pids = null;
+
		/*
		 * Handle textual params
		 */

@@ -99,18 +101,48 @@ public class FhirSearchDao extends BaseHapiFhirDao<IBaseResource> implements ISe
			for (List<? extends IQueryParameterType> nextAndList : theParams.get(nextParamName)) {
				for (Iterator<? extends IQueryParameterType> orIterator = nextAndList.iterator(); orIterator.hasNext();) {
					IQueryParameterType nextParam = orIterator.next();
-					if (nextParam instanceof TokenParam) {
+					if (nextParam instanceof TokenParam && false) {
						TokenParam nextTokenParam = (TokenParam) nextParam;
						if (nextTokenParam.isText()) {
							orIterator.remove();
							QueryBuilder qb = em.getSearchFactory().buildQueryBuilder().forEntity(ResourceIndexedSearchParamString.class).get();
							BooleanJunction<?> bool = qb.bool();

							bool.must(qb.keyword().onField("myParamsString").matching(nextTokenParam.getValue()).createQuery());
							bool.must(qb.keyword().onField("myParamName").matching(nextParamName).createQuery());
							if (isNotBlank(theResourceName)) {
								bool.must(qb.keyword().onField("myResourceType").matching(theResourceName).createQuery());
							}

							//@formatter:off
							Query textQuery = qb
								.phrase()
								.withSlop(2)
								.onField("myValueText").boostedTo(4.0f)
								.andField("myValueTextEdgeNGram").boostedTo(2.0f)
								.andField("myValueTextNGram").boostedTo(1.0f)
								.sentence(nextTokenParam.getValue().toLowerCase()).createQuery();
							bool.must(textQuery);
							//@formatter:on

							FullTextQuery ftq = em.createFullTextQuery(bool.createQuery(), ResourceTable.class);
							ftq.setProjection("myResourcePid");

							List<?> resultList = ftq.getResultList();
							pids = new ArrayList<Long>();
							for (Object next : resultList) {
								Object[] nextAsArray = (Object[]) next;
								Long nextValue = (Long) nextAsArray[0];
								pids.add(nextValue);
							}
						}
					}
				}
			}
		}

		if (pids != null && pids.isEmpty()) {
			return pids;
		}

		QueryBuilder qb = em.getSearchFactory().buildQueryBuilder().forEntity(ResourceTable.class).get();
		BooleanJunction<?> bool = qb.bool();

@@ -132,7 +164,7 @@ public class FhirSearchDao extends BaseHapiFhirDao<IBaseResource> implements ISe
		}

		if (bool.isEmpty()) {
-			return null;
+			return pids;
		}

		if (isNotBlank(theResourceName)) {

@@ -148,11 +180,13 @@ public class FhirSearchDao extends BaseHapiFhirDao<IBaseResource> implements ISe
		// execute search
		List<?> result = jpaQuery.getResultList();

+		HashSet<Long> pidsSet = pids != null ? new HashSet<Long>(pids) : null;
+
		ArrayList<Long> retVal = new ArrayList<Long>();
		for (Object object : result) {
			Object[] nextArray = (Object[]) object;
			Long next = (Long) nextArray[0];
-			if (next != null) {
+			if (next != null && (pidsSet == null || pidsSet.contains(next))) {
				retVal.add(next);
			}
		}
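To make the flow of the new pids list concrete (note that the full-text branch above is still switched off by the "&& false" guard): the candidate PIDs gathered from the Lucene projection are intersected with the PIDs returned by the main JPA query, and a null list means no full-text restriction at all. A small self-contained sketch of that pattern, with hypothetical names:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;

public class PidIntersectionSketch {

	// Mirrors the end of doSearch(): fullTextPids == null means "no full-text
	// constraint", while an empty list means "full-text matched nothing".
	static List<Long> intersect(List<Long> fullTextPids, List<Long> mainQueryPids) {
		HashSet<Long> pidsSet = fullTextPids != null ? new HashSet<Long>(fullTextPids) : null;
		ArrayList<Long> retVal = new ArrayList<Long>();
		for (Long next : mainQueryPids) {
			if (next != null && (pidsSet == null || pidsSet.contains(next))) {
				retVal.add(next);
			}
		}
		return retVal;
	}

	public static void main(String[] args) {
		// With a full-text candidate set of {1, 3}, only those PIDs survive.
		System.out.println(intersect(Arrays.asList(1L, 3L), Arrays.asList(1L, 2L, 3L))); // [1, 3]
		// With no full-text constraint (null), every PID passes through.
		System.out.println(intersect(null, Arrays.asList(1L, 2L, 3L))); // [1, 2, 3]
	}
}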
@@ -32,6 +32,7 @@ import javax.persistence.MappedSuperclass;

import org.hibernate.search.annotations.ContainedIn;
import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Store;

@MappedSuperclass
public abstract class BaseResourceIndexedSearchParam implements Serializable {

@@ -54,11 +55,11 @@ public abstract class BaseResourceIndexedSearchParam implements Serializable {
	@ContainedIn
	private ResourceTable myResource;

-	@Field
+	@Field(store=Store.YES)
	@Column(name = "RES_ID", insertable = false, updatable = false)
	private Long myResourcePid;

-	@Field
+	@Field(store=Store.YES)
	@Column(name = "RES_TYPE", nullable=false)
	private String myResourceType;

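A note on the Store.YES change above (an inference, not stated in the commit message): Hibernate Search can only hand a field back through FullTextQuery.setProjection(...) if the value was stored in the Lucene index, so the projection on "myResourcePid" in FhirSearchDao depends on these fields being stored. A minimal, hypothetical entity illustrating the pairing:

import javax.persistence.Entity;
import javax.persistence.Id;

import org.hibernate.search.annotations.Field;
import org.hibernate.search.annotations.Indexed;
import org.hibernate.search.annotations.Store;
import org.hibernate.search.jpa.FullTextEntityManager;
import org.hibernate.search.jpa.FullTextQuery;

// Hypothetical mini-entity: only stored fields are available to projections.
@Entity
@Indexed
class ProjectionSketchEntity {

	@Id
	private Long id;

	@Field(store = Store.YES) // with the default Store.NO the value would not be available below
	private Long myResourcePid;
}

class ProjectionSketch {

	// Runs a full-text query and reads the stored PID out of the first projected hit.
	static Long firstPid(FullTextEntityManager em, org.apache.lucene.search.Query luceneQuery) {
		FullTextQuery ftq = em.createFullTextQuery(luceneQuery, ProjectionSketchEntity.class);
		ftq.setProjection("myResourcePid");
		Object[] firstHit = (Object[]) ftq.getResultList().get(0); // each hit is an Object[] of projected fields
		return (Long) firstHit[0];
	}
}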
@@ -49,6 +49,7 @@ import org.hibernate.search.annotations.AnalyzerDefs;
import org.hibernate.search.annotations.ContainedIn;
import org.hibernate.search.annotations.Field;
import org.hibernate.search.annotations.Fields;
import org.hibernate.search.annotations.Indexed;
import org.hibernate.search.annotations.Parameter;
+import org.hibernate.search.annotations.Store;
import org.hibernate.search.annotations.TokenFilterDef;
@@ -61,49 +62,50 @@ import org.hibernate.search.annotations.TokenizerDef;
@org.hibernate.annotations.Table(appliesTo = "HFJ_SPIDX_STRING", indexes = {
	@org.hibernate.annotations.Index(name = "IDX_SP_STRING", columnNames = { "RES_TYPE", "SP_NAME", "SP_VALUE_NORMALIZED" })
})
-@AnalyzerDefs({
-	@AnalyzerDef(name = "autocompleteEdgeAnalyzer",
-		tokenizer = @TokenizerDef(factory = PatternTokenizerFactory.class, params= {
-			@Parameter(name="pattern", value="(.*)"),
-			@Parameter(name="group", value="1")
-		}),
-		filters = {
-			@TokenFilterDef(factory = LowerCaseFilterFactory.class),
-			@TokenFilterDef(factory = StopFilterFactory.class),
-			@TokenFilterDef(factory = EdgeNGramFilterFactory.class, params = {
-				@Parameter(name = "minGramSize", value = "3"),
-				@Parameter(name = "maxGramSize", value = "50")
-			}),
-		}),
-	@AnalyzerDef(name = "autocompletePhoneticAnalyzer",
-		tokenizer = @TokenizerDef(factory=StandardTokenizerFactory.class),
-		filters = {
-			@TokenFilterDef(factory=StandardFilterFactory.class),
-			@TokenFilterDef(factory=StopFilterFactory.class),
-			@TokenFilterDef(factory=PhoneticFilterFactory.class, params = {
-				@Parameter(name="encoder", value="DoubleMetaphone")
-			}),
-			@TokenFilterDef(factory=SnowballPorterFilterFactory.class, params = {
-				@Parameter(name="language", value="English")
-			})
-		}),
-	@AnalyzerDef(name = "autocompleteNGramAnalyzer",
-		tokenizer = @TokenizerDef(factory = StandardTokenizerFactory.class),
-		filters = {
-			@TokenFilterDef(factory = WordDelimiterFilterFactory.class),
-			@TokenFilterDef(factory = LowerCaseFilterFactory.class),
-			@TokenFilterDef(factory = NGramFilterFactory.class, params = {
-				@Parameter(name = "minGramSize", value = "3"),
-				@Parameter(name = "maxGramSize", value = "20")
-			}),
-		}),
-	@AnalyzerDef(name = "standardAnalyzer",
-		tokenizer = @TokenizerDef(factory = StandardTokenizerFactory.class),
-		filters = {
-			@TokenFilterDef(factory = LowerCaseFilterFactory.class),
-		}) // Def
-	}
-)
@Indexed()
+//@AnalyzerDefs({
+//	@AnalyzerDef(name = "autocompleteEdgeAnalyzer",
+//		tokenizer = @TokenizerDef(factory = PatternTokenizerFactory.class, params= {
+//			@Parameter(name="pattern", value="(.*)"),
+//			@Parameter(name="group", value="1")
+//		}),
+//		filters = {
+//			@TokenFilterDef(factory = LowerCaseFilterFactory.class),
+//			@TokenFilterDef(factory = StopFilterFactory.class),
+//			@TokenFilterDef(factory = EdgeNGramFilterFactory.class, params = {
+//				@Parameter(name = "minGramSize", value = "3"),
+//				@Parameter(name = "maxGramSize", value = "50")
+//			}),
+//		}),
+//	@AnalyzerDef(name = "autocompletePhoneticAnalyzer",
+//		tokenizer = @TokenizerDef(factory=StandardTokenizerFactory.class),
+//		filters = {
+//			@TokenFilterDef(factory=StandardFilterFactory.class),
+//			@TokenFilterDef(factory=StopFilterFactory.class),
+//			@TokenFilterDef(factory=PhoneticFilterFactory.class, params = {
+//				@Parameter(name="encoder", value="DoubleMetaphone")
+//			}),
+//			@TokenFilterDef(factory=SnowballPorterFilterFactory.class, params = {
+//				@Parameter(name="language", value="English")
+//			})
+//		}),
+//	@AnalyzerDef(name = "autocompleteNGramAnalyzer",
+//		tokenizer = @TokenizerDef(factory = StandardTokenizerFactory.class),
+//		filters = {
+//			@TokenFilterDef(factory = WordDelimiterFilterFactory.class),
+//			@TokenFilterDef(factory = LowerCaseFilterFactory.class),
+//			@TokenFilterDef(factory = NGramFilterFactory.class, params = {
+//				@Parameter(name = "minGramSize", value = "3"),
+//				@Parameter(name = "maxGramSize", value = "20")
+//			}),
+//		}),
+//	@AnalyzerDef(name = "standardAnalyzer",
+//		tokenizer = @TokenizerDef(factory = StandardTokenizerFactory.class),
+//		filters = {
+//			@TokenFilterDef(factory = LowerCaseFilterFactory.class),
+//		}) // Def
+//	}
+//)
+//@formatter:on
public class ResourceIndexedSearchParamString extends BaseResourceIndexedSearchParam {

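For reference, analyzer definitions like the ones above only take effect once an indexed field references them by name. The commit does not show that part of ResourceIndexedSearchParamString, but a plausible pairing, consistent with the field names queried from FhirSearchDao (myValueText, myValueTextEdgeNGram, myValueTextNGram), would look roughly like this sketch (hypothetical field and class names, not the actual entity source):

import org.hibernate.search.annotations.Analyzer;
import org.hibernate.search.annotations.Field;
import org.hibernate.search.annotations.Fields;
import org.hibernate.search.annotations.Index;
import org.hibernate.search.annotations.Store;

// Sketch only: one column mapped to several Lucene fields, each bound to one of
// the analyzers defined in the @AnalyzerDefs block above.
class AnalyzedStringFieldSketch {

	@Fields({
		@Field(name = "myValueText", index = Index.YES, store = Store.YES, analyzer = @Analyzer(definition = "standardAnalyzer")),
		@Field(name = "myValueTextEdgeNGram", index = Index.YES, store = Store.NO, analyzer = @Analyzer(definition = "autocompleteEdgeAnalyzer")),
		@Field(name = "myValueTextNGram", index = Index.YES, store = Store.NO, analyzer = @Analyzer(definition = "autocompleteNGramAnalyzer"))
	})
	private String myValueText;
}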
@@ -14,25 +14,52 @@ import java.util.List;
import javax.servlet.http.HttpServletRequest;

import org.hl7.fhir.instance.model.api.IIdType;
import org.junit.Ignore;
import org.junit.Test;

import ca.uhn.fhir.jpa.dao.FhirSearchDao.Suggestion;
import ca.uhn.fhir.jpa.dao.SearchParameterMap;
import ca.uhn.fhir.model.dstu21.composite.QuantityDt;
import ca.uhn.fhir.model.dstu21.resource.Device;
import ca.uhn.fhir.model.dstu21.resource.Media;
import ca.uhn.fhir.model.dstu21.resource.Observation;
import ca.uhn.fhir.model.dstu21.resource.Patient;
import ca.uhn.fhir.model.dstu21.valueset.ObservationStatusEnum;
import ca.uhn.fhir.model.primitive.Base64BinaryDt;
import ca.uhn.fhir.model.primitive.StringDt;
import ca.uhn.fhir.rest.param.StringAndListParam;
import ca.uhn.fhir.rest.param.StringOrListParam;
import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.rest.server.Constants;

public class FhirResourceDaoDstu21SearchFtTest extends BaseJpaDstu21Test {

	private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirResourceDaoDstu21SearchFtTest.class);

	@Test
	@Ignore
	public void testCodeTextSearch() {
		Observation obs1 = new Observation();
		obs1.getCode().setText("Systolic Blood Pressure");
		obs1.setStatus(ObservationStatusEnum.FINAL);
		obs1.setValue(new QuantityDt(123));
		IIdType id1 = myObservationDao.create(obs1).getId().toUnqualifiedVersionless();

		Observation obs2 = new Observation();
		obs2.getCode().setText("Diastolic Blood Pressure");
		obs2.setStatus(ObservationStatusEnum.FINAL);
		obs2.setValue(new QuantityDt(81));
		IIdType id2 = myObservationDao.create(obs2).getId().toUnqualifiedVersionless();

		SearchParameterMap map;

		map = new SearchParameterMap();
		map.add(Observation.SP_CODE, new TokenParam(null, "Systolic").setText(true));
		assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(map)), containsInAnyOrder(id1, id2));

	}

	@Test
	public void testSuggestIgnoresBase64Content() {
		Patient patient = new Patient();