Merge branch 'master' of github.com:jamesagnew/hapi-fhir

James Agnew 2016-08-22 09:19:22 -04:00
commit e07b5299ac
13 changed files with 11377 additions and 20 deletions

View File

@@ -27,7 +27,7 @@ public class RuleImplConditional extends BaseRule implements IAuthRule {
return null;
}
if (theOperation == RestOperationTypeEnum.UPDATE) {
if (theOperation == myOperationType) {
switch (myAppliesTo) {
case ALL_RESOURCES:
break;
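
This hunk generalizes the check from a hard-coded UPDATE to the rule's configured operation type (myOperationType), so a conditional rule built for creates is evaluated the same way as one built for updates. For context, here is a hedged sketch of the kind of request such a rule authorizes, issued with the HAPI generic client against a DSTU2 server; the base URL, identifier system, and value are made up, and package names follow the 2016-era layout.

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.model.dstu2.resource.Patient;
import ca.uhn.fhir.rest.client.IGenericClient;

public class ConditionalWriteSketch {

	public static void main(String[] args) {
		FhirContext ctx = FhirContext.forDstu2();
		// Hypothetical endpoint, used only for illustration
		IGenericClient client = ctx.newRestfulGenericClient("http://localhost:8080/baseDstu2");

		Patient p = new Patient();
		p.addIdentifier().setSystem("http://example.org/mrn").setValue("12345");

		// Conditional create: the server only creates the Patient if nothing
		// matches the criteria (sent as an If-None-Exist header)
		client.create()
			.resource(p)
			.conditionalByUrl("Patient?identifier=http://example.org/mrn|12345")
			.execute();

		// Conditional update: PUT against a search URL; the server resolves
		// which resource (if any) to update
		client.update()
			.resource(p)
			.conditionalByUrl("Patient?identifier=http://example.org/mrn|12345")
			.execute();
	}
}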

View File

@@ -1077,7 +1077,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
@SuppressWarnings("unchecked")
protected ResourceTable updateEntity(final IBaseResource theResource, ResourceTable theEntity, Date theDeletedTimestampOrNull, boolean thePerformIndexing,
boolean theUpdateVersion, Date theUpdateTime) {
ourLog.info("Starting entity update");
ourLog.debug("Starting entity update");
/*
* This should be the very first thing..
@@ -1094,7 +1094,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
}
if (theEntity.getPublished() == null) {
ourLog.info("Entity has published time: {}", new InstantDt(theUpdateTime));
ourLog.debug("Entity has published time: {}", new InstantDt(theUpdateTime));
theEntity.setPublished(theUpdateTime);
}

View File

@@ -27,7 +27,6 @@ import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.persistence.EntityManager;
@@ -67,7 +66,7 @@ public class FulltextSearchSvcImpl extends BaseHapiFhirDao<IBaseResource> implem
@PersistenceContext(type = PersistenceContextType.TRANSACTION)
private EntityManager myEntityManager;
private void addTextSearch(QueryBuilder theQueryBuilder, BooleanJunction<?> theBoolean, List<List<? extends IQueryParameterType>> theTerms, String theFieldName) {
private void addTextSearch(QueryBuilder theQueryBuilder, BooleanJunction<?> theBoolean, List<List<? extends IQueryParameterType>> theTerms, String theFieldName, String theFieldNameEdgeNGram, String theFieldNameNGram) {
if (theTerms == null) {
return;
}
@@ -81,8 +80,21 @@ public class FulltextSearchSvcImpl extends BaseHapiFhirDao<IBaseResource> implem
}
}
if (terms.isEmpty() == false) {
String joinedTerms = StringUtils.join(terms, ' ');
theBoolean.must(theQueryBuilder.keyword().onField(theFieldName).matching(joinedTerms).createQuery());
if (terms.size() == 1) {
//@formatter:off
Query textQuery = theQueryBuilder
.phrase()
.withSlop(2)
.onField(theFieldName).boostedTo(4.0f)
// .andField(theFieldNameEdgeNGram).boostedTo(2.0f)
// .andField(theFieldNameNGram).boostedTo(1.0f)
.sentence(terms.iterator().next().toLowerCase()).createQuery();
//@formatter:on
theBoolean.must(textQuery);
} else {
String joinedTerms = StringUtils.join(terms, ' ');
theBoolean.must(theQueryBuilder.keyword().onField(theFieldName).matching(joinedTerms).createQuery());
}
}
}
}
@@ -144,13 +156,13 @@ public class FulltextSearchSvcImpl extends BaseHapiFhirDao<IBaseResource> implem
* Handle _content parameter (resource body content)
*/
List<List<? extends IQueryParameterType>> contentAndTerms = theParams.remove(Constants.PARAM_CONTENT);
addTextSearch(qb, bool, contentAndTerms, "myContentText");
addTextSearch(qb, bool, contentAndTerms, "myContentText", "myContentTextEdgeNGram", "myContentTextNGram");
/*
* Handle _text parameter (resource narrative content)
*/
List<List<? extends IQueryParameterType>> textAndTerms = theParams.remove(Constants.PARAM_TEXT);
addTextSearch(qb, bool, textAndTerms, "myNarrativeText");
addTextSearch(qb, bool, textAndTerms, "myNarrativeText", "myNarrativeTextEdgeNGram", "myNarrativeTextNGram");
if (theReferencingPid != null) {
bool.must(qb.keyword().onField("myResourceLinks.myTargetResourcePid").matching(theReferencingPid).createQuery());
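
Taken together, the hunks above route _content and _text searches through the new addTextSearch() signature, which for a single search term builds a Hibernate Search phrase query with a slop of 2, boosted to 4.0 on the standard-analyzed field, and falls back to the earlier keyword query when several terms are joined. A self-contained sketch of that single-term branch follows; the FullTextEntityManager wiring here is illustrative, since in the DAO the QueryBuilder and field name are supplied by the caller.

import javax.persistence.EntityManager;

import org.apache.lucene.search.Query;
import org.hibernate.search.jpa.FullTextEntityManager;
import org.hibernate.search.jpa.Search;
import org.hibernate.search.query.dsl.QueryBuilder;

import ca.uhn.fhir.jpa.entity.ResourceTable;

public class PhraseQuerySketch {

	public static Query buildSingleTermQuery(EntityManager theEntityManager, String theTerm) {
		FullTextEntityManager ftem = Search.getFullTextEntityManager(theEntityManager);
		QueryBuilder qb = ftem.getSearchFactory().buildQueryBuilder().forEntity(ResourceTable.class).get();

		// Mirrors the diff: the slop of 2 tolerates small gaps or reordering,
		// and the standard-analyzed field carries the highest weight. The
		// commented-out andField() calls in the diff suggest the edge-n-gram
		// and n-gram fields may be blended in with lower boosts later.
		return qb
			.phrase()
			.withSlop(2)
			.onField("myContentText").boostedTo(4.0f)
			.sentence(theTerm.toLowerCase())
			.createQuery();
	}
}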

View File

@@ -174,7 +174,12 @@ public class ResourceTable extends BaseHasResource implements Serializable {
* Holds the narrative text only - Used for Fulltext searching but not directly stored in the DB
*/
@Transient()
@Field()
@Fields({
@Field(name = "myNarrativeText", index = org.hibernate.search.annotations.Index.YES, store = Store.YES, analyze = Analyze.YES, analyzer = @Analyzer(definition = "standardAnalyzer")),
@Field(name = "myNarrativeTextEdgeNGram", index = org.hibernate.search.annotations.Index.YES, store = Store.NO, analyze = Analyze.YES, analyzer = @Analyzer(definition = "autocompleteEdgeAnalyzer")),
@Field(name = "myNarrativeTextNGram", index = org.hibernate.search.annotations.Index.YES, store = Store.NO, analyze = Analyze.YES, analyzer = @Analyzer(definition = "autocompleteNGramAnalyzer")),
@Field(name = "myNarrativeTextPhonetic", index = org.hibernate.search.annotations.Index.YES, store = Store.NO, analyze = Analyze.YES, analyzer = @Analyzer(definition = "autocompletePhoneticAnalyzer"))
})
private String myNarrativeText;
@OneToMany(mappedBy = "myResource", cascade = {}, fetch = FetchType.LAZY, orphanRemoval = false)
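
The @Fields block above indexes the narrative under four differently analyzed names, which is what lets addTextSearch() favor phrase matches while (eventually) blending in n-gram and phonetic matches. The analyzer definitions themselves are not visible in the hunks shown here and presumably arrive elsewhere in this commit. As a hedged sketch only, definitions of this shape are typical in Hibernate Search 5; the gram sizes and phonetic encoder below are guesses, not values from this commit.

import org.apache.lucene.analysis.core.LowerCaseFilterFactory;
import org.apache.lucene.analysis.ngram.EdgeNGramFilterFactory;
import org.apache.lucene.analysis.ngram.NGramFilterFactory;
import org.apache.lucene.analysis.phonetic.PhoneticFilterFactory;
import org.apache.lucene.analysis.standard.StandardTokenizerFactory;
import org.hibernate.search.annotations.AnalyzerDef;
import org.hibernate.search.annotations.AnalyzerDefs;
import org.hibernate.search.annotations.Parameter;
import org.hibernate.search.annotations.TokenFilterDef;
import org.hibernate.search.annotations.TokenizerDef;

@AnalyzerDefs({
	@AnalyzerDef(name = "standardAnalyzer",
		tokenizer = @TokenizerDef(factory = StandardTokenizerFactory.class),
		filters = @TokenFilterDef(factory = LowerCaseFilterFactory.class)),
	@AnalyzerDef(name = "autocompleteEdgeAnalyzer",
		tokenizer = @TokenizerDef(factory = StandardTokenizerFactory.class),
		filters = {
			@TokenFilterDef(factory = LowerCaseFilterFactory.class),
			@TokenFilterDef(factory = EdgeNGramFilterFactory.class, params = {
				@Parameter(name = "minGramSize", value = "3"),
				@Parameter(name = "maxGramSize", value = "50") }) }),
	@AnalyzerDef(name = "autocompleteNGramAnalyzer",
		tokenizer = @TokenizerDef(factory = StandardTokenizerFactory.class),
		filters = {
			@TokenFilterDef(factory = LowerCaseFilterFactory.class),
			@TokenFilterDef(factory = NGramFilterFactory.class, params = {
				@Parameter(name = "minGramSize", value = "3"),
				@Parameter(name = "maxGramSize", value = "20") }) }),
	@AnalyzerDef(name = "autocompletePhoneticAnalyzer",
		tokenizer = @TokenizerDef(factory = StandardTokenizerFactory.class),
		filters = @TokenFilterDef(factory = PhoneticFilterFactory.class, params = {
			@Parameter(name = "encoder", value = "DoubleMetaphone") }))
})
public class AnalyzerDefSketch {
	// Host type for the sketch only; in the real codebase these definitions
	// would sit on an indexed entity such as ResourceTable so that Hibernate
	// Search picks them up during entity scanning.
}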

View File

@@ -48,7 +48,6 @@ public class FhirResourceDaoDstu3SearchFtTest extends BaseJpaDstu3Test {
@Test
@Ignore
public void testCodeTextSearch() {
Observation obs1 = new Observation();
obs1.getCode().setText("Systolic Blood Pressure");
@@ -66,15 +65,51 @@ public class FhirResourceDaoDstu3SearchFtTest extends BaseJpaDstu3Test {
SearchParameterMap map;
map = new SearchParameterMap();
map.add(Observation.SP_CODE, new TokenParam(null, "blood").setModifier(TokenParamModifier.TEXT));
map.add(Observation.SP_CODE, new TokenParam(null, "systolic").setModifier(TokenParamModifier.TEXT));
assertThat(toUnqualifiedVersionlessIdValues(myObservationDao.search(map)), containsInAnyOrder(toValues(id1)));
// map = new SearchParameterMap();
// map.add(Observation.SP_CODE, new TokenParam(null, "blood").setModifier(TokenParamModifier.TEXT));
// assertThat(toUnqualifiedVersionlessIdValues(myObservationDao.search(map)), containsInAnyOrder(toValues(id1, id2)));
//
// map = new SearchParameterMap();
// map.add(Observation.SP_CODE, new TokenParam(null, "blood").setModifier(TokenParamModifier.TEXT));
// assertThat(toUnqualifiedVersionlessIdValues(myPatientDao.search(map)), empty());
//
// map = new SearchParameterMap();
// map.add(Observation.SP_CODE, new TokenParam(null, "blood").setModifier(TokenParamModifier.TEXT));
// map.add(Constants.PARAM_CONTENT, new StringParam("obs1"));
// assertThat(toUnqualifiedVersionlessIdValues(myObservationDao.search(map)), containsInAnyOrder(toValues(id1)));
}
@Test
public void testResourceTextSearch() {
Observation obs1 = new Observation();
obs1.getCode().setText("Systolic Blood Pressure");
obs1.setStatus(ObservationStatus.FINAL);
obs1.setValue(new Quantity(123));
obs1.setComment("obs1");
IIdType id1 = myObservationDao.create(obs1, mySrd).getId().toUnqualifiedVersionless();
Observation obs2 = new Observation();
obs2.getCode().setText("Diastolic Blood Pressure");
obs2.setStatus(ObservationStatus.FINAL);
obs2.setValue(new Quantity(81));
IIdType id2 = myObservationDao.create(obs2, mySrd).getId().toUnqualifiedVersionless();
SearchParameterMap map;
map = new SearchParameterMap();
map.add(Constants.PARAM_CONTENT, new StringParam("systolic"));
assertThat(toUnqualifiedVersionlessIdValues(myObservationDao.search(map)), containsInAnyOrder(toValues(id1)));
map = new SearchParameterMap();
map.add(Constants.PARAM_CONTENT, new StringParam("blood"));
assertThat(toUnqualifiedVersionlessIdValues(myObservationDao.search(map)), containsInAnyOrder(toValues(id1, id2)));
map = new SearchParameterMap();
map.add(Observation.SP_CODE, new TokenParam(null, "blood").setModifier(TokenParamModifier.TEXT));
assertThat(toUnqualifiedVersionlessIdValues(myPatientDao.search(map)), empty());
map = new SearchParameterMap();
map.add(Observation.SP_CODE, new TokenParam(null, "blood").setModifier(TokenParamModifier.TEXT));
map.add(Constants.PARAM_CONTENT, new StringParam("obs1"));
assertThat(toUnqualifiedVersionlessIdValues(myObservationDao.search(map)), containsInAnyOrder(toValues(id1)));

View File

@@ -126,6 +126,11 @@ public class FhirSearchDaoDstu3Test extends BaseJpaDstu3Test {
patient.getText().setDivAsString("<div>AAAB<p>FOO</p> CCC </div>");
id2 = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless().getIdPartAsLong();
}
{
Patient patient = new Patient();
patient.getText().setDivAsString("<div>ZZYZXY</div>");
myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless().getIdPartAsLong();
}
SearchParameterMap map = new SearchParameterMap();
String resourceName = "Patient";

View File

@@ -0,0 +1,129 @@
{
"resourceType": "Patient",
"id": "Patient-66468",
"meta": {
"versionId": "5",
"lastUpdated": "2016-08-14T19:18:18.000-04:00"
},
"text": {
"status": "generated",
"div": "<div xmlns=\"http://www.w3.org/1999/xhtml\"> <div class=\"hapiHeaderText\"> Shawn Freeman <b>MACK </b> </div> <table class=\"hapiPropertyTable\"> <tbody> <tr> <td>Identifier</td> <td>000066468</td> </tr> <tr> <td>Address</td> <td> <span>170 NE Cherry Circle </span> <br/> <span>Detroit </span> <span>MI </span> </td> </tr> <tr> <td>Date of birth</td> <td> <span>12 April 2036</span> </td> </tr> </tbody> </table> </div>"
},
"extension": [
{
"url": "http://hl7.org/fhir/StructureDefinition/us-core-race",
"valueCodeableConcept": {
"coding": [
{
"system": "http://hl7.org/fhir/v3/Race",
"code": "2056-0",
"display": "Black"
}
]
}
},
{
"url": "http://hl7.org/fhir/StructureDefinition/us-core-ethnicity",
"valueCodeableConcept": {
"coding": [
{
"system": "http://hl7.org/fhir/v3/Ethnicity",
"code": "2186-5",
"display": "Not Hispanic or Latino"
}
]
}
},
{
"url": "http://hl7.org/fhir/StructureDefinition/us-core-religion",
"valueCodeableConcept": {
"coding": [
{
"system": "http://hl7.org/fhir/v3/ReligiousAffiliation",
"code": "1013",
"display": "Christian (non-Catholic, non-specific) "
}
]
}
}
],
"identifier": [
{
"use": "official",
"type": {
"coding": [
{
"system": "http://hl7.org/fhir/identifier-type",
"code": "SB",
"display": "Social Beneficiary Identifier"
}
],
"text": "US Social Security Number"
},
"system": "http://hl7.org/fhir/sid/us-ssn",
"value": "000066468"
},
{
"use": "official",
"type": {
"coding": [
{
"system": "http://hl7.org/fhir/identifier-type",
"code": "SB",
"display": "Social Beneficiary Identifier"
}
],
"text": "Michigan Common Key Service Identifier"
},
"system": "http://mihin.org/fhir/cks",
"value": "31d50ab72d5a4239b36594f8f67eb3ab"
}
],
"active": false,
"name": [
{
"family": [
"Mack"
],
"given": [
"Shawn",
"Freeman"
]
}
],
"telecom": [
{
"system": "phone",
"value": "313.555.0956",
"use": "home"
},
{
"system": "phone",
"value": "313.555.8028",
"use": "work"
},
{
"extension": [
{
"url": "http://hl7.org/fhir/StructureDefinition/us-core-direct",
"valueBoolean": true
}
],
"system": "email",
"value": "Shawn.F.Mack@direct.mihintest.org",
"use": "home"
}
],
"gender": "male",
"birthDate": "2036-04-12",
"address": [
{
"line": [
"170 NE Cherry Circle"
],
"city": "Detroit",
"state": "MI",
"postalCode": "48227"
}
]
}

View File

@@ -1053,7 +1053,7 @@ public class AuthorizationInterceptorDstu2Test {
httpPost.setEntity(createFhirResourceEntity(createPatient(null)));
status = ourClient.execute(httpPost);
response = extractResponseAndClose(status);
assertEquals(200, status.getStatusLine().getStatusCode());
assertEquals(201, status.getStatusLine().getStatusCode());
assertTrue(ourHitMethod);
}
@@ -1078,11 +1078,13 @@ public class AuthorizationInterceptorDstu2Test {
HttpResponse status;
String response;
ourReturn = Arrays.asList(createPatient(1));
ourHitMethod = false;
httpDelete = new HttpDelete("http://localhost:" + ourPort + "/Patient?foo=bar");
status = ourClient.execute(httpDelete);
response = extractResponseAndClose(status);
assertEquals(200, status.getStatusLine().getStatusCode());
assertEquals(204, status.getStatusLine().getStatusCode());
assertTrue(ourHitMethod);
}

View File

@@ -161,6 +161,10 @@
if it contained custom fields that also used custom
types. Thanks to GitHub user @sjanic for reporting!
</action>
<action type="add">
Improve handling of _text and _content searches in the JPA server to do better
matching on partial strings
</action>
<action type="add">
Servers in STU3 mode will now ignore any ID or VersionID found in the
resource body provided by the client when processing FHIR
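
To put the _text/_content changelog entry above in client terms, a search like the following exercises the new matching path. This is a hedged sketch: the base URL and search term are illustrative, and how well partial strings match depends on the analyzers this commit configures.

import org.hl7.fhir.dstu3.model.Bundle;
import org.hl7.fhir.dstu3.model.Observation;

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.client.IGenericClient;
import ca.uhn.fhir.rest.gclient.StringClientParam;

public class FullTextSearchSketch {

	public static void main(String[] args) {
		FhirContext ctx = FhirContext.forDstu3();
		IGenericClient client = ctx.newRestfulGenericClient("http://localhost:8080/baseDstu3");

		// Equivalent to GET [base]/Observation?_content=systolic
		Bundle results = client.search()
			.forResource(Observation.class)
			.where(new StringClientParam("_content").matches().value("systolic"))
			.returnBundle(Bundle.class)
			.execute();

		System.out.println("Matches: " + results.getEntry().size());
	}
}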

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large