Merge branch 'master' of github.com:jamesagnew/hapi-fhir

James Agnew 2015-12-10 08:24:30 -05:00
commit 117f9eb977
18 changed files with 307 additions and 65 deletions


@@ -6,7 +6,7 @@ jdk:
- oraclejdk8
env:
global:
- MAVEN_OPTS="-XX:MaxPermSize=512m -Xmx2g"
- MAVEN_OPTS="-Xmx1500m"
cache:
directories:


@@ -40,7 +40,7 @@ abstract class BaseParam implements IQueryParameterType {
@Override
public final String getQueryParameterQualifier() {
if (myMissing != null) {
if (myMissing != null && myMissing.booleanValue()) {
return Constants.PARAMQUALIFIER_MISSING;
}
return doGetQueryParameterQualifier();
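The tightened check above means the :missing qualifier is only emitted when the missing flag is explicitly Boolean.TRUE rather than merely non-null. A minimal sketch of the resulting behaviour (not part of the commit), using only BaseParam/StringParam methods that appear elsewhere in this diff:

    // A param whose missing flag is explicitly set to FALSE
    StringParam param = new StringParam("Smith");
    param.setMissing(Boolean.FALSE);
    // Before this fix any non-null myMissing produced ":missing"; now only TRUE does,
    // so the call falls through to doGetQueryParameterQualifier() and yields null here.
    String qualifier = param.getQueryParameterQualifier();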


@@ -19,8 +19,7 @@ package ca.uhn.fhir.rest.param;
* limitations under the License.
* #L%
*/
import static org.apache.commons.lang3.StringUtils.*;
import static org.apache.commons.lang3.StringUtils.defaultString;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.builder.ToStringBuilder;
@@ -34,6 +33,7 @@ public class StringParam extends BaseParam implements IQueryParameterType {
private boolean myExact;
private String myValue;
private boolean myContains;
public StringParam() {
}
@@ -51,6 +51,8 @@ public class StringParam extends BaseParam implements IQueryParameterType {
String doGetQueryParameterQualifier() {
if (isExact()) {
return Constants.PARAMQUALIFIER_STRING_EXACT;
} else if (isContains()) {
return Constants.PARAMQUALIFIER_STRING_CONTAINS;
} else {
return null;
}
@@ -68,6 +70,11 @@ public class StringParam extends BaseParam implements IQueryParameterType {
} else {
setExact(false);
}
if (Constants.PARAMQUALIFIER_STRING_CONTAINS.equals(theQualifier)) {
setContains(true);
} else {
setContains(false);
}
myValue = ParameterUtil.unescape(theValue);
}
@@ -91,12 +98,25 @@ public class StringParam extends BaseParam implements IQueryParameterType {
return myExact;
}
public void setExact(boolean theExact) {
public StringParam setExact(boolean theExact) {
myExact = theExact;
if (myExact) {
setContains(false);
setMissing(null);
}
return this;
}
public void setValue(String theValue) {
/**
* String parameter modifier <code>:contains</code>
*/
public boolean isContains() {
return myContains;
}
public StringParam setValue(String theValue) {
myValue = theValue;
return this;
}
@Override
@@ -106,10 +126,25 @@ public class StringParam extends BaseParam implements IQueryParameterType {
if (myExact) {
builder.append("exact", myExact);
}
if (myContains) {
builder.append("contains", myContains);
}
if (getMissing() != null) {
builder.append("missing", getMissing().booleanValue());
}
return builder.toString();
}
/**
* String parameter modifier <code>:contains</code>
*/
public StringParam setContains(boolean theContains) {
myContains = theContains;
if (myContains) {
setExact(false);
setMissing(null);
}
return this;
}
}
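Taken together, the StringParam changes add the :contains modifier and make the setters fluent, with setExact() and setContains() clearing each other and resetting the missing flag. A minimal usage sketch built only from the methods visible in this hunk:

    // Client-side construction of a ":contains" string search
    StringParam value = new StringParam("press").setContains(true);
    value.getQueryParameterQualifier();   // ":contains"
    value.getValue();                     // "press"

    // Switching to :exact afterwards turns the contains modifier off again
    value.setExact(true);
    value.isContains();                   // false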


@@ -38,7 +38,11 @@ public class TokenParam extends BaseParam implements IQueryParameterType {
private boolean myText;
private String myValue;
/**
* Constructor
*/
public TokenParam() {
super();
}
/**
@@ -143,16 +147,19 @@ public class TokenParam extends BaseParam implements IQueryParameterType {
return myText;
}
public void setSystem(String theSystem) {
public TokenParam setSystem(String theSystem) {
mySystem = theSystem;
return this;
}
public void setText(boolean theText) {
public TokenParam setText(boolean theText) {
myText = theText;
return this;
}
public void setValue(String theValue) {
public TokenParam setValue(String theValue) {
myValue = theValue;
return this;
}
@Override
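TokenParam receives the same fluent treatment, which, combined with the existing :text support, allows the inline construction style used by the new full-text tests further down. A short sketch:

    // A ":text" token search built in one expression (mirrors the DSTU2.1 FT tests below)
    TokenParam codeText = new TokenParam(null, "blood").setText(true);

Returning this from setSystem(), setText() and setValue() lets callers chain the setters without temporaries, matching the fluent style now used by StringParam above.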


@@ -127,6 +127,7 @@ public class Constants {
public static final String PARAMQUALIFIER_MISSING_FALSE = "false";
public static final String PARAMQUALIFIER_MISSING_TRUE = "true";
public static final String PARAMQUALIFIER_STRING_EXACT = ":exact";
public static final String PARAMQUALIFIER_STRING_CONTAINS = ":contains";
public static final String PARAMQUALIFIER_TOKEN_TEXT = ":text";
public static final int STATUS_HTTP_200_OK = 200;
public static final int STATUS_HTTP_201_CREATED = 201;


@@ -15,6 +15,7 @@ import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.text.WordUtils;
import org.fusesource.jansi.Ansi;
import org.fusesource.jansi.Ansi.Color;
import org.fusesource.jansi.AnsiConsole;
import org.slf4j.LoggerFactory;
@@ -159,6 +160,13 @@ public class App {
}
}
if (command == null) {
System.out.println("Unrecognized command: " + ansi().bold().fg(Color.RED) + theArgs[0] + ansi().boldOff().fg(Ansi.Color.WHITE));
System.out.println();
logUsage();
return;
}
Options options = command.getOptions();
DefaultParser parser = new DefaultParser();
CommandLine parsedOptions;


@@ -36,7 +36,6 @@ import javax.persistence.PersistenceContextType;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.Validate;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.Token;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.highlight.Formatter;
import org.apache.lucene.search.highlight.Highlighter;
@@ -53,11 +52,13 @@ import org.springframework.transaction.annotation.Transactional;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import ca.uhn.fhir.jpa.entity.ResourceIndexedSearchParamString;
import ca.uhn.fhir.jpa.entity.ResourceTable;
import ca.uhn.fhir.model.api.IQueryParameterType;
import ca.uhn.fhir.model.dstu.resource.BaseResource;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.rest.server.Constants;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
@@ -68,11 +69,11 @@ public class FhirSearchDao extends BaseHapiFhirDao<IBaseResource> implements ISe
@PersistenceContext(type = PersistenceContextType.TRANSACTION)
private EntityManager myEntityManager;
private void addTextSearch(QueryBuilder qb, BooleanJunction<?> bool, List<List<? extends IQueryParameterType>> contentAndTerms, String field) {
if (contentAndTerms == null) {
private void addTextSearch(QueryBuilder theQueryBuilder, BooleanJunction<?> theBoolean, List<List<? extends IQueryParameterType>> theTerms, String theFieldName) {
if (theTerms == null) {
return;
}
for (List<? extends IQueryParameterType> nextAnd : contentAndTerms) {
for (List<? extends IQueryParameterType> nextAnd : theTerms) {
Set<String> terms = new HashSet<String>();
for (IQueryParameterType nextOr : nextAnd) {
StringParam nextOrString = (StringParam) nextOr;
@@ -83,7 +84,7 @@ public class FhirSearchDao extends BaseHapiFhirDao<IBaseResource> implements ISe
}
if (terms.isEmpty() == false) {
String joinedTerms = StringUtils.join(terms, ' ');
bool.must(qb.keyword().onField(field).matching(joinedTerms).createQuery());
theBoolean.must(theQueryBuilder.keyword().onField(theFieldName).matching(joinedTerms).createQuery());
}
}
}
@@ -91,13 +92,65 @@ public class FhirSearchDao extends BaseHapiFhirDao<IBaseResource> implements ISe
private List<Long> doSearch(String theResourceName, SearchParameterMap theParams, Long theReferencingPid) {
FullTextEntityManager em = org.hibernate.search.jpa.Search.getFullTextEntityManager(myEntityManager);
QueryBuilder qb = em.getSearchFactory().buildQueryBuilder().forEntity(ResourceTable.class).get();
List<Long> pids = null;
/*
* Handle textual params
*/
/*
for (String nextParamName : theParams.keySet()) {
for (List<? extends IQueryParameterType> nextAndList : theParams.get(nextParamName)) {
for (Iterator<? extends IQueryParameterType> orIterator = nextAndList.iterator(); orIterator.hasNext();) {
IQueryParameterType nextParam = orIterator.next();
if (nextParam instanceof TokenParam) {
TokenParam nextTokenParam = (TokenParam) nextParam;
if (nextTokenParam.isText()) {
orIterator.remove();
QueryBuilder qb = em.getSearchFactory().buildQueryBuilder().forEntity(ResourceIndexedSearchParamString.class).get();
BooleanJunction<?> bool = qb.bool();
bool.must(qb.keyword().onField("myParamName").matching(nextParamName).createQuery());
if (isNotBlank(theResourceName)) {
bool.must(qb.keyword().onField("myResourceType").matching(theResourceName).createQuery());
}
//
//@formatter:off
String value = nextTokenParam.getValue().toLowerCase();
bool.must(qb.keyword().onField("myValueTextEdgeNGram").matching(value).createQuery());
//@formatter:on
FullTextQuery ftq = em.createFullTextQuery(bool.createQuery(), ResourceIndexedSearchParamString.class);
List<?> resultList = ftq.getResultList();
pids = new ArrayList<Long>();
for (Object next : resultList) {
ResourceIndexedSearchParamString nextAsArray = (ResourceIndexedSearchParamString) next;
pids.add(nextAsArray.getResourcePid());
}
}
}
}
}
}
if (pids != null && pids.isEmpty()) {
return pids;
}
*/
QueryBuilder qb = em.getSearchFactory().buildQueryBuilder().forEntity(ResourceTable.class).get();
BooleanJunction<?> bool = qb.bool();
/*
* Handle _content parameter (resource body content)
*/
List<List<? extends IQueryParameterType>> contentAndTerms = theParams.remove(Constants.PARAM_CONTENT);
addTextSearch(qb, bool, contentAndTerms, "myContentText");
/*
* Handle _text parameter (resource narrative content)
*/
List<List<? extends IQueryParameterType>> textAndTerms = theParams.remove(Constants.PARAM_TEXT);
addTextSearch(qb, bool, textAndTerms, "myNarrativeText");
@@ -106,7 +159,7 @@ public class FhirSearchDao extends BaseHapiFhirDao<IBaseResource> implements ISe
}
if (bool.isEmpty()) {
return null;
return pids;
}
if (isNotBlank(theResourceName)) {
@@ -122,11 +175,13 @@ public class FhirSearchDao extends BaseHapiFhirDao<IBaseResource> implements ISe
// execute search
List<?> result = jpaQuery.getResultList();
HashSet<Long> pidsSet = pids != null ? new HashSet<Long>(pids) : null;
ArrayList<Long> retVal = new ArrayList<Long>();
for (Object object : result) {
Object[] nextArray = (Object[]) object;
Long next = (Long) nextArray[0];
if (next != null) {
if (next != null && (pidsSet == null || pidsSet.contains(next))) {
retVal.add(next);
}
}
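In the reworked doSearch(), the _content and _text parameters are removed from the incoming SearchParameterMap, turned into Hibernate Search keyword queries against the myContentText and myNarrativeText fields, and any precomputed PID list is intersected with the Lucene hits. A hedged sketch of how a caller would drive this path, assuming the DAO wiring shown in the new DSTU2.1 FT tests later in this commit:

    SearchParameterMap map = new SearchParameterMap();
    map.add(Constants.PARAM_CONTENT, new StringParam("blood pressure")); // matches indexed resource body text
    map.add(Constants.PARAM_TEXT, new StringParam("Systolic"));          // matches indexed narrative text
    // myObservationDao.search(map) would route both parameters through doSearch(...) above,
    // requiring each term set to match before a resource PID is returned.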


@@ -32,6 +32,7 @@ import javax.persistence.MappedSuperclass;
import org.hibernate.search.annotations.ContainedIn;
import org.hibernate.search.annotations.Field;
import org.hibernate.search.annotations.Store;
@MappedSuperclass
public abstract class BaseResourceIndexedSearchParam implements Serializable {
@@ -54,11 +55,10 @@ public abstract class BaseResourceIndexedSearchParam implements Serializable {
@ContainedIn
private ResourceTable myResource;
@Field
@Column(name = "RES_ID", insertable = false, updatable = false)
private Long myResourcePid;
@Field
@Field()
@Column(name = "RES_TYPE", nullable = false)
private String myResourceType;
@@ -74,6 +74,10 @@ public abstract class BaseResourceIndexedSearchParam implements Serializable {
return myResource;
}
public Long getResourcePid() {
return myResourcePid;
}
public void setParamName(String theName) {
myParamName = theName;
}

View File

@@ -32,15 +32,81 @@ import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import org.apache.lucene.analysis.core.LowerCaseFilterFactory;
import org.apache.lucene.analysis.core.StopFilterFactory;
import org.apache.lucene.analysis.miscellaneous.WordDelimiterFilterFactory;
import org.apache.lucene.analysis.ngram.EdgeNGramFilterFactory;
import org.apache.lucene.analysis.ngram.NGramFilterFactory;
import org.apache.lucene.analysis.pattern.PatternTokenizerFactory;
import org.apache.lucene.analysis.phonetic.PhoneticFilterFactory;
import org.apache.lucene.analysis.snowball.SnowballPorterFilterFactory;
import org.apache.lucene.analysis.standard.StandardFilterFactory;
import org.apache.lucene.analysis.standard.StandardTokenizerFactory;
import org.hibernate.search.annotations.Analyze;
import org.hibernate.search.annotations.Analyzer;
import org.hibernate.search.annotations.AnalyzerDef;
import org.hibernate.search.annotations.AnalyzerDefs;
import org.hibernate.search.annotations.ContainedIn;
import org.hibernate.search.annotations.Field;
import org.hibernate.search.annotations.Fields;
import org.hibernate.search.annotations.Indexed;
import org.hibernate.search.annotations.Parameter;
import org.hibernate.search.annotations.Store;
import org.hibernate.search.annotations.TokenFilterDef;
import org.hibernate.search.annotations.TokenizerDef;
//@formatter:off
@Embeddable
@Entity
@Table(name = "HFJ_SPIDX_STRING"/* , indexes= {@Index(name="IDX_SP_STRING", columnList="SP_VALUE_NORMALIZED")} */)
@org.hibernate.annotations.Table(appliesTo = "HFJ_SPIDX_STRING", indexes = {
@org.hibernate.annotations.Index(name = "IDX_SP_STRING", columnNames = { "RES_TYPE", "SP_NAME", "SP_VALUE_NORMALIZED" })
})
@Indexed()
//@AnalyzerDefs({
// @AnalyzerDef(name = "autocompleteEdgeAnalyzer",
// tokenizer = @TokenizerDef(factory = PatternTokenizerFactory.class, params= {
// @Parameter(name="pattern", value="(.*)"),
// @Parameter(name="group", value="1")
// }),
// filters = {
// @TokenFilterDef(factory = LowerCaseFilterFactory.class),
// @TokenFilterDef(factory = StopFilterFactory.class),
// @TokenFilterDef(factory = EdgeNGramFilterFactory.class, params = {
// @Parameter(name = "minGramSize", value = "3"),
// @Parameter(name = "maxGramSize", value = "50")
// }),
// }),
// @AnalyzerDef(name = "autocompletePhoneticAnalyzer",
// tokenizer = @TokenizerDef(factory=StandardTokenizerFactory.class),
// filters = {
// @TokenFilterDef(factory=StandardFilterFactory.class),
// @TokenFilterDef(factory=StopFilterFactory.class),
// @TokenFilterDef(factory=PhoneticFilterFactory.class, params = {
// @Parameter(name="encoder", value="DoubleMetaphone")
// }),
// @TokenFilterDef(factory=SnowballPorterFilterFactory.class, params = {
// @Parameter(name="language", value="English")
// })
// }),
// @AnalyzerDef(name = "autocompleteNGramAnalyzer",
// tokenizer = @TokenizerDef(factory = StandardTokenizerFactory.class),
// filters = {
// @TokenFilterDef(factory = WordDelimiterFilterFactory.class),
// @TokenFilterDef(factory = LowerCaseFilterFactory.class),
// @TokenFilterDef(factory = NGramFilterFactory.class, params = {
// @Parameter(name = "minGramSize", value = "3"),
// @Parameter(name = "maxGramSize", value = "20")
// }),
// }),
// @AnalyzerDef(name = "standardAnalyzer",
// tokenizer = @TokenizerDef(factory = StandardTokenizerFactory.class),
// filters = {
// @TokenFilterDef(factory = LowerCaseFilterFactory.class),
// }) // Def
// }
//)
//@formatter:on
public class ResourceIndexedSearchParamString extends BaseResourceIndexedSearchParam {
/*
@@ -51,6 +117,12 @@ public class ResourceIndexedSearchParamString extends BaseResourceIndexedSearchP
private static final long serialVersionUID = 1L;
@Column(name = "SP_VALUE_EXACT", length = MAX_LENGTH, nullable = true)
@Fields({
@Field(name = "myValueText", index = org.hibernate.search.annotations.Index.YES, store = Store.YES, analyze = Analyze.YES, analyzer = @Analyzer(definition = "standardAnalyzer")),
@Field(name = "myValueTextEdgeNGram", index = org.hibernate.search.annotations.Index.YES, store = Store.NO, analyze = Analyze.YES, analyzer = @Analyzer(definition = "autocompleteEdgeAnalyzer")),
@Field(name = "myValueTextNGram", index = org.hibernate.search.annotations.Index.YES, store = Store.NO, analyze = Analyze.YES, analyzer = @Analyzer(definition = "autocompleteNGramAnalyzer")),
@Field(name = "myValueTextPhonetic", index = org.hibernate.search.annotations.Index.YES, store = Store.NO, analyze = Analyze.YES, analyzer = @Analyzer(definition = "autocompletePhoneticAnalyzer"))
})
private String myValueExact;
@Column(name = "SP_VALUE_NORMALIZED", length = MAX_LENGTH, nullable = true)
@@ -129,4 +201,5 @@ public class ResourceIndexedSearchParamString extends BaseResourceIndexedSearchP
b.append("value", getValueNormalized());
return b.build();
}
}
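The @Fields block above indexes SP_VALUE_EXACT into several additional analyzed Lucene fields (standard, edge n-gram, n-gram and phonetic). The analyzer definitions referenced by name are commented out on this entity and declared elsewhere in the project, and the commented-out :text branch in FhirSearchDao above queries the edge n-gram field. A sketch of such a query, using only Hibernate Search calls already visible in this diff (the injected myEntityManager is assumed from FhirSearchDao):

    FullTextEntityManager em = org.hibernate.search.jpa.Search.getFullTextEntityManager(myEntityManager);
    QueryBuilder qb = em.getSearchFactory().buildQueryBuilder()
            .forEntity(ResourceIndexedSearchParamString.class).get();
    org.apache.lucene.search.Query query = qb.keyword()
            .onField("myValueTextEdgeNGram")   // one of the fields declared above
            .matching("systol")
            .createQuery();
    List<?> hits = em.createFullTextQuery(query, ResourceIndexedSearchParamString.class).getResultList();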


@@ -48,7 +48,6 @@ import org.apache.lucene.analysis.ngram.EdgeNGramFilterFactory;
import org.apache.lucene.analysis.ngram.NGramFilterFactory;
import org.apache.lucene.analysis.pattern.PatternTokenizerFactory;
import org.apache.lucene.analysis.phonetic.PhoneticFilterFactory;
import org.apache.lucene.analysis.shingle.ShingleFilterFactory;
import org.apache.lucene.analysis.snowball.SnowballPorterFilterFactory;
import org.apache.lucene.analysis.standard.StandardFilterFactory;
import org.apache.lucene.analysis.standard.StandardTokenizerFactory;
@@ -86,11 +85,6 @@ import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
@Parameter(name="group", value="1")
}),
filters = {
// @TokenFilterDef(factory = PatternReplaceFilterFactory.class, params = {
// @Parameter(name = "pattern",value = "([^a-zA-Z0-9\\.])"),
// @Parameter(name = "replacement", value = " "),
// @Parameter(name = "replace", value = "all")
// }),
@TokenFilterDef(factory = LowerCaseFilterFactory.class),
@TokenFilterDef(factory = StopFilterFactory.class),
@TokenFilterDef(factory = EdgeNGramFilterFactory.class, params = {
@@ -119,22 +113,11 @@ import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
@Parameter(name = "minGramSize", value = "3"),
@Parameter(name = "maxGramSize", value = "20")
}),
// @TokenFilterDef(factory = PatternReplaceFilterFactory.class, params = {
// @Parameter(name = "pattern",value = "([^a-zA-Z0-9\\.])"),
// @Parameter(name = "replacement", value = " "),
// @Parameter(name = "replace", value = "all")
// })
}),
@AnalyzerDef(name = "standardAnalyzer",
tokenizer = @TokenizerDef(factory = StandardTokenizerFactory.class),
filters = {
// @TokenFilterDef(factory = WordDelimiterFilterFactory.class),
@TokenFilterDef(factory = LowerCaseFilterFactory.class),
// @TokenFilterDef(factory = PatternReplaceFilterFactory.class, params = {
// @Parameter(name = "pattern", value = "([^a-zA-Z0-9\\.])"),
// @Parameter(name = "replacement", value = " "),
// @Parameter(name = "replace", value = "all")
// })
}) // Def
}
)


@@ -191,6 +191,7 @@ public abstract class BaseJpaDstu2Test extends BaseJpaTest {
public void beforeFlushFT() {
FullTextEntityManager ftem = Search.getFullTextEntityManager(myEntityManager);
ftem.purgeAll(ResourceTable.class);
ftem.purgeAll(ResourceIndexedSearchParamString.class);
ftem.flushToIndexes();
myDaoConfig.setSchedulingDisabled(true);


@@ -185,6 +185,7 @@ public abstract class BaseJpaDstu21Test extends BaseJpaTest {
public void beforeFlushFT() {
FullTextEntityManager ftem = Search.getFullTextEntityManager(myEntityManager);
ftem.purgeAll(ResourceTable.class);
ftem.purgeAll(ResourceIndexedSearchParamString.class);
ftem.flushToIndexes();
myDaoConfig.setSchedulingDisabled(true);


@@ -14,25 +14,86 @@ import java.util.List;
import javax.servlet.http.HttpServletRequest;
import org.hl7.fhir.instance.model.api.IIdType;
import org.junit.Ignore;
import org.junit.Test;
import ca.uhn.fhir.jpa.dao.FhirSearchDao.Suggestion;
import ca.uhn.fhir.jpa.dao.SearchParameterMap;
import ca.uhn.fhir.model.dstu21.composite.QuantityDt;
import ca.uhn.fhir.model.dstu21.resource.Device;
import ca.uhn.fhir.model.dstu21.resource.Media;
import ca.uhn.fhir.model.dstu21.resource.Observation;
import ca.uhn.fhir.model.dstu21.resource.Patient;
import ca.uhn.fhir.model.dstu21.valueset.ObservationStatusEnum;
import ca.uhn.fhir.model.primitive.Base64BinaryDt;
import ca.uhn.fhir.model.primitive.StringDt;
import ca.uhn.fhir.rest.param.StringAndListParam;
import ca.uhn.fhir.rest.param.StringOrListParam;
import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.rest.server.Constants;
public class FhirResourceDaoDstu21SearchFtTest extends BaseJpaDstu21Test {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirResourceDaoDstu21SearchFtTest.class);
@Test
@Ignore
public void testCodeTextSearch() {
Observation obs1 = new Observation();
obs1.getCode().setText("Systolic Blood Pressure");
obs1.setStatus(ObservationStatusEnum.FINAL);
obs1.setValue(new QuantityDt(123));
obs1.setComments("obs1");
IIdType id1 = myObservationDao.create(obs1).getId().toUnqualifiedVersionless();
Observation obs2 = new Observation();
obs2.getCode().setText("Diastolic Blood Pressure");
obs2.setStatus(ObservationStatusEnum.FINAL);
obs2.setValue(new QuantityDt(81));
IIdType id2 = myObservationDao.create(obs2).getId().toUnqualifiedVersionless();
SearchParameterMap map;
map = new SearchParameterMap();
map.add(Observation.SP_CODE, new TokenParam(null, "blood").setText(true));
assertThat(toUnqualifiedVersionlessIds(myObservationDao.search(map)), containsInAnyOrder(id1, id2));
map = new SearchParameterMap();
map.add(Observation.SP_CODE, new TokenParam(null, "blood").setText(true));
assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(map)), empty());
map = new SearchParameterMap();
map.add(Observation.SP_CODE, new TokenParam(null, "blood").setText(true));
map.add(Constants.PARAM_CONTENT, new StringParam("obs1"));
assertThat(toUnqualifiedVersionlessIds(myObservationDao.search(map)), containsInAnyOrder(id1));
}
@Test
@Ignore
public void testStringTextSearch() {
Observation obs1 = new Observation();
obs1.getCode().setText("AAAAA");
obs1.setValue(new StringDt("Systolic Blood Pressure"));
obs1.setStatus(ObservationStatusEnum.FINAL);
IIdType id1 = myObservationDao.create(obs1).getId().toUnqualifiedVersionless();
Observation obs2 = new Observation();
obs2.getCode().setText("AAAAA");
obs2.setValue(new StringDt("Diastolic Blood Pressure"));
obs2.setStatus(ObservationStatusEnum.FINAL);
IIdType id2 = myObservationDao.create(obs2).getId().toUnqualifiedVersionless();
SearchParameterMap map;
map = new SearchParameterMap();
map.add(Observation.SP_VALUE_STRING, new StringParam("sure").setContains(true));
assertThat(toUnqualifiedVersionlessIds(myObservationDao.search(map)), containsInAnyOrder(id1, id2));
}
@Test
public void testSuggestIgnoresBase64Content() {
Patient patient = new Patient();


@@ -36,6 +36,18 @@ public class StringParameterTest {
private static Server ourServer;
@Test
public void testContains() {
StringParam sp = new StringParam("VAL");
sp.setContains(true);
assertEquals(":contains", sp.getQueryParameterQualifier());
sp = new StringParam("VAL");
sp.setValueAsQueryToken(":contains", "VAL");
assertEquals(true, sp.isContains());
assertEquals("VAL", sp.getValue());
}
@Test
public void testRawString() throws Exception {
{


@@ -55,7 +55,7 @@ public class ServerProfileProvider implements IResourceProvider {
@Read()
public StructureDefinition getProfileById(HttpServletRequest theRequest, @IdParam IdDt theId) {
RuntimeResourceDefinition retVal = myContext.getResourceDefinitionById(theId.getValue());
RuntimeResourceDefinition retVal = myContext.getResourceDefinitionById(theId.getIdPart());
if (retVal==null) {
return null;
}
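The one-line fix above (repeated twice more below for the other ServerProfileProvider copies) passes only the logical id to getResourceDefinitionById(); getValue() on an IdDt returns the full id including the resource type prefix, which does not match the bare ids the profiles are registered under. Illustration (the id value is hypothetical):

    IdDt profileId = new IdDt("StructureDefinition/Patient");
    profileId.getValue();    // "StructureDefinition/Patient" - full id, type prefix included
    profileId.getIdPart();   // "Patient" - the bare id part passed after this fix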


@@ -55,7 +55,7 @@ public class ServerProfileProvider implements IResourceProvider {
@Read()
public StructureDefinition getProfileById(HttpServletRequest theRequest, @IdParam IdDt theId) {
RuntimeResourceDefinition retVal = myContext.getResourceDefinitionById(theId.getValue());
RuntimeResourceDefinition retVal = myContext.getResourceDefinitionById(theId.getIdPart());
if (retVal==null) {
return null;
}


@@ -56,7 +56,7 @@ public class ServerProfileProvider implements IResourceProvider {
@Read()
public StructureDefinition getProfileById(HttpServletRequest theRequest, @IdParam IdDt theId) {
RuntimeResourceDefinition retVal = myContext.getResourceDefinitionById(theId.getValue());
RuntimeResourceDefinition retVal = myContext.getResourceDefinitionById(theId.getIdPart());
if (retVal==null) {
return null;
}


@@ -742,6 +742,7 @@
<redirectTestOutputToFile>true</redirectTestOutputToFile>
<runOrder>random</runOrder>
<argLine>-Dfile.encoding=UTF-8</argLine>
<!--<reuseForks>false</reuseForks>-->
</configuration>
</plugin>
<plugin>