Merge branch 'master' of github.com:jamesagnew/hapi-fhir

James Agnew 2015-10-13 12:13:43 -04:00
commit 0430eb1626
96 changed files with 2758 additions and 1246 deletions

1
.gitignore vendored
View File

@@ -14,6 +14,7 @@ nohup.out
.DS_Store .DS_Store
*.orig *.orig
tmp.txt tmp.txt
*.hprof
# Vagrant stuff. # Vagrant stuff.
.vagrant .vagrant

View File

@@ -56,6 +56,12 @@
<groupId>org.slf4j</groupId> <groupId>org.slf4j</groupId>
<artifactId>slf4j-android</artifactId> <artifactId>slf4j-android</artifactId>
</dependency> </dependency>
<dependency>
<groupId>net.sourceforge.cobertura</groupId>
<artifactId>cobertura</artifactId>
<version>2.1.1</version>
<optional>true</optional>
</dependency>
<dependency> <dependency>
<groupId>org.slf4j</groupId> <groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId> <artifactId>slf4j-api</artifactId>
@@ -74,7 +80,11 @@
</dependency> </dependency>
<!-- Android does not come with the Servlet API bundled, and MethodUtil <!-- Android does not come with the Servlet API bundled, and MethodUtil
requires it --> requires it.
We provide a dummy implementation of servlet api to reduce size
and prevent from rewriting the BaseMethodBinding and friends.
-->
<dependency> <dependency>
<groupId>javax.servlet</groupId> <groupId>javax.servlet</groupId>
<artifactId>javax.servlet-api</artifactId> <artifactId>javax.servlet-api</artifactId>
@@ -120,138 +130,134 @@
<groupId>org.apache.maven.plugins</groupId> <groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-shade-plugin</artifactId> <artifactId>maven-shade-plugin</artifactId>
<version>2.3</version> <version>2.3</version>
<configuration>
<createDependencyReducedPom>true</createDependencyReducedPom>
<createSourcesJar>true</createSourcesJar>
<shadedArtifactAttached>true</shadedArtifactAttached>
<artifactSet>
<includes combine.children="append">
<include>ca.uhn.hapi.fhir:hapi-fhir-base</include>
<include>org.glassfish:javax.json</include>
<include>org.codehaus.woodstox:woodstox-core-asl</include>
<include>javax.xml.stream:stax-api</include>
<include>org.codehaus.woodstox:stax2-api</include>
<include>org.glassfish:javax.json</include>
<include>net.sourceforge.cobertura:cobertura</include>
<!--
<include>org.apache.commons:*</include>
<include>org.apache.httpcomponents:*</include>
<include>commons-codec:commons-codec</include>
-->
<include>javax.servlet:javax.servlet-api</include>
</includes>
</artifactSet>
<relocations>
<relocation>
<pattern>javax.xml.stream</pattern>
<shadedPattern>ca.uhn.fhir.repackage.javax.xml.stream</shadedPattern>
</relocation>
<relocation>
<pattern>javax.json</pattern>
<shadedPattern>ca.uhn.fhir.repackage.javax.json</shadedPattern>
</relocation>
</relocations>
<filters combine.children="append">
<!-- Make CoverageIgnore annotation available for android. -->
<filter>
<artifact>net.sourceforge.cobertura:cobertura</artifact>
<includes>
<include>net/sourceforge/cobertura/CoverageIgnore*</include>
</includes>
</filter>
<!-- Exclude server side stuff, except exceptions which are used clientside -->
<filter>
<artifact>ca.uhn.hapi.fhir:hapi-fhir-base</artifact>
<excludes>
<!-- Exclude test stuff which depends on httpservlet -->
<exclude>ca/uhn/fhir/util/ITestingUiClientFactory</exclude>
<!-- Exclude server side stuff. This could be made nicer if code was divided into shared, server and client -->
<!--exclude>ca/uhn/fhir/rest/server/Add*</exclude>
<exclude>ca/uhn/fhir/rest/server/Apache*</exclude>
<exclude>ca/uhn/fhir/rest/server/Bundle*</exclude>
<exclude>ca/uhn/fhir/rest/server/ETag*</exclude>
<exclude>ca/uhn/fhir/rest/server/Fifo*</exclude>
<exclude>ca/uhn/fhir/rest/server/Hard*</exclude>
<exclude>ca/uhn/fhir/rest/server/IBundle*</exclude>
<exclude>ca/uhn/fhir/rest/server/IDynamic*</exclude>
<exclude>ca/uhn/fhir/rest/server/Incoming*</exclude>
<exclude>ca/uhn/fhir/rest/server/IPaging*</exclude>
<exclude>ca/uhn/fhir/rest/server/IServerAdd*</exclude>
<exclude>ca/uhn/fhir/rest/server/Resource*</exclude>
<exclude>ca/uhn/fhir/rest/server/Rest*</exclude>
<exclude>ca/uhn/fhir/rest/server/Search*</exclude>
<exclude>ca/uhn/fhir/rest/server/Simple*</exclude>
<exclude>ca/uhn/fhir/rest/server/audit/**</exclude>
<exclude>ca/uhn/fhir/rest/server/interceptor/**</exclude-->
<!-- Exclude Phloc validator to make android libs small -->
<exclude>ca/uhn/fhir/validation/schematron/SchematronBaseValidator*</exclude>
<!-- Exclude Thymeleaf to make android libs small -->
<exclude>ca/uhn/fhir/narrative/*Thymeleaf*</exclude>
</excludes>
</filter>
</filters>
</configuration>
<executions> <executions>
<!-- shaded jar -->
<execution> <execution>
<id>normal</id> <id>shaded</id>
<phase>package</phase> <phase>package</phase>
<goals> <goals>
<goal>shade</goal> <goal>shade</goal>
</goals> </goals>
<configuration> </execution>
<createDependencyReducedPom>true</createDependencyReducedPom> <!-- dstu jar -->
<createSourcesJar>true</createSourcesJar>
<shadedArtifactAttached>true</shadedArtifactAttached>
<artifactSet>
<includes>
<include>ca.uhn.hapi.fhir:hapi-fhir-base</include>
<include>org.glassfish:javax.json</include>
<include>org.codehaus.woodstox:woodstox-core-asl</include>
<include>javax.xml.stream:stax-api</include>
<include>org.codehaus.woodstox:stax2-api</include>
<include>org.glassfish:javax.json</include>
</includes>
</artifactSet>
<relocations>
<relocation>
<pattern>javax.xml.stream</pattern>
<shadedPattern>ca.uhn.fhir.repackage.javax.xml.stream</shadedPattern>
</relocation>
<relocation>
<pattern>javax.json</pattern>
<shadedPattern>ca.uhn.fhir.repackage.javax.json</shadedPattern>
</relocation>
</relocations>
<filters>
<filter>
<artifact>ca.uhn.hapi.fhir:hapi-fhir-base</artifact>
<excludes>
<exclude>**/*.java</exclude>
</excludes>
</filter>
</filters>
</configuration>
</execution>
<execution> <execution>
<id>dstu</id> <id>dstu</id>
<phase>package</phase> <phase>package</phase>
<goals> <goals>
<goal>shade</goal> <goal>shade</goal>
</goals> </goals>
<configuration> <configuration>
<createDependencyReducedPom>true</createDependencyReducedPom>
<createSourcesJar>true</createSourcesJar>
<shadedArtifactAttached>true</shadedArtifactAttached>
<shadedClassifierName>dstu</shadedClassifierName> <shadedClassifierName>dstu</shadedClassifierName>
<artifactSet> <artifactSet>
<includes> <includes>
<!--
<include>commons-codec:commons-codec</include>
-->
<include>ca.uhn.hapi.fhir:hapi-fhir-base</include>
<include>ca.uhn.hapi.fhir:hapi-fhir-structures-dstu</include> <include>ca.uhn.hapi.fhir:hapi-fhir-structures-dstu</include>
<include>org.glassfish:javax.json</include>
<include>org.codehaus.woodstox:woodstox-core-asl</include>
<include>javax.xml.stream:stax-api</include>
<!-- <include>javax.servlet:javax.servlet-api</include>-->
<include>org.codehaus.woodstox:stax2-api</include>
<!-- <include>org.slf4j:slf4j*</include> -->
<!--
<include>org.apache.commons:*</include>
<include>org.apache.httpcomponents:*</include>
-->
<include>org.glassfish:javax.json</include>
</includes> </includes>
</artifactSet> </artifactSet>
<relocations> <!-- Exclude server side stuff, except exceptions which are used clientside -->
<relocation> <!--filters>
<pattern>javax.xml.stream</pattern>
<shadedPattern>ca.uhn.fhir.repackage.javax.xml.stream</shadedPattern>
</relocation>
<relocation>
<pattern>javax.json</pattern>
<shadedPattern>ca.uhn.fhir.repackage.javax.json</shadedPattern>
</relocation>
</relocations>
<filters>
<filter> <filter>
<artifact>ca.uhn.hapi.fhir:hapi-fhir-base</artifact> <artifact>ca.uhn.hapi.fhir:hapi-fhir-structures-dstu</artifact>
<excludes> <excludes>
<exclude>**/*.java</exclude> <exclude>ca/uhn/fhir/rest/server/**</exclude>
</excludes> </excludes>
</filter> </filter>
</filters> </filters-->
</configuration> </configuration>
</execution> </execution>
<!-- dstu2 jar -->
<execution> <execution>
<id>dstu2</id> <id>dstu2</id>
<phase>package</phase> <phase>package</phase>
<goals> <goals>
<goal>shade</goal> <goal>shade</goal>
</goals> </goals>
<configuration> <configuration>
<createDependencyReducedPom>true</createDependencyReducedPom>
<createSourcesJar>true</createSourcesJar>
<shadedArtifactAttached>true</shadedArtifactAttached>
<shadedClassifierName>dstu2</shadedClassifierName> <shadedClassifierName>dstu2</shadedClassifierName>
<artifactSet> <artifactSet>
<includes> <includes>
<!--
<include>commons-codec:commons-codec</include>
-->
<include>ca.uhn.hapi.fhir:hapi-fhir-base</include>
<include>ca.uhn.hapi.fhir:hapi-fhir-structures-dstu2</include> <include>ca.uhn.hapi.fhir:hapi-fhir-structures-dstu2</include>
<include>org.glassfish:javax.json</include>
<include>org.codehaus.woodstox:woodstox-core-asl</include>
<include>javax.xml.stream:stax-api</include>
<!-- <include>javax.servlet:javax.servlet-api</include>-->
<include>org.codehaus.woodstox:stax2-api</include>
<!--
<include>org.apache.commons:*</include>
<include>org.apache.httpcomponents:*</include>
-->
<include>org.glassfish:javax.json</include>
</includes> </includes>
</artifactSet> </artifactSet>
<relocations> <!-- Exclude server side stuff, except exceptions which are used clientside -->
<relocation> <!--filters>
<pattern>javax.xml.stream</pattern> <filter>
<shadedPattern>ca.uhn.fhir.repackage.javax.xml.stream</shadedPattern> <artifact>ca.uhn.hapi.fhir:hapi-fhir-structures-dstu2</artifact>
</relocation> <excludes>
<relocation> <exclude>ca/uhn/fhir/rest/server/**</exclude>
<pattern>javax.json</pattern> </excludes>
<shadedPattern>ca.uhn.fhir.repackage.javax.json</shadedPattern> </filter>
</relocation> </filters-->
</relocations>
</configuration> </configuration>
</execution> </execution>
</executions> </executions>

View File

@@ -132,6 +132,20 @@
</dependencies> </dependencies>
<build> <build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-source-plugin</artifactId>
<executions>
<execution>
<id>attach-sources</id>
<goals>
<goal>jar</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
<resources> <resources>
<resource> <resource>
<directory>src/main/resources</directory> <directory>src/main/resources</directory>

View File

@@ -800,8 +800,14 @@ public class JsonParser extends BaseParser implements IParser {
if (theResource instanceof IBaseBinary) { if (theResource instanceof IBaseBinary) {
IBaseBinary bin = (IBaseBinary) theResource; IBaseBinary bin = (IBaseBinary) theResource;
theEventWriter.write("contentType", bin.getContentType()); String contentType = bin.getContentType();
theEventWriter.write("content", bin.getContentAsBase64()); if (isNotBlank(contentType)) {
theEventWriter.write("contentType", contentType);
}
String contentAsBase64 = bin.getContentAsBase64();
if (isNotBlank(contentAsBase64)) {
theEventWriter.write("content", contentAsBase64);
}
} else { } else {
encodeCompositeElementToStreamWriter(theResDef, theResource, theResource, theEventWriter, resDef, theContainedResource, new CompositeChildElement(resDef)); encodeCompositeElementToStreamWriter(theResDef, theResource, theResource, theEventWriter, resDef, theContainedResource, new CompositeChildElement(resDef));
} }

View File

@@ -58,20 +58,6 @@ public class DateRangeParam implements IQueryParameterAnd<DateParam> {
setRangeFromDatesInclusive(theLowerBound, theUpperBound); setRangeFromDatesInclusive(theLowerBound, theUpperBound);
} }
/**
* Constructor which takes two Dates representing the lower and upper bounds of the range (inclusive on both ends)
*
* @param theLowerBound
* A qualified date param representing the lower date bound (optionally may include time), e.g.
* "2011-02-22" or "2011-02-22T13:12:00Z". Will be treated inclusively. Either theLowerBound or theUpperBound may both be populated, or one may be null, but it is not valid for both to be null.
* @param theUpperBound
* A qualified date param representing the upper date bound (optionally may include time), e.g.
* "2011-02-22" or "2011-02-22T13:12:00Z". Will be treated inclusively. Either theLowerBound or theUpperBound may both be populated, or one may be null, but it is not valid for both to be null.
*/
public DateRangeParam(DateTimeDt theLowerBound, DateTimeDt theUpperBound) {
setRangeFromDatesInclusive(theLowerBound, theUpperBound);
}
/** /**
* Sets the range from a single date param. If theDateParam has no qualifier, treats it as the lower and upper bound * Sets the range from a single date param. If theDateParam has no qualifier, treats it as the lower and upper bound
* (e.g. 2011-01-02 would match any time on that day). If theDateParam has a qualifier, treats it as either the * (e.g. 2011-01-02 would match any time on that day). If theDateParam has a qualifier, treats it as either the
@@ -106,6 +92,20 @@ public class DateRangeParam implements IQueryParameterAnd<DateParam> {
validateAndThrowDataFormatExceptionIfInvalid(); validateAndThrowDataFormatExceptionIfInvalid();
} }
/**
* Constructor which takes two Dates representing the lower and upper bounds of the range (inclusive on both ends)
*
* @param theLowerBound
* A qualified date param representing the lower date bound (optionally may include time), e.g.
* "2011-02-22" or "2011-02-22T13:12:00Z". Will be treated inclusively. Either theLowerBound or theUpperBound may both be populated, or one may be null, but it is not valid for both to be null.
* @param theUpperBound
* A qualified date param representing the upper date bound (optionally may include time), e.g.
* "2011-02-22" or "2011-02-22T13:12:00Z". Will be treated inclusively. Either theLowerBound or theUpperBound may both be populated, or one may be null, but it is not valid for both to be null.
*/
public DateRangeParam(DateTimeDt theLowerBound, DateTimeDt theUpperBound) {
setRangeFromDatesInclusive(theLowerBound, theUpperBound);
}
/** /**
* Constructor which takes two strings representing the lower and upper bounds of the range (inclusive on both ends) * Constructor which takes two strings representing the lower and upper bounds of the range (inclusive on both ends)
* *
@@ -120,6 +120,39 @@ public class DateRangeParam implements IQueryParameterAnd<DateParam> {
setRangeFromDatesInclusive(theLowerBound, theUpperBound); setRangeFromDatesInclusive(theLowerBound, theUpperBound);
} }
private void addParam(DateParam theParsed) throws InvalidRequestException {
if (theParsed.getComparator() == null) {
if (myLowerBound != null || myUpperBound != null) {
throw new InvalidRequestException("Can not have multiple date range parameters for the same param without a qualifier");
}
myLowerBound = new DateParam(QuantityCompararatorEnum.GREATERTHAN_OR_EQUALS, theParsed.getValueAsString());
myUpperBound = new DateParam(QuantityCompararatorEnum.LESSTHAN_OR_EQUALS, theParsed.getValueAsString());
} else {
switch (theParsed.getComparator()) {
case GREATERTHAN:
case GREATERTHAN_OR_EQUALS:
if (myLowerBound != null) {
throw new InvalidRequestException("Can not have multiple date range parameters for the same param that specify a lower bound");
}
myLowerBound = theParsed;
break;
case LESSTHAN:
case LESSTHAN_OR_EQUALS:
if (myUpperBound != null) {
throw new InvalidRequestException("Can not have multiple date range parameters for the same param that specify an upper bound");
}
myUpperBound = theParsed;
break;
default:
throw new InvalidRequestException("Unknown comparator: " + theParsed.getComparator());
}
}
}
public DateParam getLowerBound() { public DateParam getLowerBound() {
return myLowerBound; return myLowerBound;
} }
@@ -182,6 +215,18 @@ public class DateRangeParam implements IQueryParameterAnd<DateParam> {
return retVal; return retVal;
} }
private boolean haveLowerBound() {
return myLowerBound != null && myLowerBound.isEmpty() == false;
}
private boolean haveUpperBound() {
return myUpperBound != null && myUpperBound.isEmpty() == false;
}
public boolean isEmpty() {
return (getLowerBoundAsInstant() == null) && (getUpperBoundAsInstant() == null);
}
public void setLowerBound(DateParam theLowerBound) { public void setLowerBound(DateParam theLowerBound) {
myLowerBound = theLowerBound; myLowerBound = theLowerBound;
validateAndThrowDataFormatExceptionIfInvalid(); validateAndThrowDataFormatExceptionIfInvalid();
@@ -275,37 +320,32 @@ public class DateRangeParam implements IQueryParameterAnd<DateParam> {
} }
private void addParam(DateParam theParsed) throws InvalidRequestException { @Override
if (theParsed.getComparator() == null) { public String toString() {
if (myLowerBound != null || myUpperBound != null) { StringBuilder b = new StringBuilder();
throw new InvalidRequestException("Can not have multiple date range parameters for the same param without a qualifier"); b.append(getClass().getSimpleName());
b.append("[");
if (haveLowerBound()) {
if (myLowerBound.getComparator() != null) {
b.append(myLowerBound.getComparator().getCode());
} }
b.append(myLowerBound.getValueAsString());
myLowerBound = new DateParam(QuantityCompararatorEnum.GREATERTHAN_OR_EQUALS, theParsed.getValueAsString());
myUpperBound = new DateParam(QuantityCompararatorEnum.LESSTHAN_OR_EQUALS, theParsed.getValueAsString());
} else {
switch (theParsed.getComparator()) {
case GREATERTHAN:
case GREATERTHAN_OR_EQUALS:
if (myLowerBound != null) {
throw new InvalidRequestException("Can not have multiple date range parameters for the same param that specify a lower bound");
}
myLowerBound = theParsed;
break;
case LESSTHAN:
case LESSTHAN_OR_EQUALS:
if (myUpperBound != null) {
throw new InvalidRequestException("Can not have multiple date range parameters for the same param that specify an upper bound");
}
myUpperBound = theParsed;
break;
default:
throw new InvalidRequestException("Unknown comparator: " + theParsed.getComparator());
}
} }
if (haveUpperBound()) {
if(haveLowerBound()) {
b.append(" ");
}
if (myUpperBound.getComparator() != null) {
b.append(myUpperBound.getComparator().getCode());
}
b.append(myUpperBound.getValueAsString());
} else {
if (!haveLowerBound()) {
b.append("empty");
}
}
b.append("]");
return b.toString();
} }
private void validateAndThrowDataFormatExceptionIfInvalid() { private void validateAndThrowDataFormatExceptionIfInvalid() {
@@ -354,40 +394,4 @@ public class DateRangeParam implements IQueryParameterAnd<DateParam> {
} }
private boolean haveUpperBound() {
return myUpperBound != null && myUpperBound.isEmpty() == false;
}
private boolean haveLowerBound() {
return myLowerBound != null && myLowerBound.isEmpty() == false;
}
@Override
public String toString() {
StringBuilder b = new StringBuilder();
b.append(getClass().getSimpleName());
b.append("[");
if (haveLowerBound()) {
if (myLowerBound.getComparator() != null) {
b.append(myLowerBound.getComparator().getCode());
}
b.append(myLowerBound.getValueAsString());
}
if (haveUpperBound()) {
if(haveLowerBound()) {
b.append(" ");
}
if (myUpperBound.getComparator() != null) {
b.append(myUpperBound.getComparator().getCode());
}
b.append(myUpperBound.getValueAsString());
} else {
if (!haveLowerBound()) {
b.append("empty");
}
}
b.append("]");
return b.toString();
}
} }
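A minimal usage sketch of the two-DateTimeDt constructor that this file reorders and documents above; the dates are illustrative only, not taken from this commit:

DateRangeParam range = new DateRangeParam(
		new DateTimeDt("2011-02-22"),
		new DateTimeDt("2011-02-24T13:12:00Z"));
// Per setRangeFromDatesInclusive, both bounds are treated inclusively,
// and one bound (but not both) may be null.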

View File

@@ -147,6 +147,8 @@ public class Constants {
public static final String TAG_SUBSETTED_SYSTEM = "http://hl7.org/fhir/v3/ObservationValue"; public static final String TAG_SUBSETTED_SYSTEM = "http://hl7.org/fhir/v3/ObservationValue";
public static final String URL_TOKEN_HISTORY = "_history"; public static final String URL_TOKEN_HISTORY = "_history";
public static final String URL_TOKEN_METADATA = "metadata"; public static final String URL_TOKEN_METADATA = "metadata";
public static final String PARAM_CONTENT = "_content";
public static final String PARAM_TEXT = "_text";
static { static {
Map<String, EncodingEnum> valToEncoding = new HashMap<String, EncodingEnum>(); Map<String, EncodingEnum> valToEncoding = new HashMap<String, EncodingEnum>();

View File

@@ -20,49 +20,37 @@ package ca.uhn.fhir.util;
* #L% * #L%
*/ */
import java.beans.BeanInfo;
import java.beans.IntrospectionException;
import java.beans.Introspector;
import java.beans.PropertyDescriptor;
import java.lang.reflect.Method; import java.lang.reflect.Method;
import ca.uhn.fhir.util.reflection.IBeanUtils;
public class BeanUtils { public class BeanUtils {
public static Method findAccessor(Class<?> theClassToIntrospect, Class<?> theTargetReturnType, String thePropertyName) throws NoSuchFieldException { private static IBeanUtils beanUtils;
BeanInfo info;
try { private static IBeanUtils getBeanUtils() {
info = Introspector.getBeanInfo(theClassToIntrospect); if (beanUtils == null) {
} catch (IntrospectionException e) { try {
throw new NoSuchFieldException(e.getMessage()); beanUtils = (IBeanUtils) Class.forName("ca.uhn.fhir.util.reflection.JavaBeansBeanUtil").newInstance();
} } catch (ReflectiveOperationException e) {
for (PropertyDescriptor pd : info.getPropertyDescriptors()) { try {
if (thePropertyName.equals(pd.getName())) { beanUtils = (IBeanUtils) Class.forName("ca.uhn.fhir.util.reflection.JavaReflectBeanUtil")
if (theTargetReturnType.isAssignableFrom(pd.getPropertyType())) { .newInstance();
return pd.getReadMethod(); } catch (ReflectiveOperationException e1) {
}else { throw new RuntimeException("Could not resolve BeanUtil implementation");
throw new NoSuchFieldException(theClassToIntrospect + " has an accessor for field " + thePropertyName + " but it does not return type " + theTargetReturnType);
} }
} }
} }
throw new NoSuchFieldException(theClassToIntrospect + " has no accessor for field " + thePropertyName); return beanUtils;
} }
public static Method findMutator(Class<?> theClassToIntrospect, Class<?> theTargetReturnType, String thePropertyName) throws NoSuchFieldException { public static Method findAccessor(Class<?> theClassToIntrospect, Class<?> theTargetReturnType, String thePropertyName)
BeanInfo info; throws NoSuchFieldException {
try { return getBeanUtils().findAccessor(theClassToIntrospect, theTargetReturnType, thePropertyName);
info = Introspector.getBeanInfo(theClassToIntrospect);
} catch (IntrospectionException e) {
throw new NoSuchFieldException(e.getMessage());
}
for (PropertyDescriptor pd : info.getPropertyDescriptors()) {
if (thePropertyName.equals(pd.getName())) {
if (theTargetReturnType.isAssignableFrom(pd.getPropertyType())) {
return pd.getWriteMethod();
}else {
throw new NoSuchFieldException(theClassToIntrospect + " has an mutator for field " + thePropertyName + " but it does not return type " + theTargetReturnType);
}
}
}
throw new NoSuchFieldException(theClassToIntrospect + " has no mutator for field " + thePropertyName);
} }
public static Method findMutator(Class<?> theClassToIntrospect, Class<?> theTargetReturnType, String thePropertyName)
throws NoSuchFieldException {
return getBeanUtils().findMutator(theClassToIntrospect, theTargetReturnType, thePropertyName);
}
} }
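The rewritten BeanUtils now only delegates to a lazily resolved IBeanUtils implementation. As an illustrative sketch of the fallback it performs (class names taken from the diff above):

private static IBeanUtils getBeanUtils() {
	if (beanUtils == null) {
		try {
			// Prefer the java.beans-based implementation where that package exists.
			beanUtils = (IBeanUtils) Class.forName("ca.uhn.fhir.util.reflection.JavaBeansBeanUtil").newInstance();
		} catch (ReflectiveOperationException e) {
			try {
				// Fall back to plain reflection, e.g. on Android where java.beans is unavailable.
				beanUtils = (IBeanUtils) Class.forName("ca.uhn.fhir.util.reflection.JavaReflectBeanUtil").newInstance();
			} catch (ReflectiveOperationException e1) {
				throw new RuntimeException("Could not resolve BeanUtil implementation");
			}
		}
	}
	return beanUtils;
}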

View File

@@ -1,5 +1,7 @@
package ca.uhn.fhir.util; package ca.uhn.fhir.util;
import org.apache.commons.lang3.StringUtils;
/* /*
* #%L * #%L
* HAPI FHIR - Core Library * HAPI FHIR - Core Library
@@ -32,4 +34,16 @@ public class ObjectUtil {
return object1.equals(object2); return object1.equals(object2);
} }
public static <T> T requireNonNull(T obj, String message) {
if (obj == null)
throw new NullPointerException(message);
return obj;
}
public static void requireNotEmpty(String str, String message) {
if (StringUtils.isBlank(str)) {
throw new IllegalArgumentException(message);
}
}
} }
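The two new helpers exist to replace the Thymeleaf Validate calls removed elsewhere in this commit; a sketch of the intended call sites (mirroring the ValidationContext change further down):

ObjectUtil.requireNonNull(theContext, "theContext can not be null");
ObjectUtil.requireNotEmpty(theResourceBody, "theResourceBody can not be null or empty");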

View File

@@ -21,20 +21,15 @@ package ca.uhn.fhir.util;
*/ */
import java.io.IOException; import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream; import java.io.OutputStream;
import java.io.OutputStreamWriter; import java.io.OutputStreamWriter;
import java.io.Reader; import java.io.Reader;
import java.io.StringWriter; import java.io.StringWriter;
import java.io.UnsupportedEncodingException; import java.io.UnsupportedEncodingException;
import java.io.Writer; import java.io.Writer;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Collections; import java.util.Collections;
import java.util.HashMap; import java.util.HashMap;
import java.util.Map; import java.util.Map;
import java.util.jar.Attributes;
import java.util.jar.Manifest;
import javax.xml.stream.FactoryConfigurationError; import javax.xml.stream.FactoryConfigurationError;
import javax.xml.stream.XMLEventReader; import javax.xml.stream.XMLEventReader;
@@ -52,18 +47,15 @@ import org.codehaus.stax2.io.EscapingWriterFactory;
import com.ctc.wstx.api.WstxInputProperties; import com.ctc.wstx.api.WstxInputProperties;
import com.ctc.wstx.stax.WstxOutputFactory; import com.ctc.wstx.stax.WstxOutputFactory;
import ca.uhn.fhir.util.jar.DependencyLogFactory;
import ca.uhn.fhir.util.jar.IDependencyLog;
/** /**
* Utility methods for working with the StAX API. * Utility methods for working with the StAX API.
* *
* This class contains code adapted from the Apache Axiom project. * This class contains code adapted from the Apache Axiom project.
*/ */
public class XmlUtil { public class XmlUtil {
private static final Attributes.Name BUNDLE_SYMBOLIC_NAME = new Attributes.Name("Bundle-SymbolicName");
private static final Attributes.Name BUNDLE_VENDOR = new Attributes.Name("Bundle-Vendor");
private static final Attributes.Name BUNDLE_VERSION = new Attributes.Name("Bundle-Version");
private static final Attributes.Name IMPLEMENTATION_TITLE = new Attributes.Name("Implementation-Title");
private static final Attributes.Name IMPLEMENTATION_VENDOR = new Attributes.Name("Implementation-Vendor");
private static final Attributes.Name IMPLEMENTATION_VERSION = new Attributes.Name("Implementation-Version");
private static volatile boolean ourHaveLoggedStaxImplementation; private static volatile boolean ourHaveLoggedStaxImplementation;
private static volatile XMLInputFactory ourInputFactory; private static volatile XMLInputFactory ourInputFactory;
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(XmlUtil.class); private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(XmlUtil.class);
@@ -1645,73 +1637,12 @@ public class XmlUtil {
return outputFactory; return outputFactory;
} }
private static URL getRootUrlForClass(Class<?> cls) {
ClassLoader classLoader = cls.getClassLoader();
String resource = cls.getName().replace('.', '/') + ".class";
if (classLoader == null) {
// A null class loader means the bootstrap class loader. In this case we use the
// system class loader. This is safe since we can assume that the system class
// loader uses parent first as delegation policy.
classLoader = ClassLoader.getSystemClassLoader();
}
URL url = classLoader.getResource(resource);
if (url == null) {
return null;
}
String file = url.getFile();
if (file.endsWith(resource)) {
try {
return new URL(url.getProtocol(), url.getHost(), url.getPort(), file.substring(0, file.length() - resource.length()));
} catch (MalformedURLException ex) {
return null;
}
} else {
return null;
}
}
private static void logStaxImplementation(Class<?> theClass) { private static void logStaxImplementation(Class<?> theClass) {
try { IDependencyLog logger = DependencyLogFactory.createJarLogger();
URL rootUrl = getRootUrlForClass(theClass); if (logger != null) {
if (rootUrl == null) { logger.logStaxImplementation(theClass);
ourLog.info("Unable to determine location of StAX implementation containing class");
} else {
Manifest manifest;
URL metaInfUrl = new URL(rootUrl, "META-INF/MANIFEST.MF");
InputStream is = metaInfUrl.openStream();
try {
manifest = new Manifest(is);
} finally {
is.close();
}
Attributes attrs = manifest.getMainAttributes();
String title = attrs.getValue(IMPLEMENTATION_TITLE);
String symbolicName = attrs.getValue(BUNDLE_SYMBOLIC_NAME);
if (symbolicName != null) {
int i = symbolicName.indexOf(';');
if (i != -1) {
symbolicName = symbolicName.substring(0, i);
}
}
String vendor = attrs.getValue(IMPLEMENTATION_VENDOR);
if (vendor == null) {
vendor = attrs.getValue(BUNDLE_VENDOR);
}
String version = attrs.getValue(IMPLEMENTATION_VERSION);
if (version == null) {
version = attrs.getValue(BUNDLE_VERSION);
}
if (ourLog.isDebugEnabled()) {
ourLog.debug("FHIR XML procesing will use StAX implementation at {}\n Title: {}\n Symbolic name: {}\n Vendor: {}\n Version: {}", new Object[] { rootUrl, title, symbolicName, vendor, version } );
} else {
ourLog.info("FHIR XML procesing will use StAX implementation '{}' version '{}'", title, version);
}
}
} catch (Throwable e) {
ourLog.info("Unable to determine StAX implementation: " + e.getMessage());
} finally {
ourHaveLoggedStaxImplementation = true;
} }
ourHaveLoggedStaxImplementation = true;
} }
public static void main(String[] args) throws FactoryConfigurationError, XMLStreamException { public static void main(String[] args) throws FactoryConfigurationError, XMLStreamException {

View File

@@ -0,0 +1,36 @@
package ca.uhn.fhir.util.jar;
/*
* #%L
* HAPI FHIR - Core Library
* %%
* Copyright (C) 2014 - 2015 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
public class DependencyLogFactory {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(DependencyLogFactory.class);
@SuppressWarnings("unchecked")
public static IDependencyLog createJarLogger() {
try {
Class<IDependencyLog> clas = (Class<IDependencyLog>) Class.forName("ca.uhn.fhir.util.jar.DependencyLogImpl");
return clas.newInstance();
} catch (ReflectiveOperationException e) {
ourLog.info("Could not log dependency.");
return null;
}
}
}
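createJarLogger() deliberately returns null when DependencyLogImpl is not on the classpath, so callers are expected to null-check; a short sketch mirroring the new XmlUtil.logStaxImplementation above:

IDependencyLog logger = DependencyLogFactory.createJarLogger();
if (logger != null) {
	logger.logStaxImplementation(theClass);
}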

View File

@@ -0,0 +1,107 @@
package ca.uhn.fhir.util.jar;
/*
* #%L
* HAPI FHIR - Core Library
* %%
* Copyright (C) 2014 - 2015 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import java.io.InputStream;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.jar.Attributes;
import java.util.jar.Manifest;
import ca.uhn.fhir.util.XmlUtil;
public class DependencyLogImpl implements IDependencyLog {
private static final Attributes.Name BUNDLE_SYMBOLIC_NAME = new Attributes.Name("Bundle-SymbolicName");
private static final Attributes.Name BUNDLE_VENDOR = new Attributes.Name("Bundle-Vendor");
private static final Attributes.Name BUNDLE_VERSION = new Attributes.Name("Bundle-Version");
private static final Attributes.Name IMPLEMENTATION_TITLE = new Attributes.Name("Implementation-Title");
private static final Attributes.Name IMPLEMENTATION_VENDOR = new Attributes.Name("Implementation-Vendor");
private static final Attributes.Name IMPLEMENTATION_VERSION = new Attributes.Name("Implementation-Version");
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(XmlUtil.class);
@Override
public void logStaxImplementation(Class<?> theClass) {
try {
URL rootUrl = getRootUrlForClass(theClass);
if (rootUrl == null) {
ourLog.info("Unable to determine location of StAX implementation containing class");
} else {
Manifest manifest;
URL metaInfUrl = new URL(rootUrl, "META-INF/MANIFEST.MF");
InputStream is = metaInfUrl.openStream();
try {
manifest = new Manifest(is);
} finally {
is.close();
}
Attributes attrs = manifest.getMainAttributes();
String title = attrs.getValue(IMPLEMENTATION_TITLE);
String symbolicName = attrs.getValue(BUNDLE_SYMBOLIC_NAME);
if (symbolicName != null) {
int i = symbolicName.indexOf(';');
if (i != -1) {
symbolicName = symbolicName.substring(0, i);
}
}
String vendor = attrs.getValue(IMPLEMENTATION_VENDOR);
if (vendor == null) {
vendor = attrs.getValue(BUNDLE_VENDOR);
}
String version = attrs.getValue(IMPLEMENTATION_VERSION);
if (version == null) {
version = attrs.getValue(BUNDLE_VERSION);
}
if (ourLog.isDebugEnabled()) {
ourLog.debug("FHIR XML procesing will use StAX implementation at {}\n Title: {}\n Symbolic name: {}\n Vendor: {}\n Version: {}", new Object[] { rootUrl, title, symbolicName, vendor, version } );
} else {
ourLog.info("FHIR XML procesing will use StAX implementation '{}' version '{}'", title, version);
}
}
} catch (Throwable e) {
ourLog.info("Unable to determine StAX implementation: " + e.getMessage());
}
}
private static URL getRootUrlForClass(Class<?> cls) {
ClassLoader classLoader = cls.getClassLoader();
String resource = cls.getName().replace('.', '/') + ".class";
if (classLoader == null) {
// A null class loader means the bootstrap class loader. In this case we use the
// system class loader. This is safe since we can assume that the system class
// loader uses parent first as delegation policy.
classLoader = ClassLoader.getSystemClassLoader();
}
URL url = classLoader.getResource(resource);
if (url == null) {
return null;
}
String file = url.getFile();
if (file.endsWith(resource)) {
try {
return new URL(url.getProtocol(), url.getHost(), url.getPort(), file.substring(0, file.length() - resource.length()));
} catch (MalformedURLException ex) {
return null;
}
} else {
return null;
}
}
}

View File

@@ -0,0 +1,27 @@
package ca.uhn.fhir.util.jar;
/*
* #%L
* HAPI FHIR - Core Library
* %%
* Copyright (C) 2014 - 2015 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
public interface IDependencyLog {
void logStaxImplementation(Class<?> theClass);
}

View File

@@ -0,0 +1,31 @@
package ca.uhn.fhir.util.reflection;
import java.lang.reflect.Method;
/*
* #%L
* HAPI FHIR - Core Library
* %%
* Copyright (C) 2014 - 2015 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
public interface IBeanUtils {
Method findAccessor(Class<?> theClassToIntrospect, Class<?> theTargetReturnType, String thePropertyName)
throws NoSuchFieldException;
Method findMutator(Class<?> theClassToIntrospect, Class<?> theTargetReturnType, String thePropertyName)
throws NoSuchFieldException;
}

View File

@@ -0,0 +1,70 @@
package ca.uhn.fhir.util.reflection;
/*
* #%L
* HAPI FHIR - Core Library
* %%
* Copyright (C) 2014 - 2015 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import java.beans.BeanInfo;
import java.beans.IntrospectionException;
import java.beans.Introspector;
import java.beans.PropertyDescriptor;
import java.lang.reflect.Method;
public class JavaBeansBeanUtil implements IBeanUtils {
@Override
public Method findAccessor(Class<?> theClassToIntrospect, Class<?> theTargetReturnType, String thePropertyName) throws NoSuchFieldException {
BeanInfo info;
try {
info = Introspector.getBeanInfo(theClassToIntrospect);
} catch (IntrospectionException e) {
throw new NoSuchFieldException(e.getMessage());
}
for (PropertyDescriptor pd : info.getPropertyDescriptors()) {
if (thePropertyName.equals(pd.getName())) {
if (theTargetReturnType.isAssignableFrom(pd.getPropertyType())) {
return pd.getReadMethod();
}else {
throw new NoSuchFieldException(theClassToIntrospect + " has an accessor for field " + thePropertyName + " but it does not return type " + theTargetReturnType);
}
}
}
throw new NoSuchFieldException(theClassToIntrospect + " has no accessor for field " + thePropertyName);
}
@Override
public Method findMutator(Class<?> theClassToIntrospect, Class<?> theTargetReturnType, String thePropertyName) throws NoSuchFieldException {
BeanInfo info;
try {
info = Introspector.getBeanInfo(theClassToIntrospect);
} catch (IntrospectionException e) {
throw new NoSuchFieldException(e.getMessage());
}
for (PropertyDescriptor pd : info.getPropertyDescriptors()) {
if (thePropertyName.equals(pd.getName())) {
if (theTargetReturnType.isAssignableFrom(pd.getPropertyType())) {
return pd.getWriteMethod();
}else {
throw new NoSuchFieldException(theClassToIntrospect + " has an mutator for field " + thePropertyName + " but it does not return type " + theTargetReturnType);
}
}
}
throw new NoSuchFieldException(theClassToIntrospect + " has no mutator for field " + thePropertyName);
}
}

View File

@@ -0,0 +1,63 @@
package ca.uhn.fhir.util.reflection;
import java.lang.reflect.Method;
import org.apache.commons.lang3.text.WordUtils;
import ca.uhn.fhir.context.ConfigurationException;
/*
* #%L
* HAPI FHIR - Core Library
* %%
* Copyright (C) 2014 - 2015 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
public class JavaReflectBeanUtil implements IBeanUtils {
@Override
public Method findAccessor(Class<?> theClassToIntrospect, Class<?> theTargetReturnType, String thePropertyName)
throws NoSuchFieldException {
String methodName = "get" + WordUtils.capitalize(thePropertyName);
try {
Method method = theClassToIntrospect.getMethod(methodName);
if (theTargetReturnType.isAssignableFrom(method.getReturnType())) {
return method;
}
} catch (NoSuchMethodException e) {
// fall through
} catch (SecurityException e) {
throw new ConfigurationException("Failed to scan class '" + theClassToIntrospect + "' because of a security exception", e);
}
throw new NoSuchFieldException(theClassToIntrospect + " has no accessor for field " + thePropertyName);
}
@Override
public Method findMutator(Class<?> theClassToIntrospect, Class<?> theTargetArgumentType, String thePropertyName)
throws NoSuchFieldException {
String methodName = "set" + WordUtils.capitalize(thePropertyName);
try {
return theClassToIntrospect.getMethod(methodName, theTargetArgumentType);
} catch (NoSuchMethodException e) {
//fall through
} catch (SecurityException e) {
throw new ConfigurationException("Failed to scan class '" + theClassToIntrospect + "' because of a security exception", e);
}
throw new NoSuchFieldException(theClassToIntrospect + " has an mutator for field " + thePropertyName + " but it does not return type " + theTargetArgumentType);
}
}

View File

@@ -23,9 +23,8 @@ package ca.uhn.fhir.validation;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import org.thymeleaf.util.Validate;
import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.util.ObjectUtil;
abstract class BaseValidationContext<T> implements IValidationContext<T> { abstract class BaseValidationContext<T> implements IValidationContext<T> {
@@ -38,7 +37,7 @@ abstract class BaseValidationContext<T> implements IValidationContext<T> {
@Override @Override
public void addValidationMessage(SingleValidationMessage theMessage) { public void addValidationMessage(SingleValidationMessage theMessage) {
Validate.notNull(theMessage, "theMessage must not be null"); ObjectUtil.requireNonNull(theMessage, "theMessage must not be null");
myMessages.add(theMessage); myMessages.add(theMessage);
} }

View File

@@ -32,6 +32,7 @@ import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.model.api.Bundle; import ca.uhn.fhir.model.api.Bundle;
import ca.uhn.fhir.model.api.IResource; import ca.uhn.fhir.model.api.IResource;
import ca.uhn.fhir.util.OperationOutcomeUtil; import ca.uhn.fhir.util.OperationOutcomeUtil;
import ca.uhn.fhir.validation.schematron.SchematronProvider;
/** /**
* Resource validator, which checks resources for compliance against various validation schemes (schemas, schematrons, profiles, etc.) * Resource validator, which checks resources for compliance against various validation schemes (schemas, schematrons, profiles, etc.)
@@ -49,9 +50,6 @@ public class FhirValidator {
private static final String I18N_KEY_NO_PHLOC_ERROR = FhirValidator.class.getName() + ".noPhlocError"; private static final String I18N_KEY_NO_PHLOC_ERROR = FhirValidator.class.getName() + ".noPhlocError";
private static final String I18N_KEY_NO_PHLOC_WARNING = FhirValidator.class.getName() + ".noPhlocWarningOnStartup";
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirValidator.class);
private static volatile Boolean ourPhlocPresentOnClasspath; private static volatile Boolean ourPhlocPresentOnClasspath;
private final FhirContext myContext; private final FhirContext myContext;
private List<IValidatorModule> myValidators = new ArrayList<IValidatorModule>(); private List<IValidatorModule> myValidators = new ArrayList<IValidatorModule>();
@@ -63,13 +61,7 @@ public class FhirValidator {
myContext = theFhirContext; myContext = theFhirContext;
if (ourPhlocPresentOnClasspath == null) { if (ourPhlocPresentOnClasspath == null) {
try { ourPhlocPresentOnClasspath = SchematronProvider.isScematronAvailable(theFhirContext);
Class.forName("com.phloc.schematron.ISchematronResource");
ourPhlocPresentOnClasspath = true;
} catch (ClassNotFoundException e) {
ourLog.info(theFhirContext.getLocalizer().getMessage(I18N_KEY_NO_PHLOC_WARNING));
ourPhlocPresentOnClasspath = false;
}
} }
} }
@@ -110,7 +102,12 @@
* Should the validator validate the resource against the base schema (the schema provided with the FHIR distribution itself) * Should the validator validate the resource against the base schema (the schema provided with the FHIR distribution itself)
*/ */
public synchronized boolean isValidateAgainstStandardSchematron() { public synchronized boolean isValidateAgainstStandardSchematron() {
return haveValidatorOfType(SchematronBaseValidator.class); if (!ourPhlocPresentOnClasspath) {
return false; // No need to ask since we don't have Phloc. Also Class.forName will complain
// about missing phloc import.
}
Class<? extends IValidatorModule> cls = SchematronProvider.getSchematronValidatorClass();
return haveValidatorOfType(cls);
} }
/** /**
@@ -147,7 +144,9 @@
if (theValidateAgainstStandardSchematron && !ourPhlocPresentOnClasspath) { if (theValidateAgainstStandardSchematron && !ourPhlocPresentOnClasspath) {
throw new IllegalArgumentException(myContext.getLocalizer().getMessage(I18N_KEY_NO_PHLOC_ERROR)); throw new IllegalArgumentException(myContext.getLocalizer().getMessage(I18N_KEY_NO_PHLOC_ERROR));
} }
addOrRemoveValidator(theValidateAgainstStandardSchematron, SchematronBaseValidator.class, new SchematronBaseValidator(myContext)); Class<? extends IValidatorModule> cls = SchematronProvider.getSchematronValidatorClass();
IValidatorModule instance = SchematronProvider.getSchematronValidatorInstance(myContext);
addOrRemoveValidator(theValidateAgainstStandardSchematron, cls, instance);
return this; return this;
} }
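A hedged usage sketch of the reworked schematron wiring; FhirContext.forDstu2() and newValidator() are assumed from the existing HAPI FHIR API, and enabling schematron still requires the optional Phloc dependency on the classpath:

FhirContext ctx = FhirContext.forDstu2();
FhirValidator validator = ctx.newValidator();
// Throws IllegalArgumentException when Phloc is absent, as shown above.
validator.setValidateAgainstStandardSchematron(true);
boolean enabled = validator.isValidateAgainstStandardSchematron();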

View File

@@ -53,7 +53,9 @@ import ca.uhn.fhir.model.api.Bundle;
import ca.uhn.fhir.rest.server.EncodingEnum; import ca.uhn.fhir.rest.server.EncodingEnum;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
class SchemaBaseValidator implements IValidatorModule { public class SchemaBaseValidator implements IValidatorModule {
public static final String RESOURCES_JAR_NOTE = "Note that as of HAPI FHIR 1.2, DSTU2 validation files are kept in a separate JAR (hapi-fhir-validation-resources-XXX.jar) which must be added to your classpath. See the HAPI FHIR download page for more information.";
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(SchemaBaseValidator.class); private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(SchemaBaseValidator.class);
private static final Set<String> SCHEMA_NAMES; private static final Set<String> SCHEMA_NAMES;
@@ -129,7 +131,7 @@ class SchemaBaseValidator implements IValidatorModule {
ourLog.debug("Going to load resource: {}", pathToBase); ourLog.debug("Going to load resource: {}", pathToBase);
InputStream baseIs = FhirValidator.class.getResourceAsStream(pathToBase); InputStream baseIs = FhirValidator.class.getResourceAsStream(pathToBase);
if (baseIs == null) { if (baseIs == null) {
throw new InternalErrorException("Schema not found. " + SchematronBaseValidator.RESOURCES_JAR_NOTE); throw new InternalErrorException("Schema not found. " + RESOURCES_JAR_NOTE);
} }
baseIs = new BOMInputStream(baseIs, false); baseIs = new BOMInputStream(baseIs, false);
InputStreamReader baseReader = new InputStreamReader(baseIs, Charset.forName("UTF-8")); InputStreamReader baseReader = new InputStreamReader(baseIs, Charset.forName("UTF-8"));

View File

@@ -21,7 +21,6 @@ package ca.uhn.fhir.validation;
*/ */
import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IBaseResource;
import org.thymeleaf.util.Validate;
import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.model.api.Bundle; import ca.uhn.fhir.model.api.Bundle;
@@ -29,8 +28,9 @@ import ca.uhn.fhir.model.api.IResource;
import ca.uhn.fhir.rest.method.MethodUtil; import ca.uhn.fhir.rest.method.MethodUtil;
import ca.uhn.fhir.rest.server.EncodingEnum; import ca.uhn.fhir.rest.server.EncodingEnum;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.util.ObjectUtil;
class ValidationContext<T> extends BaseValidationContext<T> implements IValidationContext<T> { public class ValidationContext<T> extends BaseValidationContext<T> implements IValidationContext<T> {
private final IEncoder myEncoder; private final IEncoder myEncoder;
private final T myResource; private final T myResource;
@@ -136,8 +136,8 @@ class ValidationContext<T> extends BaseValidationContext<T> implements IValidati
} }
public static IValidationContext<IBaseResource> forText(final FhirContext theContext, final String theResourceBody) { public static IValidationContext<IBaseResource> forText(final FhirContext theContext, final String theResourceBody) {
Validate.notNull(theContext, "theContext can not be null"); ObjectUtil.requireNonNull(theContext, "theContext can not be null");
Validate.notEmpty(theResourceBody, "theResourceBody can not be null or empty"); ObjectUtil.requireNotEmpty(theResourceBody, "theResourceBody can not be null or empty");
return new BaseValidationContext<IBaseResource>(theContext) { return new BaseValidationContext<IBaseResource>(theContext) {
private EncodingEnum myEncoding; private EncodingEnum myEncoding;
@@ -169,5 +169,4 @@ class ValidationContext<T> extends BaseValidationContext<T> implements IValidati
}; };
} }
} }

View File

@@ -1,4 +1,4 @@
package ca.uhn.fhir.validation; package ca.uhn.fhir.validation.schematron;
/* /*
* #%L * #%L
@@ -37,6 +37,13 @@ import ca.uhn.fhir.model.api.Bundle;
import ca.uhn.fhir.model.api.BundleEntry; import ca.uhn.fhir.model.api.BundleEntry;
import ca.uhn.fhir.rest.server.EncodingEnum; import ca.uhn.fhir.rest.server.EncodingEnum;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.validation.FhirValidator;
import ca.uhn.fhir.validation.IValidationContext;
import ca.uhn.fhir.validation.IValidatorModule;
import ca.uhn.fhir.validation.ResultSeverityEnum;
import ca.uhn.fhir.validation.SchemaBaseValidator;
import ca.uhn.fhir.validation.SingleValidationMessage;
import ca.uhn.fhir.validation.ValidationContext;
import com.phloc.commons.error.IResourceError; import com.phloc.commons.error.IResourceError;
import com.phloc.commons.error.IResourceErrorGroup; import com.phloc.commons.error.IResourceErrorGroup;
@@ -44,13 +51,16 @@ import com.phloc.schematron.ISchematronResource;
import com.phloc.schematron.SchematronHelper; import com.phloc.schematron.SchematronHelper;
import com.phloc.schematron.xslt.SchematronResourceSCH; import com.phloc.schematron.xslt.SchematronResourceSCH;
/**
* This class is only used using reflection from {@link SchematronProvider} in order
* to be truly optional.
*/
public class SchematronBaseValidator implements IValidatorModule { public class SchematronBaseValidator implements IValidatorModule {
static final String RESOURCES_JAR_NOTE = "Note that as of HAPI FHIR 1.2, DSTU2 validation files are kept in a separate JAR (hapi-fhir-validation-resources-XXX.jar) which must be added to your classpath. See the HAPI FHIR download page for more information.";
private Map<Class<? extends IBaseResource>, ISchematronResource> myClassToSchematron = new HashMap<Class<? extends IBaseResource>, ISchematronResource>(); private Map<Class<? extends IBaseResource>, ISchematronResource> myClassToSchematron = new HashMap<Class<? extends IBaseResource>, ISchematronResource>();
private FhirContext myCtx; private FhirContext myCtx;
SchematronBaseValidator(FhirContext theContext) { public SchematronBaseValidator(FhirContext theContext) {
myCtx = theContext; myCtx = theContext;
} }
@@ -126,7 +136,7 @@ public class SchematronBaseValidator implements IValidatorModule {
InputStream baseIs = FhirValidator.class.getResourceAsStream(pathToBase); InputStream baseIs = FhirValidator.class.getResourceAsStream(pathToBase);
try { try {
if (baseIs == null) { if (baseIs == null) {
throw new InternalErrorException("Failed to load schematron for resource '" + theCtx.getFhirContext().getResourceDefinition(theCtx.getResource()).getBaseDefinition().getName() + "'. " + RESOURCES_JAR_NOTE); throw new InternalErrorException("Failed to load schematron for resource '" + theCtx.getFhirContext().getResourceDefinition(theCtx.getResource()).getBaseDefinition().getName() + "'. " + SchemaBaseValidator.RESOURCES_JAR_NOTE);
} }
} finally { } finally {
IOUtils.closeQuietly(baseIs); IOUtils.closeQuietly(baseIs);

View File

@@ -0,0 +1,42 @@
package ca.uhn.fhir.validation.schematron;
import java.lang.reflect.Constructor;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.validation.FhirValidator;
import ca.uhn.fhir.validation.IValidatorModule;
public class SchematronProvider {
private static final String I18N_KEY_NO_PHLOC_WARNING = FhirValidator.class.getName() + ".noPhlocWarningOnStartup";
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirValidator.class);
public static boolean isScematronAvailable(FhirContext theFhirContext) {
try {
Class.forName("com.phloc.schematron.ISchematronResource");
return true;
} catch (ClassNotFoundException e) {
ourLog.info(theFhirContext.getLocalizer().getMessage(I18N_KEY_NO_PHLOC_WARNING));
return false;
}
}
public static Class<? extends IValidatorModule> getSchematronValidatorClass() {
try {
return (Class<? extends IValidatorModule>) Class.forName("ca.uhn.fhir.validation.schematron.SchematronBaseValidator");
} catch (ClassNotFoundException e) {
throw new IllegalStateException("Cannot resolve schematron validator ", e);
}
}
public static IValidatorModule getSchematronValidatorInstance(FhirContext myContext) {
try {
Class<? extends IValidatorModule> cls = getSchematronValidatorClass();
Constructor<? extends IValidatorModule> constructor = cls.getConstructor(FhirContext.class);
return constructor.newInstance(myContext);
} catch (ReflectiveOperationException e) {
throw new IllegalStateException("Cannot construct schematron validator ", e);
}
}
}

View File

@@ -0,0 +1,78 @@
package ca.uhn.fhir.util;
import static org.junit.Assert.*;
import java.lang.reflect.Method;
import org.junit.Test;
import ca.uhn.fhir.util.reflection.IBeanUtils;
import ca.uhn.fhir.util.reflection.JavaBeansBeanUtil;
import ca.uhn.fhir.util.reflection.JavaReflectBeanUtil;
public class BeanUtilTest {
@Test
public void testFindAccessor() throws Exception {
JavaBeansBeanUtil javaBeansBeanUtil = new JavaBeansBeanUtil();
testBeanUtilsAccessor(javaBeansBeanUtil);
JavaReflectBeanUtil javaReflectBeanUtil = new JavaReflectBeanUtil();
testBeanUtilsAccessor(javaReflectBeanUtil);
assertNotNull(BeanUtils.findAccessor(BeanUtilTestClass.class, String.class, "field"));
Method jbMGet = javaBeansBeanUtil.findAccessor(BeanUtilTestClass.class, String.class, "field");
Method jrMGet = javaReflectBeanUtil.findAccessor(BeanUtilTestClass.class, String.class, "field");
assertNotNull(jbMGet);
assertNotNull(jrMGet);
assertEquals(jbMGet, jrMGet);
}
@Test
public void testFindMutator() throws Exception {
JavaBeansBeanUtil javaBeansBeanUtil = new JavaBeansBeanUtil();
testBeanUtilsMutator(javaBeansBeanUtil);
JavaReflectBeanUtil javaReflectBeanUtil = new JavaReflectBeanUtil();
testBeanUtilsMutator(javaReflectBeanUtil);
assertNotNull(BeanUtils.findMutator(BeanUtilTestClass.class, String.class, "field"));
Method jbMSet = javaBeansBeanUtil.findMutator(BeanUtilTestClass.class, String.class, "field");
Method jrMSet = javaReflectBeanUtil.findMutator(BeanUtilTestClass.class, String.class, "field");
assertNotNull(jbMSet);
assertNotNull(jrMSet);
assertEquals(jbMSet, jrMSet);
}
private void testBeanUtilsAccessor(IBeanUtils util) throws Exception {
assertNotNull(util.findAccessor(BeanUtilTestClass.class, String.class, "field"));
try {
assertNull(util.findAccessor(BeanUtilTestClass.class, String.class, "fieldX"));
fail("Field is not in class");
} catch (NoSuchFieldException e) { }
try {
assertNull(util.findAccessor(BeanUtilTestClass.class, Integer.class, "field"));
fail("Field is in class, but we expect Integer as return type");
} catch (NoSuchFieldException e) { }
}
private void testBeanUtilsMutator(IBeanUtils util) throws Exception {
assertNotNull(util.findMutator(BeanUtilTestClass.class, String.class, "field"));
try {
assertNull(util.findMutator(BeanUtilTestClass.class, String.class, "fieldX"));
fail("Field is not in class");
} catch (NoSuchFieldException e) { }
try {
assertNull(util.findMutator(BeanUtilTestClass.class, Integer.class, "field"));
fail("Field is in class, but we expect Integer as parameter type");
} catch (NoSuchFieldException e) { }
}
public static class BeanUtilTestClass {
private String myField;
public String getField() {
return myField;
}
public void setField(String value) {
this.myField = value;
}
}
}

View File

@@ -0,0 +1,18 @@
package ca.uhn.fhir.util;
import static org.junit.Assert.*;
import org.junit.Test;
import ca.uhn.fhir.util.jar.DependencyLogFactory;
import ca.uhn.fhir.util.jar.IDependencyLog;
public class DependencyLogUtilTest {
@Test
public void testDependencyLogFactory() {
IDependencyLog logger = DependencyLogFactory.createJarLogger();
assertNotNull(logger);
logger.logStaxImplementation(DependencyLogUtilTest.class);
}
}

View File

@@ -0,0 +1,57 @@
package ca.uhn.fhir.util;
import static org.junit.Assert.*;
import org.junit.Test;
public class ObjectUtilTest {
@Test
public void testEquals() {
String a = new String("a");
String b = new String("b");
assertFalse(ObjectUtil.equals(b, a));
assertFalse(ObjectUtil.equals(a, b));
assertFalse(ObjectUtil.equals(a, null));
assertFalse(ObjectUtil.equals(null, a));
assertTrue(ObjectUtil.equals(null, null));
assertTrue(ObjectUtil.equals(a, a));
}
@Test
public void testRequireNonNull() {
String message = "Must not be null in test";
try {
ObjectUtil.requireNonNull(null, message);
fail("should not get here.");
} catch (NullPointerException e) {
assertEquals(message, e.getMessage());
}
assertNotNull(ObjectUtil.requireNonNull("some string", message));
}
@Test
public void testRequireNotEmpty() {
//All these are empty, null or whitespace strings.
testRequireNotEmptyErrorScenario(null);
testRequireNotEmptyErrorScenario("");
testRequireNotEmptyErrorScenario(" ");
testRequireNotEmptyErrorScenario(" ");
//All these are non-empty; each contains at least one non-whitespace character.
ObjectUtil.requireNotEmpty("abc ", "");
ObjectUtil.requireNotEmpty(" abc ", "");
ObjectUtil.requireNotEmpty(" abc", "");
}
private void testRequireNotEmptyErrorScenario(String string) {
String message = "must not be empty in test";
try {
ObjectUtil.requireNotEmpty(string, message);
fail("should not get here.");
} catch (IllegalArgumentException e) {
assertEquals(message, e.getMessage());
}
}
}

View File

@ -8,7 +8,7 @@
</encoder> </encoder>
</appender> </appender>
<logger name="ca.uhn.fhir.cli" additivity="false" level="info"> <logger name="ca.uhn.fhir" additivity="false" level="info">
<appender-ref ref="STDOUT" /> <appender-ref ref="STDOUT" />
</logger> </logger>

View File

@ -1,8 +1,8 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion> <modelVersion>4.0.0</modelVersion>
<!-- The parent of this project is the deployable POM. This project isn't deployable, but this keeps it before the root pom in the reactor order when building the site. I don't know why this works... <!-- The parent of this project is the deployable POM. This project isn't deployable, but this keeps it before the root pom in the reactor order when building the
Need to investigate this. --> site. I don't know why this works... Need to investigate this. -->
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId> <artifactId>hapi-deployable-pom</artifactId>
@ -160,21 +160,21 @@
<artifactId>spring-test</artifactId> <artifactId>spring-test</artifactId>
<scope>test</scope> <scope>test</scope>
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.eclipse.jetty.websocket</groupId> <groupId>org.eclipse.jetty.websocket</groupId>
<artifactId>websocket-api</artifactId> <artifactId>websocket-api</artifactId>
<scope>test</scope> <scope>test</scope>
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.eclipse.jetty.websocket</groupId> <groupId>org.eclipse.jetty.websocket</groupId>
<artifactId>websocket-client</artifactId> <artifactId>websocket-client</artifactId>
<scope>test</scope> <scope>test</scope>
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.eclipse.jetty.websocket</groupId> <groupId>org.eclipse.jetty.websocket</groupId>
<artifactId>websocket-server</artifactId> <artifactId>websocket-server</artifactId>
<scope>test</scope> <scope>test</scope>
</dependency> </dependency>
</dependencies> </dependencies>
<properties> <properties>
@ -321,10 +321,11 @@
</execution> </execution>
</executions> </executions>
</plugin> </plugin>
<!-- <plugin> <groupId>org.codehaus.mojo</groupId> <artifactId>cobertura-maven-plugin</artifactId> <version>${maven_cobertura_plugin_version}</version> <configuration> <check> <branchRate>85</branchRate> <!-- <plugin> <groupId>org.codehaus.mojo</groupId> <artifactId>cobertura-maven-plugin</artifactId> <version>${maven_cobertura_plugin_version}</version> <configuration>
<lineRate>85</lineRate> <haltOnFailure>true</haltOnFailure> <totalBranchRate>85</totalBranchRate> <totalLineRate>85</totalLineRate> <packageLineRate>85</packageLineRate> <packageBranchRate>85</packageBranchRate> <check> <branchRate>85</branchRate> <lineRate>85</lineRate> <haltOnFailure>true</haltOnFailure> <totalBranchRate>85</totalBranchRate> <totalLineRate>85</totalLineRate>
<regexes> <regex> <pattern>com.example.reallyimportant.*</pattern> <branchRate>90</branchRate> <lineRate>80</lineRate> </regex> <regex> <pattern>com.example.boringcode.*</pattern> <branchRate>40</branchRate> <packageLineRate>85</packageLineRate> <packageBranchRate>85</packageBranchRate> <regexes> <regex> <pattern>com.example.reallyimportant.*</pattern> <branchRate>90</branchRate>
<lineRate>30</lineRate> </regex> </regexes> </check> </configuration> <executions> <execution> <goals> <goal>clean</goal> <goal>check</goal> </goals> </execution> </executions> </plugin> --> <lineRate>80</lineRate> </regex> <regex> <pattern>com.example.boringcode.*</pattern> <branchRate>40</branchRate> <lineRate>30</lineRate> </regex> </regexes> </check>
</configuration> <executions> <execution> <goals> <goal>clean</goal> <goal>check</goal> </goals> </execution> </executions> </plugin> -->
</plugins> </plugins>
<resources> <resources>
</resources> </resources>

View File

@ -109,6 +109,7 @@
<descriptor>${project.basedir}/src/assembly/hapi-fhir-android-distribution.xml</descriptor> <descriptor>${project.basedir}/src/assembly/hapi-fhir-android-distribution.xml</descriptor>
<descriptor>${project.basedir}/src/assembly/hapi-fhir-cli.xml</descriptor> <descriptor>${project.basedir}/src/assembly/hapi-fhir-cli.xml</descriptor>
</descriptors> </descriptors>
<finalName>hapi-fhir-${project.version}</finalName>
</configuration> </configuration>
</execution> </execution>
</executions> </executions>

View File

@ -25,6 +25,7 @@
<include>hapi-fhir-cli</include> <include>hapi-fhir-cli</include>
<include>hapi-fhir-cli.cmd</include> <include>hapi-fhir-cli.cmd</include>
</includes> </includes>
<fileMode>0555</fileMode>
</fileSet> </fileSet>
</fileSets> </fileSets>

View File

@ -1,6 +1,7 @@
target/ target/
/bin /bin
nohup.out nohup.out
lucene_indexes/
# Created by https://www.gitignore.io # Created by https://www.gitignore.io

View File

@ -343,8 +343,10 @@
<targetResourceSpringBeansFile>hapi-fhir-server-resourceproviders-dstu2.xml</targetResourceSpringBeansFile> <targetResourceSpringBeansFile>hapi-fhir-server-resourceproviders-dstu2.xml</targetResourceSpringBeansFile>
<baseResourceNames></baseResourceNames> <baseResourceNames></baseResourceNames>
<excludeResourceNames> <excludeResourceNames>
<!--
<excludeResourceName>OperationDefinition</excludeResourceName> <excludeResourceName>OperationDefinition</excludeResourceName>
<excludeResourceName>OperationOutcome</excludeResourceName> <excludeResourceName>OperationOutcome</excludeResourceName>
-->
</excludeResourceNames> </excludeResourceNames>
</configuration> </configuration>
</execution> </execution>

View File

@ -45,13 +45,17 @@ import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery; import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Predicate; import javax.persistence.criteria.Predicate;
import javax.persistence.criteria.Root; import javax.persistence.criteria.Root;
import javax.xml.stream.events.Characters;
import javax.xml.stream.events.XMLEvent;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.Validate; import org.apache.commons.lang3.Validate;
import org.apache.http.NameValuePair; import org.apache.http.NameValuePair;
import org.apache.http.client.utils.URLEncodedUtils; import org.apache.http.client.utils.URLEncodedUtils;
import org.hl7.fhir.instance.model.api.IBaseDatatype;
import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.transaction.PlatformTransactionManager; import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.TransactionStatus; import org.springframework.transaction.TransactionStatus;
@ -250,8 +254,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
try { try {
resourceDefinition = getContext().getResourceDefinition(typeString); resourceDefinition = getContext().getResourceDefinition(typeString);
} catch (DataFormatException e) { } catch (DataFormatException e) {
throw new InvalidRequestException( throw new InvalidRequestException("Invalid resource reference found at path[" + nextPathsUnsplit + "] - Resource type is unknown or not supported on this server - " + nextValue.getReference().getValue());
"Invalid resource reference found at path[" + nextPathsUnsplit + "] - Resource type is unknown or not supported on this server - " + nextValue.getReference().getValue());
} }
Class<? extends IBaseResource> type = resourceDefinition.getImplementingClass(); Class<? extends IBaseResource> type = resourceDefinition.getImplementingClass();
@ -287,8 +290,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
} }
if (!typeString.equals(target.getResourceType())) { if (!typeString.equals(target.getResourceType())) {
throw new UnprocessableEntityException("Resource contains reference to " + nextValue.getReference().getValue() + " but resource with ID " + nextValue.getReference().getIdPart() throw new UnprocessableEntityException("Resource contains reference to " + nextValue.getReference().getValue() + " but resource with ID " + nextValue.getReference().getIdPart() + " is actually of type " + target.getResourceType());
+ " is actually of type " + target.getResourceType());
} }
/* /*
@ -695,7 +697,11 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
} }
} }
for (ResourceTag next : new ArrayList<ResourceTag>(theEntity.getTags())) { ArrayList<ResourceTag> existingTags = new ArrayList<ResourceTag>();
if (theEntity.isHasTags()) {
existingTags.addAll(theEntity.getTags());
}
for (ResourceTag next : existingTags) {
TagDefinition nextDef = next.getTag(); TagDefinition nextDef = next.getTag();
if (!allDefs.contains(nextDef)) { if (!allDefs.contains(nextDef)) {
if (shouldDroppedTagBeRemovedOnUpdate(theEntity, next)) { if (shouldDroppedTagBeRemovedOnUpdate(theEntity, next)) {
@ -716,9 +722,11 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
} }
/** /**
* This method is called when an update to an existing resource detects that the resource supplied for update is missing a tag/profile/security label that the currently persisted resource holds. * This method is called when an update to an existing resource detects that the resource supplied for update is
* missing a tag/profile/security label that the currently persisted resource holds.
* <p> * <p>
* The default implementation removes any profile declarations, but leaves tags and security labels in place. Subclasses may choose to override and change this behaviour. * The default implementation removes any profile declarations, but leaves tags and security labels in place.
* Subclasses may choose to override and change this behaviour.
* </p> * </p>
* *
* @param theEntity * @param theEntity
@ -726,7 +734,8 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
* @param theTag * @param theTag
* The tag * The tag
* @return Returns <code>true</code> if the tag should be removed * @return Returns <code>true</code> if the tag should be removed
* @see <a href="http://hl7.org/fhir/2015Sep/resource.html#1.11.3.7">Updates to Tags, Profiles, and Security Labels</a> for a description of the logic that the default behaviour follows. * @see <a href="http://hl7.org/fhir/2015Sep/resource.html#1.11.3.7">Updates to Tags, Profiles, and Security
* Labels</a> for a description of the logic that the default behaviour follows.
*/ */
protected boolean shouldDroppedTagBeRemovedOnUpdate(ResourceTable theEntity, ResourceTag theTag) { protected boolean shouldDroppedTagBeRemovedOnUpdate(ResourceTable theEntity, ResourceTag theTag) {
if (theTag.getTag().getTagType() == TagTypeEnum.PROFILE) { if (theTag.getTag().getTagType() == TagTypeEnum.PROFILE) {
@ -763,13 +772,13 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
RuntimeResourceDefinition resourceDef = getContext().getResourceDefinition(theResourceType); RuntimeResourceDefinition resourceDef = getContext().getResourceDefinition(theResourceType);
SearchParameterMap paramMap = translateMatchUrl(theMatchUrl, resourceDef); SearchParameterMap paramMap = translateMatchUrl(theMatchUrl, resourceDef);
if (paramMap.isEmpty()) { if (paramMap.isEmpty()) {
throw new InvalidRequestException("Invalid match URL[" + theMatchUrl + "] - URL has no search parameters"); throw new InvalidRequestException("Invalid match URL[" + theMatchUrl + "] - URL has no search parameters");
} }
IFhirResourceDao<R> dao = getDao(theResourceType); IFhirResourceDao<R> dao = getDao(theResourceType);
Set<Long> ids = dao.searchForIdsWithAndOr(paramMap); Set<Long> ids = dao.searchForIdsWithAndOr(paramMap, new HashSet<Long>(), paramMap.getLastUpdated());
return ids; return ids;
} }
@ -845,7 +854,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
if (RESOURCE_META_PARAMS.containsKey(nextParamName)) { if (RESOURCE_META_PARAMS.containsKey(nextParamName)) {
if (isNotBlank(paramList.get(0).getQualifier()) && paramList.get(0).getQualifier().startsWith(".")) { if (isNotBlank(paramList.get(0).getQualifier()) && paramList.get(0).getQualifier().startsWith(".")) {
throw new InvalidRequestException("Invalid parameter chain: " + nextParamName + paramList.get(0).getQualifier()); throw new InvalidRequestException("Invalid parameter chain: " + nextParamName + paramList.get(0).getQualifier());
} }
IQueryParameterAnd<?> type = newInstanceAnd(nextParamName); IQueryParameterAnd<?> type = newInstanceAnd(nextParamName);
type.setValuesAsQueryTokens((paramList)); type.setValuesAsQueryTokens((paramList));
@ -857,7 +866,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
if (paramDef == null) { if (paramDef == null) {
throw new InvalidRequestException("Failed to parse match URL[" + theMatchUrl + "] - Resource type " + resourceDef.getName() + " does not have a parameter with name: " + nextParamName); throw new InvalidRequestException("Failed to parse match URL[" + theMatchUrl + "] - Resource type " + resourceDef.getName() + " does not have a parameter with name: " + nextParamName);
} }
IQueryParameterAnd<?> param = MethodUtil.parseQueryParams(paramDef, nextParamName, paramList); IQueryParameterAnd<?> param = MethodUtil.parseQueryParams(paramDef, nextParamName, paramList);
paramMap.add(nextParamName, param); paramMap.add(nextParamName, param);
} }
@ -1100,7 +1109,8 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
} }
} else if (theForHistoryOperation) { } else if (theForHistoryOperation) {
/* /*
* If the create and update times match, this was when the resource was created so we should mark it as a POST. Otherwise, it's a PUT. * If the create and update times match, this was when the resource was created so we should mark it as a POST.
* Otherwise, it's a PUT.
*/ */
Date published = theEntity.getPublished().getValue(); Date published = theEntity.getPublished().getValue();
Date updated = theEntity.getUpdated().getValue(); Date updated = theEntity.getUpdated().getValue();
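Put differently, the verb recorded for the history entry follows directly from comparing those two timestamps; a compressed sketch of the decision described in the comment above (the local variable is illustrative only):

// Same instant => this version is the original create, so report it as a POST; otherwise it is a PUT.
String httpVerb = published.equals(updated) ? "POST" : "PUT";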
@ -1194,8 +1204,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
} }
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
protected ResourceTable updateEntity(final IResource theResource, ResourceTable theEntity, boolean theUpdateHistory, Date theDeletedTimestampOrNull, boolean thePerformIndexing, protected ResourceTable updateEntity(final IResource theResource, ResourceTable theEntity, boolean theUpdateHistory, Date theDeletedTimestampOrNull, boolean thePerformIndexing, boolean theUpdateVersion, Date theUpdateTime) {
boolean theUpdateVersion, Date theUpdateTime) {
/* /*
* This should be the very first thing.. * This should be the very first thing..
@ -1204,8 +1213,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
validateResourceForStorage((T) theResource, theEntity); validateResourceForStorage((T) theResource, theEntity);
String resourceType = myContext.getResourceDefinition(theResource).getName(); String resourceType = myContext.getResourceDefinition(theResource).getName();
if (isNotBlank(theEntity.getResourceType()) && !theEntity.getResourceType().equals(resourceType)) { if (isNotBlank(theEntity.getResourceType()) && !theEntity.getResourceType().equals(resourceType)) {
throw new UnprocessableEntityException( throw new UnprocessableEntityException("Existing resource ID[" + theEntity.getIdDt().toUnqualifiedVersionless() + "] is of type[" + theEntity.getResourceType() + "] - Cannot update with [" + resourceType + "]");
"Existing resource ID[" + theEntity.getIdDt().toUnqualifiedVersionless() + "] is of type[" + theEntity.getResourceType() + "] - Cannot update with [" + resourceType + "]");
} }
} }
@ -1222,23 +1230,38 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
theEntity.setVersion(theEntity.getVersion() + 1); theEntity.setVersion(theEntity.getVersion() + 1);
} }
boolean paramsStringPopulated = theEntity.isParamsStringPopulated(); Collection<ResourceIndexedSearchParamString> paramsString = new ArrayList<ResourceIndexedSearchParamString>();
boolean paramsTokenPopulated = theEntity.isParamsTokenPopulated(); if (theEntity.isParamsStringPopulated()) {
boolean paramsNumberPopulated = theEntity.isParamsNumberPopulated(); paramsString.addAll(theEntity.getParamsString());
boolean paramsQuantityPopulated = theEntity.isParamsQuantityPopulated(); }
boolean paramsDatePopulated = theEntity.isParamsDatePopulated(); Collection<ResourceIndexedSearchParamToken> paramsToken = new ArrayList<ResourceIndexedSearchParamToken>();
boolean paramsCoordsPopulated = theEntity.isParamsCoordsPopulated(); if (theEntity.isParamsTokenPopulated()) {
boolean paramsUriPopulated = theEntity.isParamsUriPopulated(); paramsToken.addAll(theEntity.getParamsToken());
boolean hasLinks = theEntity.isHasLinks(); }
Collection<ResourceIndexedSearchParamNumber> paramsNumber = new ArrayList<ResourceIndexedSearchParamNumber>();
Collection<ResourceIndexedSearchParamString> paramsString = new ArrayList<ResourceIndexedSearchParamString>(theEntity.getParamsString()); if (theEntity.isParamsNumberPopulated()) {
Collection<ResourceIndexedSearchParamToken> paramsToken = new ArrayList<ResourceIndexedSearchParamToken>(theEntity.getParamsToken()); paramsNumber.addAll(theEntity.getParamsNumber());
Collection<ResourceIndexedSearchParamNumber> paramsNumber = new ArrayList<ResourceIndexedSearchParamNumber>(theEntity.getParamsNumber()); }
Collection<ResourceIndexedSearchParamQuantity> paramsQuantity = new ArrayList<ResourceIndexedSearchParamQuantity>(theEntity.getParamsQuantity()); Collection<ResourceIndexedSearchParamQuantity> paramsQuantity = new ArrayList<ResourceIndexedSearchParamQuantity>();
Collection<ResourceIndexedSearchParamDate> paramsDate = new ArrayList<ResourceIndexedSearchParamDate>(theEntity.getParamsDate()); if (theEntity.isParamsQuantityPopulated()) {
Collection<ResourceIndexedSearchParamUri> paramsUri = new ArrayList<ResourceIndexedSearchParamUri>(theEntity.getParamsUri()); paramsQuantity.addAll(theEntity.getParamsQuantity());
Collection<ResourceIndexedSearchParamCoords> paramsCoords = new ArrayList<ResourceIndexedSearchParamCoords>(theEntity.getParamsCoords()); }
Collection<ResourceLink> resourceLinks = new ArrayList<ResourceLink>(theEntity.getResourceLinks()); Collection<ResourceIndexedSearchParamDate> paramsDate = new ArrayList<ResourceIndexedSearchParamDate>();
if (theEntity.isParamsDatePopulated()) {
paramsDate.addAll(theEntity.getParamsDate());
}
Collection<ResourceIndexedSearchParamUri> paramsUri = new ArrayList<ResourceIndexedSearchParamUri>();
if (theEntity.isParamsUriPopulated()) {
paramsUri.addAll(theEntity.getParamsUri());
}
Collection<ResourceIndexedSearchParamCoords> paramsCoords = new ArrayList<ResourceIndexedSearchParamCoords>();
if (theEntity.isParamsCoordsPopulated()) {
paramsCoords.addAll(theEntity.getParamsCoords());
}
Collection<ResourceLink> resourceLinks = new ArrayList<ResourceLink>();
if (theEntity.isHasLinks()) {
resourceLinks.addAll(theEntity.getResourceLinks());
}
Set<ResourceIndexedSearchParamString> stringParams = null; Set<ResourceIndexedSearchParamString> stringParams = null;
Set<ResourceIndexedSearchParamToken> tokenParams = null; Set<ResourceIndexedSearchParamToken> tokenParams = null;
@ -1261,6 +1284,8 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
links = Collections.emptySet(); links = Collections.emptySet();
theEntity.setDeleted(theDeletedTimestampOrNull); theEntity.setDeleted(theDeletedTimestampOrNull);
theEntity.setUpdated(theDeletedTimestampOrNull); theEntity.setUpdated(theDeletedTimestampOrNull);
theEntity.setNarrativeTextParsedIntoWords(null);
theEntity.setContentTextParsedIntoWords(null);
} else { } else {
@ -1309,6 +1334,8 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
theEntity.setResourceLinks(links); theEntity.setResourceLinks(links);
theEntity.setHasLinks(links.isEmpty() == false); theEntity.setHasLinks(links.isEmpty() == false);
theEntity.setIndexStatus(INDEX_STATUS_INDEXED); theEntity.setIndexStatus(INDEX_STATUS_INDEXED);
theEntity.setNarrativeTextParsedIntoWords(parseNarrativeTextIntoWords(theResource));
theEntity.setContentTextParsedIntoWords(parseContentTextIntoWords(theResource));
} else { } else {
@ -1338,76 +1365,60 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
if (thePerformIndexing) { if (thePerformIndexing) {
if (paramsStringPopulated) { for (ResourceIndexedSearchParamString next : paramsString) {
for (ResourceIndexedSearchParamString next : paramsString) { myEntityManager.remove(next);
myEntityManager.remove(next);
}
} }
for (ResourceIndexedSearchParamString next : stringParams) { for (ResourceIndexedSearchParamString next : stringParams) {
myEntityManager.persist(next); myEntityManager.persist(next);
} }
if (paramsTokenPopulated) { for (ResourceIndexedSearchParamToken next : paramsToken) {
for (ResourceIndexedSearchParamToken next : paramsToken) { myEntityManager.remove(next);
myEntityManager.remove(next);
}
} }
for (ResourceIndexedSearchParamToken next : tokenParams) { for (ResourceIndexedSearchParamToken next : tokenParams) {
myEntityManager.persist(next); myEntityManager.persist(next);
} }
if (paramsNumberPopulated) { for (ResourceIndexedSearchParamNumber next : paramsNumber) {
for (ResourceIndexedSearchParamNumber next : paramsNumber) { myEntityManager.remove(next);
myEntityManager.remove(next);
}
} }
for (ResourceIndexedSearchParamNumber next : numberParams) { for (ResourceIndexedSearchParamNumber next : numberParams) {
myEntityManager.persist(next); myEntityManager.persist(next);
} }
if (paramsQuantityPopulated) { for (ResourceIndexedSearchParamQuantity next : paramsQuantity) {
for (ResourceIndexedSearchParamQuantity next : paramsQuantity) { myEntityManager.remove(next);
myEntityManager.remove(next);
}
} }
for (ResourceIndexedSearchParamQuantity next : quantityParams) { for (ResourceIndexedSearchParamQuantity next : quantityParams) {
myEntityManager.persist(next); myEntityManager.persist(next);
} }
// Store date SP's // Store date SP's
if (paramsDatePopulated) { for (ResourceIndexedSearchParamDate next : paramsDate) {
for (ResourceIndexedSearchParamDate next : paramsDate) { myEntityManager.remove(next);
myEntityManager.remove(next);
}
} }
for (ResourceIndexedSearchParamDate next : dateParams) { for (ResourceIndexedSearchParamDate next : dateParams) {
myEntityManager.persist(next); myEntityManager.persist(next);
} }
// Store URI SP's // Store URI SP's
if (paramsUriPopulated) { for (ResourceIndexedSearchParamUri next : paramsUri) {
for (ResourceIndexedSearchParamUri next : paramsUri) { myEntityManager.remove(next);
myEntityManager.remove(next);
}
} }
for (ResourceIndexedSearchParamUri next : uriParams) { for (ResourceIndexedSearchParamUri next : uriParams) {
myEntityManager.persist(next); myEntityManager.persist(next);
} }
// Store Coords SP's // Store Coords SP's
if (paramsCoordsPopulated) { for (ResourceIndexedSearchParamCoords next : paramsCoords) {
for (ResourceIndexedSearchParamCoords next : paramsCoords) { myEntityManager.remove(next);
myEntityManager.remove(next);
}
} }
for (ResourceIndexedSearchParamCoords next : coordsParams) { for (ResourceIndexedSearchParamCoords next : coordsParams) {
myEntityManager.persist(next); myEntityManager.persist(next);
} }
if (hasLinks) { for (ResourceLink next : resourceLinks) {
for (ResourceLink next : resourceLinks) { myEntityManager.remove(next);
myEntityManager.remove(next);
}
} }
for (ResourceLink next : links) { for (ResourceLink next : links) {
myEntityManager.persist(next); myEntityManager.persist(next);
@ -1415,6 +1426,8 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
} // if thePerformIndexing } // if thePerformIndexing
theEntity = myEntityManager.merge(theEntity);
myEntityManager.flush(); myEntityManager.flush();
if (theResource != null) { if (theResource != null) {
@ -1425,7 +1438,8 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
} }
/** /**
* Subclasses may override to provide behaviour. Called when a resource has been inserted into the database for the first time. * Subclasses may override to provide behaviour. Called when a resource has been inserted into the database for the
* first time.
* *
* @param theEntity * @param theEntity
* The resource * The resource
@ -1437,7 +1451,8 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
} }
/** /**
* Subclasses may override to provide behaviour. Called when a resource has been inserted into the database for the first time. * Subclasses may override to provide behaviour. Called when a resource has been inserted into the database for the
* first time.
* *
* @param theEntity * @param theEntity
* The resource * The resource
@ -1449,8 +1464,9 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
} }
/** /**
* This method is invoked immediately before storing a new resource, or an update to an existing resource to allow the DAO to ensure that it is valid for persistence. By default, checks for the * This method is invoked immediately before storing a new resource, or an update to an existing resource to allow
* "subsetted" tag and rejects resources which have it. Subclasses should call the superclass implementation to preserve this check. * the DAO to ensure that it is valid for persistence. By default, checks for the "subsetted" tag and rejects
* resources which have it. Subclasses should call the superclass implementation to preserve this check.
* *
* @param theResource * @param theResource
* The resource that is about to be persisted * The resource that is about to be persisted
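A hedged sketch of extending this hook while keeping the built-in "subsetted" check, written as a method inside a hypothetical DSTU2 Patient DAO subclass (the extra identifier rule is purely illustrative and not part of this commit):

@Override
protected void validateResourceForStorage(Patient theResource, ResourceTable theEntityToSave) {
	// Call super first so the default "subsetted" tag rejection stays in effect.
	super.validateResourceForStorage(theResource, theEntityToSave);
	// Additional project-specific rule, shown only as an example.
	if (theResource.getIdentifier().isEmpty()) {
		throw new UnprocessableEntityException("Patient resources must carry at least one identifier");
	}
}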
@ -1481,4 +1497,34 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao {
return new String(out).toUpperCase(); return new String(out).toUpperCase();
} }
private static String parseNarrativeTextIntoWords(IResource theResource) {
StringBuilder b = new StringBuilder();
List<XMLEvent> xmlEvents = theResource.getText().getDiv().getValue();
if (xmlEvents != null) {
for (XMLEvent next : xmlEvents) {
if (next.isCharacters()) {
Characters characters = next.asCharacters();
b.append(characters.getData()).append(" ");
}
}
}
return b.toString();
}
private String parseContentTextIntoWords(IResource theResource) {
StringBuilder b = new StringBuilder();
@SuppressWarnings("rawtypes")
List<IPrimitiveType> childElements = getContext().newTerser().getAllPopulatedChildElementsOfType(theResource, IPrimitiveType.class);
for (@SuppressWarnings("rawtypes") IPrimitiveType nextType : childElements) {
String nextValue = nextType.getValueAsString();
if (isNotBlank(nextValue)) {
if (b.length() > 0 && b.charAt(b.length() - 1) != ' ') {
b.append(' ');
}
b.append(nextValue);
}
}
return b.toString();
}
} }

View File

@ -158,6 +158,9 @@ public abstract class BaseHapiFhirResourceDao<T extends IResource> extends BaseH
@Autowired @Autowired
private DaoConfig myDaoConfig; private DaoConfig myDaoConfig;
@Autowired(required=false)
private ISearchDao mySearchDao;
private String myResourceName; private String myResourceName;
private Class<T> myResourceType; private Class<T> myResourceType;
private String mySecondaryPrimaryKeyParamName; private String mySecondaryPrimaryKeyParamName;
@ -225,7 +228,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IResource> extends BaseH
codePredicates.add(p); codePredicates.add(p);
} }
Predicate masterCodePredicate = builder.or(codePredicates.toArray(new Predicate[0])); Predicate masterCodePredicate = builder.or(toArray(codePredicates));
Predicate type = builder.equal(from.get("myResourceType"), myResourceName); Predicate type = builder.equal(from.get("myResourceType"), myResourceName);
Predicate name = builder.equal(from.get("myParamName"), theParamName); Predicate name = builder.equal(from.get("myParamName"), theParamName);
@ -240,7 +243,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IResource> extends BaseH
return new HashSet<Long>(q.getResultList()); return new HashSet<Long>(q.getResultList());
} }
private Set<Long> addPredicateId(Set<Long> theExistingPids, Set<Long> thePids) { private Set<Long> addPredicateId(Set<Long> theExistingPids, Set<Long> thePids, DateRangeParam theLastUpdated) {
if (thePids == null || thePids.isEmpty()) { if (thePids == null || thePids.isEmpty()) {
return Collections.emptySet(); return Collections.emptySet();
} }
@ -250,10 +253,12 @@ public abstract class BaseHapiFhirResourceDao<T extends IResource> extends BaseH
Root<ResourceTable> from = cq.from(ResourceTable.class); Root<ResourceTable> from = cq.from(ResourceTable.class);
cq.select(from.get("myId").as(Long.class)); cq.select(from.get("myId").as(Long.class));
Predicate typePredicate = builder.equal(from.get("myResourceType"), myResourceName); List<Predicate> predicates = new ArrayList<Predicate>();
Predicate idPrecidate = from.get("myId").in(thePids); predicates.add(builder.equal(from.get("myResourceType"), myResourceName));
predicates.add(from.get("myId").in(thePids));
cq.where(builder.and(typePredicate, idPrecidate)); predicates.addAll(createLastUpdatedPredicates(theLastUpdated, builder, from));
cq.where(toArray(predicates));
TypedQuery<Long> q = myEntityManager.createQuery(cq); TypedQuery<Long> q = myEntityManager.createQuery(cq);
HashSet<Long> found = new HashSet<Long>(q.getResultList()); HashSet<Long> found = new HashSet<Long>(q.getResultList());
@ -269,7 +274,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IResource> extends BaseH
// IQueryParameterType> theList) { // IQueryParameterType> theList) {
// } // }
private Set<Long> addPredicateLanguage(Set<Long> thePids, List<List<? extends IQueryParameterType>> theList) { private Set<Long> addPredicateLanguage(Set<Long> thePids, List<List<? extends IQueryParameterType>> theList, DateRangeParam theLastUpdated) {
Set<Long> retVal = thePids; Set<Long> retVal = thePids;
if (theList == null || theList.isEmpty()) { if (theList == null || theList.isEmpty()) {
return retVal; return retVal;
@ -298,17 +303,18 @@ public abstract class BaseHapiFhirResourceDao<T extends IResource> extends BaseH
return retVal; return retVal;
} }
Predicate typePredicate = builder.equal(from.get("myResourceType"), myResourceName); List<Predicate> predicates = new ArrayList<Predicate>();
Predicate langPredicate = from.get("myLanguage").as(String.class).in(values); predicates.add(builder.equal(from.get("myResourceType"), myResourceName));
Predicate masterCodePredicate = builder.and(typePredicate, langPredicate); predicates.add(from.get("myLanguage").as(String.class).in(values));
Predicate notDeletedPredicate = builder.isNull(from.get("myDeleted"));
if (retVal.size() > 0) { if (retVal.size() > 0) {
Predicate inPids = (from.get("myId").in(retVal)); Predicate inPids = (from.get("myId").in(retVal));
cq.where(builder.and(masterCodePredicate, inPids, notDeletedPredicate)); predicates.add(inPids);
} else { }
cq.where(builder.and(masterCodePredicate, notDeletedPredicate));
} predicates.add(builder.isNull(from.get("myDeleted")));
cq.where(toArray(predicates));
TypedQuery<Long> q = myEntityManager.createQuery(cq); TypedQuery<Long> q = myEntityManager.createQuery(cq);
retVal = new HashSet<Long>(q.getResultList()); retVal = new HashSet<Long>(q.getResultList());
@ -410,7 +416,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IResource> extends BaseH
} }
Predicate masterCodePredicate = builder.or(codePredicates.toArray(new Predicate[0])); Predicate masterCodePredicate = builder.or(toArray(codePredicates));
Predicate type = builder.equal(from.get("myResourceType"), myResourceName); Predicate type = builder.equal(from.get("myResourceType"), myResourceName);
Predicate name = builder.equal(from.get("myParamName"), theParamName); Predicate name = builder.equal(from.get("myParamName"), theParamName);
@ -589,7 +595,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IResource> extends BaseH
} }
} }
Predicate masterCodePredicate = builder.or(codePredicates.toArray(new Predicate[0])); Predicate masterCodePredicate = builder.or(toArray(codePredicates));
Predicate type = builder.equal(from.get("myResourceType"), myResourceName); Predicate type = builder.equal(from.get("myResourceType"), myResourceName);
Predicate name = builder.equal(from.get("myParamName"), theParamName); Predicate name = builder.equal(from.get("myParamName"), theParamName);
@ -743,7 +749,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IResource> extends BaseH
} }
Predicate masterCodePredicate = builder.or(codePredicates.toArray(new Predicate[0])); Predicate masterCodePredicate = builder.or(toArray(codePredicates));
Predicate type = createResourceLinkPathPredicate(theParamName, builder, from); Predicate type = createResourceLinkPathPredicate(theParamName, builder, from);
if (pidsToRetain.size() > 0) { if (pidsToRetain.size() > 0) {
@ -782,7 +788,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IResource> extends BaseH
codePredicates.add(singleCode); codePredicates.add(singleCode);
} }
Predicate masterCodePredicate = builder.or(codePredicates.toArray(new Predicate[0])); Predicate masterCodePredicate = builder.or(toArray(codePredicates));
Predicate type = builder.equal(from.get("myResourceType"), myResourceName); Predicate type = builder.equal(from.get("myResourceType"), myResourceName);
Predicate name = builder.equal(from.get("myParamName"), theParamName); Predicate name = builder.equal(from.get("myParamName"), theParamName);
@ -797,7 +803,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IResource> extends BaseH
return new HashSet<Long>(q.getResultList()); return new HashSet<Long>(q.getResultList());
} }
private Set<Long> addPredicateTag(Set<Long> thePids, List<List<? extends IQueryParameterType>> theList, String theParamName) { private Set<Long> addPredicateTag(Set<Long> thePids, List<List<? extends IQueryParameterType>> theList, String theParamName, DateRangeParam theLastUpdated) {
Set<Long> pids = thePids; Set<Long> pids = thePids;
if (theList == null || theList.isEmpty()) { if (theList == null || theList.isEmpty()) {
return pids; return pids;
@ -871,14 +877,17 @@ public abstract class BaseHapiFhirResourceDao<T extends IResource> extends BaseH
} }
if (orPredicates.isEmpty() == false) { if (orPredicates.isEmpty() == false) {
andPredicates.add(builder.or(orPredicates.toArray(new Predicate[0]))); andPredicates.add(builder.or(toArray(orPredicates)));
} }
From<ResourceTag, ResourceTable> defJoin = from.join("myResource"); From<ResourceTag, ResourceTable> defJoin = from.join("myResource");
Predicate notDeletedPredicatePrediate = builder.isNull(defJoin.get("myDeleted")); Predicate notDeletedPredicatePrediate = builder.isNull(defJoin.get("myDeleted"));
andPredicates.add(notDeletedPredicatePrediate); andPredicates.add(notDeletedPredicatePrediate);
if (theLastUpdated != null) {
Predicate masterCodePredicate = builder.and(andPredicates.toArray(new Predicate[0])); andPredicates.addAll(createLastUpdatedPredicates(theLastUpdated, builder, defJoin));
}
Predicate masterCodePredicate = builder.and(toArray(andPredicates));
if (pids.size() > 0) { if (pids.size() > 0) {
Predicate inPids = (from.get("myResourceId").in(pids)); Predicate inPids = (from.get("myResourceId").in(pids));
@ -925,7 +934,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IResource> extends BaseH
codePredicates.add(singleCode); codePredicates.add(singleCode);
} }
Predicate masterCodePredicate = builder.or(codePredicates.toArray(new Predicate[0])); Predicate masterCodePredicate = builder.or(toArray(codePredicates));
Predicate type = builder.equal(from.get("myResourceType"), myResourceName); Predicate type = builder.equal(from.get("myResourceType"), myResourceName);
Predicate name = builder.equal(from.get("myParamName"), theParamName); Predicate name = builder.equal(from.get("myParamName"), theParamName);
@ -940,6 +949,10 @@ public abstract class BaseHapiFhirResourceDao<T extends IResource> extends BaseH
return new HashSet<Long>(q.getResultList()); return new HashSet<Long>(q.getResultList());
} }
private static Predicate[] toArray(List<Predicate> thePredicates) {
return thePredicates.toArray(new Predicate[thePredicates.size()]);
}
private Set<Long> addPredicateUri(String theParamName, Set<Long> thePids, List<? extends IQueryParameterType> theList) { private Set<Long> addPredicateUri(String theParamName, Set<Long> thePids, List<? extends IQueryParameterType> theList) {
if (theList == null || theList.isEmpty()) { if (theList == null || theList.isEmpty()) {
return thePids; return thePids;
@ -978,7 +991,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IResource> extends BaseH
} }
Predicate masterCodePredicate = builder.or(codePredicates.toArray(new Predicate[0])); Predicate masterCodePredicate = builder.or(toArray(codePredicates));
Predicate type = builder.equal(from.get("myResourceType"), myResourceName); Predicate type = builder.equal(from.get("myResourceType"), myResourceName);
Predicate name = builder.equal(from.get("myParamName"), theParamName); Predicate name = builder.equal(from.get("myParamName"), theParamName);
@ -1224,7 +1237,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IResource> extends BaseH
} else { } else {
singleCodePredicates.add(theBuilder.isNull(theFrom.get("myValue"))); singleCodePredicates.add(theBuilder.isNull(theFrom.get("myValue")));
} }
Predicate singleCode = theBuilder.and(singleCodePredicates.toArray(new Predicate[0])); Predicate singleCode = theBuilder.and(toArray(singleCodePredicates));
return singleCode; return singleCode;
} }
@ -1659,87 +1672,87 @@ public abstract class BaseHapiFhirResourceDao<T extends IResource> extends BaseH
} }
} }
@Override // @Override
public IBundleProvider everything(IIdType theId) { // public IBundleProvider everything(IIdType theId) {
Search search = new Search(); // Search search = new Search();
search.setUuid(UUID.randomUUID().toString()); // search.setUuid(UUID.randomUUID().toString());
search.setCreated(new Date()); // search.setCreated(new Date());
myEntityManager.persist(search); // myEntityManager.persist(search);
//
List<SearchResult> results = new ArrayList<SearchResult>(); // List<SearchResult> results = new ArrayList<SearchResult>();
if (theId != null) { // if (theId != null) {
Long pid = translateForcedIdToPid(theId); // Long pid = translateForcedIdToPid(theId);
ResourceTable entity = myEntityManager.find(ResourceTable.class, pid); // ResourceTable entity = myEntityManager.find(ResourceTable.class, pid);
validateGivenIdIsAppropriateToRetrieveResource(theId, entity); // validateGivenIdIsAppropriateToRetrieveResource(theId, entity);
SearchResult res = new SearchResult(search); // SearchResult res = new SearchResult(search);
res.setResourcePid(pid); // res.setResourcePid(pid);
results.add(res); // results.add(res);
} else { // } else {
TypedQuery<Tuple> query = createSearchAllByTypeQuery(); // TypedQuery<Tuple> query = createSearchAllByTypeQuery();
for (Tuple next : query.getResultList()) { // for (Tuple next : query.getResultList()) {
SearchResult res = new SearchResult(search); // SearchResult res = new SearchResult(search);
res.setResourcePid(next.get(0, Long.class)); // res.setResourcePid(next.get(0, Long.class));
results.add(res); // results.add(res);
} // }
} // }
//
int totalCount = results.size(); // int totalCount = results.size();
mySearchResultDao.save(results); // mySearchResultDao.save(results);
mySearchResultDao.flush(); // mySearchResultDao.flush();
//
CriteriaBuilder builder = myEntityManager.getCriteriaBuilder(); // CriteriaBuilder builder = myEntityManager.getCriteriaBuilder();
//
// Load _revincludes // // Load _revincludes
CriteriaQuery<Long> cq = builder.createQuery(Long.class); // CriteriaQuery<Long> cq = builder.createQuery(Long.class);
Root<ResourceLink> from = cq.from(ResourceLink.class); // Root<ResourceLink> from = cq.from(ResourceLink.class);
cq.select(from.get("mySourceResourcePid").as(Long.class)); // cq.select(from.get("mySourceResourcePid").as(Long.class));
//
Subquery<Long> pidsSubquery = cq.subquery(Long.class); // Subquery<Long> pidsSubquery = cq.subquery(Long.class);
Root<SearchResult> pidsSubqueryFrom = pidsSubquery.from(SearchResult.class); // Root<SearchResult> pidsSubqueryFrom = pidsSubquery.from(SearchResult.class);
pidsSubquery.select(pidsSubqueryFrom.get("myResourcePid").as(Long.class)); // pidsSubquery.select(pidsSubqueryFrom.get("myResourcePid").as(Long.class));
pidsSubquery.where(pidsSubqueryFrom.get("mySearch").in(search)); // pidsSubquery.where(pidsSubqueryFrom.get("mySearch").in(search));
//
cq.where(from.get("myTargetResourceId").in(pidsSubquery)); // cq.where(from.get("myTargetResourceId").in(pidsSubquery));
TypedQuery<Long> query = myEntityManager.createQuery(cq); // TypedQuery<Long> query = myEntityManager.createQuery(cq);
//
results = new ArrayList<SearchResult>(); // results = new ArrayList<SearchResult>();
for (Long next : query.getResultList()) { // for (Long next : query.getResultList()) {
SearchResult res = new SearchResult(search); // SearchResult res = new SearchResult(search);
res.setResourcePid(next); // res.setResourcePid(next);
results.add(res); // results.add(res);
} // }
//
// Save _revincludes // // Save _revincludes
totalCount += results.size(); // totalCount += results.size();
mySearchResultDao.save(results); // mySearchResultDao.save(results);
mySearchResultDao.flush(); // mySearchResultDao.flush();
//
final int finalTotalCount = totalCount; // final int finalTotalCount = totalCount;
return new IBundleProvider() { // return new IBundleProvider() {
//
@Override // @Override
public int size() { // public int size() {
return finalTotalCount; // return finalTotalCount;
} // }
//
@Override // @Override
public Integer preferredPageSize() { // public Integer preferredPageSize() {
return null; // return null;
} // }
//
@Override // @Override
public List<IBaseResource> getResources(int theFromIndex, int theToIndex) { // public List<IBaseResource> getResources(int theFromIndex, int theToIndex) {
// TODO Auto-generated method stub // // TODO Auto-generated method stub
return null; // return null;
} // }
//
@Override // @Override
public InstantDt getPublished() { // public InstantDt getPublished() {
// TODO Auto-generated method stub // // TODO Auto-generated method stub
return null; // return null;
} // }
}; // };
} // }
/** /**
* THIS SHOULD RETURN HASHSET and not just Set because we add to it later (so it can't be Collections.emptySet()) * THIS SHOULD RETURN HASHSET and not just Set because we add to it later (so it can't be Collections.emptySet())
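That constraint is standard JDK behaviour; a tiny illustration, not part of this commit:

Set<Long> pids = Collections.emptySet();
pids.add(123L); // throws UnsupportedOperationException - hence the requirement for a mutable HashSet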
@ -2147,7 +2160,12 @@ public abstract class BaseHapiFhirResourceDao<T extends IResource> extends BaseH
StopWatch w = new StopWatch(); StopWatch w = new StopWatch();
final InstantDt now = InstantDt.withCurrentTime(); final InstantDt now = InstantDt.withCurrentTime();
Set<Long> loadPids; DateRangeParam lu = theParams.getLastUpdated();
if (lu != null && lu.isEmpty()) {
lu = null;
}
Collection<Long> loadPids;
if (theParams.getEverythingMode() != null) { if (theParams.getEverythingMode() != null) {
CriteriaBuilder builder = myEntityManager.getCriteriaBuilder(); CriteriaBuilder builder = myEntityManager.getCriteriaBuilder();
@ -2160,7 +2178,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IResource> extends BaseH
} }
predicates.add(builder.equal(from.get("myResourceType"), myResourceName)); predicates.add(builder.equal(from.get("myResourceType"), myResourceName));
predicates.add(builder.isNull(from.get("myDeleted"))); predicates.add(builder.isNull(from.get("myDeleted")));
cq.where(builder.and(predicates.toArray(new Predicate[predicates.size()]))); cq.where(builder.and(toArray(predicates)));
Join<Object, Object> join = from.join("myIncomingResourceLinks", JoinType.LEFT); Join<Object, Object> join = from.join("myIncomingResourceLinks", JoinType.LEFT);
cq.multiselect(from.get("myId").as(Long.class), join.get("mySourceResourcePid").as(Long.class)); cq.multiselect(from.get("myId").as(Long.class), join.get("mySourceResourcePid").as(Long.class));
@ -2178,19 +2196,37 @@ public abstract class BaseHapiFhirResourceDao<T extends IResource> extends BaseH
} else if (theParams.isEmpty()) { } else if (theParams.isEmpty()) {
loadPids = new HashSet<Long>(); loadPids = new HashSet<Long>();
TypedQuery<Tuple> query = createSearchAllByTypeQuery(); TypedQuery<Tuple> query = createSearchAllByTypeQuery(lu);
lu = null;
for (Tuple next : query.getResultList()) { for (Tuple next : query.getResultList()) {
loadPids.add(next.get(0, Long.class)); loadPids.add(next.get(0, Long.class));
} }
if (loadPids.isEmpty()) { if (loadPids.isEmpty()) {
return new SimpleBundleProvider(); return new SimpleBundleProvider();
} }
} else {
loadPids = searchForIdsWithAndOr(theParams); } else {
List<Long> searchResultPids;
if (mySearchDao == null) {
if (theParams.containsKey(Constants.PARAM_TEXT)) {
throw new InvalidRequestException("Fulltext search is not enabled on this service, can not process parameter: " + Constants.PARAM_TEXT);
} else if (theParams.containsKey(Constants.PARAM_CONTENT)) {
throw new InvalidRequestException("Fulltext search is not enabled on this service, can not process parameter: " + Constants.PARAM_CONTENT);
}
searchResultPids = null;
} else {
searchResultPids = mySearchDao.search(getResourceName(), theParams);
}
if (theParams.isEmpty()) {
loadPids = searchResultPids;
} else {
loadPids = searchForIdsWithAndOr(theParams, searchResultPids, lu);
}
if (loadPids.isEmpty()) { if (loadPids.isEmpty()) {
return new SimpleBundleProvider(); return new SimpleBundleProvider();
} }
} }
// // Load _include and _revinclude before filter and sort in everything mode // // Load _include and _revinclude before filter and sort in everything mode
@ -2202,9 +2238,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IResource> extends BaseH
// } // }
// Handle _lastUpdated // Handle _lastUpdated
final DateRangeParam lu = theParams.getLastUpdated(); if (lu != null) {
if (lu != null && (lu.getLowerBoundAsInstant() != null || lu.getUpperBoundAsInstant() != null)) {
List<Long> resultList = filterResourceIdsByLastUpdated(loadPids, lu); List<Long> resultList = filterResourceIdsByLastUpdated(loadPids, lu);
loadPids.clear(); loadPids.clear();
for (Long next : resultList) { for (Long next : resultList) {
@ -2251,7 +2285,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IResource> extends BaseH
// Load includes // Load includes
pidsSubList = new ArrayList<Long>(pidsSubList); pidsSubList = new ArrayList<Long>(pidsSubList);
revIncludedPids.addAll(loadReverseIncludes(pidsSubList, theParams.getIncludes(), false, null, lu)); revIncludedPids.addAll(loadReverseIncludes(pidsSubList, theParams.getIncludes(), false, null, theParams.getLastUpdated()));
// Execute the query and make sure we return distinct results // Execute the query and make sure we return distinct results
List<IBaseResource> resources = new ArrayList<IBaseResource>(); List<IBaseResource> resources = new ArrayList<IBaseResource>();
@ -2279,56 +2313,71 @@ public abstract class BaseHapiFhirResourceDao<T extends IResource> extends BaseH
return retVal; return retVal;
} }
private List<Long> filterResourceIdsByLastUpdated(Set<Long> loadPids, final DateRangeParam lu) { private List<Long> filterResourceIdsByLastUpdated(Collection<Long> thePids, final DateRangeParam theLastUpdated) {
CriteriaBuilder builder = myEntityManager.getCriteriaBuilder(); CriteriaBuilder builder = myEntityManager.getCriteriaBuilder();
CriteriaQuery<Long> cq = builder.createQuery(Long.class); CriteriaQuery<Long> cq = builder.createQuery(Long.class);
Root<ResourceTable> from = cq.from(ResourceTable.class); Root<ResourceTable> from = cq.from(ResourceTable.class);
cq.select(from.get("myId").as(Long.class)); cq.select(from.get("myId").as(Long.class));
Predicate predicateIds = (from.get("myId").in(loadPids)); List<Predicate> lastUpdatedPredicates = createLastUpdatedPredicates(theLastUpdated, builder, from);
Predicate predicateLower = lu.getLowerBoundAsInstant() != null ? builder.greaterThanOrEqualTo(from.<Date> get("myUpdated"), lu.getLowerBoundAsInstant()) : null; lastUpdatedPredicates.add(0, from.get("myId").in(thePids));
Predicate predicateUpper = lu.getUpperBoundAsInstant() != null ? builder.lessThanOrEqualTo(from.<Date> get("myUpdated"), lu.getUpperBoundAsInstant()) : null;
if (predicateLower != null && predicateUpper != null) { cq.where(toArray(lastUpdatedPredicates));
cq.where(predicateIds, predicateLower, predicateUpper);
} else if (predicateLower != null) {
cq.where(predicateIds, predicateLower);
} else {
cq.where(predicateIds, predicateUpper);
}
TypedQuery<Long> query = myEntityManager.createQuery(cq); TypedQuery<Long> query = myEntityManager.createQuery(cq);
List<Long> resultList = query.getResultList(); List<Long> resultList = query.getResultList();
return resultList; return resultList;
} }
private TypedQuery<Tuple> createSearchAllByTypeQuery() { private List<Predicate> createLastUpdatedPredicates(final DateRangeParam theLastUpdated, CriteriaBuilder builder, From<?, ResourceTable> from) {
List<Predicate> lastUpdatedPredicates = new ArrayList<Predicate>();
if (theLastUpdated != null) {
if (theLastUpdated.getLowerBoundAsInstant() != null) {
Predicate predicateLower = builder.greaterThanOrEqualTo(from.<Date> get("myUpdated"), theLastUpdated.getLowerBoundAsInstant());
lastUpdatedPredicates.add(predicateLower);
}
if (theLastUpdated.getUpperBoundAsInstant() != null) {
Predicate predicateUpper = builder.lessThanOrEqualTo(from.<Date> get("myUpdated"), theLastUpdated.getUpperBoundAsInstant());
lastUpdatedPredicates.add(predicateUpper);
}
}
return lastUpdatedPredicates;
}
private TypedQuery<Tuple> createSearchAllByTypeQuery(DateRangeParam theLastUpdated) {
CriteriaBuilder builder = myEntityManager.getCriteriaBuilder(); CriteriaBuilder builder = myEntityManager.getCriteriaBuilder();
CriteriaQuery<Tuple> cq = builder.createTupleQuery(); CriteriaQuery<Tuple> cq = builder.createTupleQuery();
Root<ResourceTable> from = cq.from(ResourceTable.class); Root<ResourceTable> from = cq.from(ResourceTable.class);
cq.multiselect(from.get("myId").as(Long.class)); cq.multiselect(from.get("myId").as(Long.class));
Predicate typeEquals = builder.equal(from.get("myResourceType"), myResourceName); List<Predicate> predicates = new ArrayList<Predicate>();
Predicate notDeleted = builder.isNull(from.get("myDeleted")); predicates.add(builder.equal(from.get("myResourceType"), myResourceName));
cq.where(builder.and(typeEquals, notDeleted)); predicates.add(builder.isNull(from.get("myDeleted")));
if (theLastUpdated != null) {
predicates.addAll(createLastUpdatedPredicates(theLastUpdated, builder, from));
}
cq.where(toArray(predicates));
TypedQuery<Tuple> query = myEntityManager.createQuery(cq); TypedQuery<Tuple> query = myEntityManager.createQuery(cq);
return query; return query;
} }
private List<Long> processSort(final SearchParameterMap theParams, Set<Long> theLoadPids) { private List<Long> processSort(final SearchParameterMap theParams, Collection<Long> theLoadPids) {
final List<Long> pids; final List<Long> pids;
Set<Long> loadPids = theLoadPids; // Set<Long> loadPids = theLoadPids;
if (theParams.getSort() != null && isNotBlank(theParams.getSort().getParamName())) { if (theParams.getSort() != null && isNotBlank(theParams.getSort().getParamName())) {
List<Order> orders = new ArrayList<Order>(); List<Order> orders = new ArrayList<Order>();
List<Predicate> predicates = new ArrayList<Predicate>(); List<Predicate> predicates = new ArrayList<Predicate>();
CriteriaBuilder builder = myEntityManager.getCriteriaBuilder(); CriteriaBuilder builder = myEntityManager.getCriteriaBuilder();
CriteriaQuery<Tuple> cq = builder.createTupleQuery(); CriteriaQuery<Tuple> cq = builder.createTupleQuery();
Root<ResourceTable> from = cq.from(ResourceTable.class); Root<ResourceTable> from = cq.from(ResourceTable.class);
predicates.add(from.get("myId").in(loadPids)); predicates.add(from.get("myId").in(theLoadPids));
createSort(builder, from, theParams.getSort(), orders, predicates); createSort(builder, from, theParams.getSort(), orders, predicates);
if (orders.size() > 0) { if (orders.size() > 0) {
Set<Long> originalPids = loadPids; Collection<Long> originalPids = theLoadPids;
loadPids = new LinkedHashSet<Long>(); LinkedHashSet<Long> loadPids = new LinkedHashSet<Long>();
cq.multiselect(from.get("myId").as(Long.class)); cq.multiselect(from.get("myId").as(Long.class));
cq.where(predicates.toArray(new Predicate[0])); cq.where(toArray(predicates));
cq.orderBy(orders); cq.orderBy(orders);
TypedQuery<Tuple> query = myEntityManager.createQuery(cq); TypedQuery<Tuple> query = myEntityManager.createQuery(cq);
@ -2349,10 +2398,20 @@ public abstract class BaseHapiFhirResourceDao<T extends IResource> extends BaseH
} }
} else { } else {
pids = new ArrayList<Long>(loadPids); pids = toList(theLoadPids);
} }
} else { } else {
pids = new ArrayList<Long>(loadPids); pids = toList(theLoadPids);
}
return pids;
}
private List<Long> toList(Collection<Long> theLoadPids) {
final List<Long> pids;
if (theLoadPids instanceof List) {
pids = (List<Long>) theLoadPids;
} else {
pids = new ArrayList<Long>(theLoadPids);
} }
return pids; return pids;
} }
@ -2368,7 +2427,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IResource> extends BaseH
for (Entry<String, IQueryParameterType> nextEntry : theParams.entrySet()) { for (Entry<String, IQueryParameterType> nextEntry : theParams.entrySet()) {
map.add(nextEntry.getKey(), (nextEntry.getValue())); map.add(nextEntry.getKey(), (nextEntry.getValue()));
} }
return searchForIdsWithAndOr(map); return searchForIdsWithAndOr(map, null, null);
} }
@Override @Override
@ -2377,7 +2436,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IResource> extends BaseH
} }
@Override @Override
public Set<Long> searchForIdsWithAndOr(SearchParameterMap theParams) { public Set<Long> searchForIdsWithAndOr(SearchParameterMap theParams, Collection<Long> theInitialPids, DateRangeParam theLastUpdated) {
SearchParameterMap params = theParams; SearchParameterMap params = theParams;
if (params == null) { if (params == null) {
params = new SearchParameterMap(); params = new SearchParameterMap();
@ -2386,6 +2445,9 @@ public abstract class BaseHapiFhirResourceDao<T extends IResource> extends BaseH
RuntimeResourceDefinition resourceDef = getContext().getResourceDefinition(myResourceType); RuntimeResourceDefinition resourceDef = getContext().getResourceDefinition(myResourceType);
Set<Long> pids = new HashSet<Long>(); Set<Long> pids = new HashSet<Long>();
if (theInitialPids != null) {
pids.addAll(theInitialPids);
}
for (Entry<String, List<List<? extends IQueryParameterType>>> nextParamEntry : params.entrySet()) { for (Entry<String, List<List<? extends IQueryParameterType>>> nextParamEntry : params.entrySet()) {
String nextParamName = nextParamEntry.getKey(); String nextParamName = nextParamEntry.getKey();
@ -2418,7 +2480,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IResource> extends BaseH
} }
} }
pids = addPredicateId(pids, joinPids); pids = addPredicateId(pids, joinPids, theLastUpdated);
if (pids.isEmpty()) { if (pids.isEmpty()) {
return new HashSet<Long>(); return new HashSet<Long>();
} }
@ -2433,11 +2495,11 @@ public abstract class BaseHapiFhirResourceDao<T extends IResource> extends BaseH
} else if (nextParamName.equals(BaseResource.SP_RES_LANGUAGE)) { } else if (nextParamName.equals(BaseResource.SP_RES_LANGUAGE)) {
pids = addPredicateLanguage(pids, nextParamEntry.getValue()); pids = addPredicateLanguage(pids, nextParamEntry.getValue(), theLastUpdated);
} else if (nextParamName.equals(Constants.PARAM_TAG) || nextParamName.equals(Constants.PARAM_PROFILE) || nextParamName.equals(Constants.PARAM_SECURITY)) { } else if (nextParamName.equals(Constants.PARAM_TAG) || nextParamName.equals(Constants.PARAM_PROFILE) || nextParamName.equals(Constants.PARAM_SECURITY)) {
pids = addPredicateTag(pids, nextParamEntry.getValue(), nextParamName); pids = addPredicateTag(pids, nextParamEntry.getValue(), nextParamName, theLastUpdated);
} else { } else {

View File

@ -92,6 +92,9 @@ public abstract class BaseHapiFhirSystemDao<T> extends BaseHapiFhirDao<IBaseReso
q.setMaxResults(maxResult); q.setMaxResults(maxResult);
List<ResourceTable> resources = q.getResultList(); List<ResourceTable> resources = q.getResultList();
if (resources.isEmpty()) {
return 0;
}
ourLog.info("Indexing {} resources", resources.size()); ourLog.info("Indexing {} resources", resources.size());

View File

@ -0,0 +1,28 @@
package ca.uhn.fhir.jpa.dao;
import org.apache.commons.lang3.time.DateUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.annotation.Scheduled;
import ca.uhn.fhir.model.dstu2.resource.Bundle;
import ca.uhn.fhir.model.dstu2.resource.SearchParameter;
public class FhirResourceDaoSearchParameterDstu2 extends FhirResourceDaoDstu2<SearchParameter> implements IFhirResourceDaoSearchParameter<SearchParameter> {
@Autowired
private IFhirSystemDao<Bundle> mySystemDao;
/**
* This method is called once per minute to perform any required re-indexing. During most passes this will
* just check and find that there are no resources requiring re-indexing. In that case the method just returns
 * immediately. If the search finds that some resources require reindexing, the system will reindex a batch of
 * resources and then return.
*/
@Scheduled(fixedDelay=DateUtils.MILLIS_PER_MINUTE)
public void performReindexingPass() {
mySystemDao.performReindexingPass(250);
}
}
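
The scheduled method above just delegates to IFhirSystemDao#performReindexingPass(Integer), and the companion change to BaseHapiFhirSystemDao above makes that call return 0 once nothing is left to reindex. As a rough sketch of what that contract allows, the hypothetical helper below keeps invoking the pass until it reports zero; it is not part of this change, and it assumes the return value is the number of resources processed per pass.

package ca.uhn.fhir.jpa.dao;

import ca.uhn.fhir.model.dstu2.resource.Bundle;

public class ReindexUntilDoneExample {

	/**
	 * Repeatedly invokes performReindexingPass() until a pass reports that nothing
	 * was left to reindex (a return value of 0), and returns the total processed.
	 */
	public static int reindexAll(IFhirSystemDao<Bundle> theSystemDao) {
		int total = 0;
		while (true) {
			// Each pass handles at most 250 resources, mirroring the scheduled job above
			int count = theSystemDao.performReindexingPass(250);
			if (count == 0) {
				return total;
			}
			total += count;
		}
	}
}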

View File

@ -28,7 +28,6 @@ import java.util.Date;
import java.util.List; import java.util.List;
import javax.persistence.Query; import javax.persistence.Query;
import javax.persistence.TypedQuery;
import org.apache.commons.lang3.time.DateUtils; import org.apache.commons.lang3.time.DateUtils;
import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IBaseResource;
@ -77,6 +76,9 @@ public class FhirResourceDaoSubscriptionDstu2 extends FhirResourceDaoDstu2<Subsc
@Autowired @Autowired
private ISubscriptionTableDao mySubscriptionTableDao; private ISubscriptionTableDao mySubscriptionTableDao;
@Autowired
private PlatformTransactionManager myTxManager;
private void createSubscriptionTable(ResourceTable theEntity, Subscription theSubscription) { private void createSubscriptionTable(ResourceTable theEntity, Subscription theSubscription) {
SubscriptionTable subscriptionEntity = new SubscriptionTable(); SubscriptionTable subscriptionEntity = new SubscriptionTable();
subscriptionEntity.setCreated(new Date()); subscriptionEntity.setCreated(new Date());
@ -87,170 +89,16 @@ public class FhirResourceDaoSubscriptionDstu2 extends FhirResourceDaoDstu2<Subsc
myEntityManager.persist(subscriptionEntity); myEntityManager.persist(subscriptionEntity);
} }
@Autowired
private PlatformTransactionManager myTxManager;
@Scheduled(fixedDelay = 10 * DateUtils.MILLIS_PER_SECOND)
@Transactional(propagation = Propagation.NOT_SUPPORTED)
@Override @Override
public synchronized void pollForNewUndeliveredResources() { public Long getSubscriptionTablePidForSubscriptionResource(IIdType theId) {
if (getConfig().isSubscriptionEnabled() == false) { ResourceTable entity = readEntityLatestVersion(theId);
return; SubscriptionTable table = mySubscriptionTableDao.findOneByResourcePid(entity.getId());
} if (table == null) {
ourLog.trace("Beginning pollForNewUndeliveredResources()"); return null;
// SubscriptionCandidateResource
Collection<Long> subscriptions = mySubscriptionTableDao.finsSubscriptionsWhichNeedToBeChecked(SubscriptionStatusEnum.ACTIVE, new Date());
TransactionTemplate txTemplate = new TransactionTemplate(myTxManager);
txTemplate.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);
for (final Long nextSubscriptionTablePid : subscriptions) {
txTemplate.execute(new TransactionCallback<Void>() {
@Override
public Void doInTransaction(TransactionStatus theStatus) {
SubscriptionTable nextSubscriptionTable = mySubscriptionTableDao.findOne(nextSubscriptionTablePid);
pollForNewUndeliveredResources(nextSubscriptionTable);
return null;
}
});
} }
return table.getId();
} }
private void pollForNewUndeliveredResources(SubscriptionTable theSubscriptionTable) {
Subscription subscription = toResource(Subscription.class, theSubscriptionTable.getSubscriptionResource(), false);
RuntimeResourceDefinition resourceDef = validateCriteriaAndReturnResourceDefinition(subscription);
SearchParameterMap criteriaUrl = translateMatchUrl(subscription.getCriteria(), resourceDef);
criteriaUrl = new SearchParameterMap();
long start = theSubscriptionTable.getMostRecentMatch().getTime();
long end = System.currentTimeMillis() - getConfig().getSubscriptionPollDelay();
if (end <= start) {
ourLog.trace("Skipping search for subscription");
return;
}
ourLog.debug("Subscription {} search from {} to {}", new Object[] { subscription.getId().getIdPart(), new InstantDt(new Date(start)), new InstantDt(new Date(end)) });
DateRangeParam range = new DateRangeParam();
range.setLowerBound(new DateParam(QuantityCompararatorEnum.GREATERTHAN, start));
range.setUpperBound(new DateParam(QuantityCompararatorEnum.LESSTHAN, end));
criteriaUrl.setLastUpdated(range);
criteriaUrl.setSort(new SortSpec(Constants.PARAM_LASTUPDATED, SortOrderEnum.ASC));
IFhirResourceDao<? extends IBaseResource> dao = getDao(resourceDef.getImplementingClass());
IBundleProvider results = dao.search(criteriaUrl);
if (results.size() == 0) {
return;
}
ourLog.info("Found {} new results for Subscription {}", results.size(), subscription.getId().getIdPart());
List<SubscriptionFlaggedResource> flags = new ArrayList<SubscriptionFlaggedResource>();
Date mostRecentMatch = null;
for (IBaseResource next : results.getResources(0, results.size())) {
Date updated = ResourceMetadataKeyEnum.PUBLISHED.get((IResource) next).getValue();
if (mostRecentMatch == null || mostRecentMatch.getTime() < updated.getTime()) {
mostRecentMatch = updated;
}
SubscriptionFlaggedResource nextFlag = new SubscriptionFlaggedResource();
Long pid = IDao.RESOURCE_PID.get((IResource) next);
nextFlag.setResource(myEntityManager.find(ResourceTable.class, pid));
nextFlag.setSubscription(theSubscriptionTable);
nextFlag.setVersion(next.getIdElement().getVersionIdPartAsLong());
flags.add(nextFlag);
}
mySubscriptionFlaggedResourceDataDao.save(flags);
ourLog.debug("Updating most recent match for subcription {} to {}", subscription.getId().getIdPart(), new InstantDt(mostRecentMatch));
theSubscriptionTable.setMostRecentMatch(mostRecentMatch);
myEntityManager.merge(theSubscriptionTable);
}
@Override
protected void postPersist(ResourceTable theEntity, Subscription theSubscription) {
super.postPersist(theEntity, theSubscription);
createSubscriptionTable(theEntity, theSubscription);
}
@Override
protected ResourceTable updateEntity(IResource theResource, ResourceTable theEntity, boolean theUpdateHistory, Date theDeletedTimestampOrNull, boolean thePerformIndexing, boolean theUpdateVersion,
Date theUpdateTime) {
ResourceTable retVal = super.updateEntity(theResource, theEntity, theUpdateHistory, theDeletedTimestampOrNull, thePerformIndexing, theUpdateVersion, theUpdateTime);
Subscription resource = (Subscription) theResource;
Long resourceId = theEntity.getId();
if (theDeletedTimestampOrNull != null) {
Long subscriptionId = getSubscriptionTablePidForSubscriptionResource(theEntity.getIdDt());
if (subscriptionId != null) {
mySubscriptionFlaggedResourceDataDao.deleteAllForSubscription(subscriptionId);
mySubscriptionTableDao.deleteAllForSubscription(subscriptionId);
}
} else {
Query q = myEntityManager.createNamedQuery("Q_HFJ_SUBSCRIPTION_SET_STATUS");
q.setParameter("res_id", resourceId);
q.setParameter("status", resource.getStatusElement().getValueAsEnum());
if (q.executeUpdate() > 0) {
ourLog.info("Updated subscription status for subscription {} to {}", resourceId, resource.getStatusElement().getValueAsEnum());
} else {
createSubscriptionTable(retVal, resource);
}
}
return retVal;
}
@Override
protected void validateResourceForStorage(Subscription theResource, ResourceTable theEntityToSave) {
super.validateResourceForStorage(theResource, theEntityToSave);
RuntimeResourceDefinition resDef = validateCriteriaAndReturnResourceDefinition(theResource);
IFhirResourceDao<? extends IBaseResource> dao = getDao(resDef.getImplementingClass());
if (dao == null) {
throw new UnprocessableEntityException("Subscription.criteria contains invalid/unsupported resource type: " + resDef);
}
if (theResource.getChannel().getType() == null) {
throw new UnprocessableEntityException("Subscription.channel.type must be populated on this server");
}
SubscriptionStatusEnum status = theResource.getStatusElement().getValueAsEnum();
if (status == null) {
throw new UnprocessableEntityException("Subscription.status must be populated on this server");
}
}
private RuntimeResourceDefinition validateCriteriaAndReturnResourceDefinition(Subscription theResource) {
String query = theResource.getCriteria();
if (isBlank(query)) {
throw new UnprocessableEntityException("Subscription.criteria must be populated");
}
int sep = query.indexOf('?');
if (sep <= 1) {
throw new UnprocessableEntityException("Subscription.criteria must be in the form \"{Resource Type}?[params]\"");
}
String resType = query.substring(0, sep);
if (resType.contains("/")) {
throw new UnprocessableEntityException("Subscription.criteria must be in the form \"{Resource Type}?[params]\"");
}
RuntimeResourceDefinition resDef;
try {
resDef = getContext().getResourceDefinition(resType);
} catch (DataFormatException e) {
throw new UnprocessableEntityException("Subscription.criteria contains invalid/unsupported resource type: " + resType);
}
return resDef;
}
@Override @Override
public synchronized List<IBaseResource> getUndeliveredResourcesAndPurge(Long theSubscriptionPid) { public synchronized List<IBaseResource> getUndeliveredResourcesAndPurge(Long theSubscriptionPid) {
List<IBaseResource> retVal = new ArrayList<IBaseResource>(); List<IBaseResource> retVal = new ArrayList<IBaseResource>();
@ -268,13 +116,109 @@ public class FhirResourceDaoSubscriptionDstu2 extends FhirResourceDaoDstu2<Subsc
} }
@Override @Override
public Long getSubscriptionTablePidForSubscriptionResource(IIdType theId) { @Transactional(propagation = Propagation.NOT_SUPPORTED)
ResourceTable entity = readEntityLatestVersion(theId); public synchronized int pollForNewUndeliveredResources() {
SubscriptionTable table = mySubscriptionTableDao.findOneByResourcePid(entity.getId()); if (getConfig().isSubscriptionEnabled() == false) {
if (table == null) { return 0;
return null;
} }
return table.getId(); ourLog.trace("Beginning pollForNewUndeliveredResources()");
// SubscriptionCandidateResource
Collection<Long> subscriptions = mySubscriptionTableDao.finsSubscriptionsWhichNeedToBeChecked(SubscriptionStatusEnum.ACTIVE, new Date());
TransactionTemplate txTemplate = new TransactionTemplate(myTxManager);
txTemplate.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);
int retVal = 0;
for (final Long nextSubscriptionTablePid : subscriptions) {
retVal += txTemplate.execute(new TransactionCallback<Integer>() {
@Override
public Integer doInTransaction(TransactionStatus theStatus) {
SubscriptionTable nextSubscriptionTable = mySubscriptionTableDao.findOne(nextSubscriptionTablePid);
return pollForNewUndeliveredResources(nextSubscriptionTable);
}
});
}
return retVal;
}
private int pollForNewUndeliveredResources(SubscriptionTable theSubscriptionTable) {
Subscription subscription = toResource(Subscription.class, theSubscriptionTable.getSubscriptionResource(), false);
RuntimeResourceDefinition resourceDef = validateCriteriaAndReturnResourceDefinition(subscription);
SearchParameterMap criteriaUrl = translateMatchUrl(subscription.getCriteria(), resourceDef);
criteriaUrl = new SearchParameterMap();
long start = theSubscriptionTable.getMostRecentMatch().getTime();
long end = System.currentTimeMillis() - getConfig().getSubscriptionPollDelay();
if (end <= start) {
ourLog.trace("Skipping search for subscription");
return 0;
}
ourLog.debug("Subscription {} search from {} to {}", new Object[] { subscription.getId().getIdPart(), new InstantDt(new Date(start)), new InstantDt(new Date(end)) });
DateRangeParam range = new DateRangeParam();
range.setLowerBound(new DateParam(QuantityCompararatorEnum.GREATERTHAN, start));
range.setUpperBound(new DateParam(QuantityCompararatorEnum.LESSTHAN, end));
criteriaUrl.setLastUpdated(range);
criteriaUrl.setSort(new SortSpec(Constants.PARAM_LASTUPDATED, SortOrderEnum.ASC));
IFhirResourceDao<? extends IBaseResource> dao = getDao(resourceDef.getImplementingClass());
IBundleProvider results = dao.search(criteriaUrl);
if (results.size() == 0) {
return 0;
}
ourLog.info("Found {} new results for Subscription {}", results.size(), subscription.getId().getIdPart());
List<SubscriptionFlaggedResource> flags = new ArrayList<SubscriptionFlaggedResource>();
Date mostRecentMatch = null;
for (IBaseResource next : results.getResources(0, results.size())) {
Date updated = ResourceMetadataKeyEnum.UPDATED.get((IResource) next).getValue();
if (mostRecentMatch == null) {
mostRecentMatch = updated;
} else {
long mostRecentMatchTime = mostRecentMatch.getTime();
long updatedTime = updated.getTime();
if (mostRecentMatchTime < updatedTime) {
mostRecentMatch = updated;
}
}
SubscriptionFlaggedResource nextFlag = new SubscriptionFlaggedResource();
Long pid = IDao.RESOURCE_PID.get((IResource) next);
ourLog.info("New resource for subscription: {}", pid);
nextFlag.setResource(myEntityManager.find(ResourceTable.class, pid));
nextFlag.setSubscription(theSubscriptionTable);
nextFlag.setVersion(next.getIdElement().getVersionIdPartAsLong());
flags.add(nextFlag);
}
mySubscriptionFlaggedResourceDataDao.save(flags);
ourLog.debug("Updating most recent match for subcription {} to {}", subscription.getId().getIdPart(), new InstantDt(mostRecentMatch));
theSubscriptionTable.setMostRecentMatch(mostRecentMatch);
mySubscriptionTableDao.save(theSubscriptionTable);
return results.size();
}
@Scheduled(fixedDelay = 10 * DateUtils.MILLIS_PER_SECOND)
@Transactional(propagation = Propagation.NOT_SUPPORTED)
public synchronized void pollForNewUndeliveredResourcesScheduler() {
pollForNewUndeliveredResources();
}
@Override
protected void postPersist(ResourceTable theEntity, Subscription theSubscription) {
super.postPersist(theEntity, theSubscription);
createSubscriptionTable(theEntity, theSubscription);
} }
@Scheduled(fixedDelay = DateUtils.MILLIS_PER_MINUTE) @Scheduled(fixedDelay = DateUtils.MILLIS_PER_MINUTE)
@ -305,4 +249,77 @@ public class FhirResourceDaoSubscriptionDstu2 extends FhirResourceDaoDstu2<Subsc
} }
} }
@Override
protected ResourceTable updateEntity(IResource theResource, ResourceTable theEntity, boolean theUpdateHistory, Date theDeletedTimestampOrNull, boolean thePerformIndexing, boolean theUpdateVersion,
Date theUpdateTime) {
ResourceTable retVal = super.updateEntity(theResource, theEntity, theUpdateHistory, theDeletedTimestampOrNull, thePerformIndexing, theUpdateVersion, theUpdateTime);
Subscription resource = (Subscription) theResource;
Long resourceId = theEntity.getId();
if (theDeletedTimestampOrNull != null) {
Long subscriptionId = getSubscriptionTablePidForSubscriptionResource(theEntity.getIdDt());
if (subscriptionId != null) {
mySubscriptionFlaggedResourceDataDao.deleteAllForSubscription(subscriptionId);
mySubscriptionTableDao.deleteAllForSubscription(subscriptionId);
}
} else {
Query q = myEntityManager.createNamedQuery("Q_HFJ_SUBSCRIPTION_SET_STATUS");
q.setParameter("res_id", resourceId);
q.setParameter("status", resource.getStatusElement().getValueAsEnum());
if (q.executeUpdate() > 0) {
ourLog.info("Updated subscription status for subscription {} to {}", resourceId, resource.getStatusElement().getValueAsEnum());
} else {
createSubscriptionTable(retVal, resource);
}
}
return retVal;
}
private RuntimeResourceDefinition validateCriteriaAndReturnResourceDefinition(Subscription theResource) {
String query = theResource.getCriteria();
if (isBlank(query)) {
throw new UnprocessableEntityException("Subscription.criteria must be populated");
}
int sep = query.indexOf('?');
if (sep <= 1) {
throw new UnprocessableEntityException("Subscription.criteria must be in the form \"{Resource Type}?[params]\"");
}
String resType = query.substring(0, sep);
if (resType.contains("/")) {
throw new UnprocessableEntityException("Subscription.criteria must be in the form \"{Resource Type}?[params]\"");
}
RuntimeResourceDefinition resDef;
try {
resDef = getContext().getResourceDefinition(resType);
} catch (DataFormatException e) {
throw new UnprocessableEntityException("Subscription.criteria contains invalid/unsupported resource type: " + resType);
}
return resDef;
}
@Override
protected void validateResourceForStorage(Subscription theResource, ResourceTable theEntityToSave) {
super.validateResourceForStorage(theResource, theEntityToSave);
RuntimeResourceDefinition resDef = validateCriteriaAndReturnResourceDefinition(theResource);
IFhirResourceDao<? extends IBaseResource> dao = getDao(resDef.getImplementingClass());
if (dao == null) {
throw new UnprocessableEntityException("Subscription.criteria contains invalid/unsupported resource type: " + resDef);
}
if (theResource.getChannel().getType() == null) {
throw new UnprocessableEntityException("Subscription.channel.type must be populated on this server");
}
SubscriptionStatusEnum status = theResource.getStatusElement().getValueAsEnum();
if (status == null) {
throw new UnprocessableEntityException("Subscription.status must be populated on this server");
}
}
} }
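
Since pollForNewUndeliveredResources() now reports how many resources were newly flagged, a caller can poll and then drain the per-subscription queue through getUndeliveredResourcesAndPurge(). The class below is a hypothetical sketch only: the DAO reference, the subscription-table PID (which would come from getSubscriptionTablePidForSubscriptionResource()), and the delivery step are assumed for illustration and are not part of this change.

package ca.uhn.fhir.jpa.dao;

import java.util.List;

import org.hl7.fhir.instance.model.api.IBaseResource;

import ca.uhn.fhir.model.dstu2.resource.Subscription;

public class SubscriptionPollExample {

	public static void pollAndDeliver(IFhirResourceDaoSubscription<Subscription> theSubscriptionDao, Long theSubscriptionTablePid) {
		// Flag any resources that matched active subscriptions since the last poll
		int newlyFlagged = theSubscriptionDao.pollForNewUndeliveredResources();
		if (newlyFlagged > 0) {
			// Fetch the flagged resources for this subscription and clear the flags
			List<IBaseResource> undelivered = theSubscriptionDao.getUndeliveredResourcesAndPurge(theSubscriptionTablePid);
			for (IBaseResource next : undelivered) {
				// deliver "next" over the subscription channel (delivery mechanism not shown)
			}
		}
	}
}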

View File

@ -0,0 +1,98 @@
package ca.uhn.fhir.jpa.dao;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.persistence.PersistenceContextType;
import org.apache.commons.lang3.StringUtils;
import org.apache.lucene.search.Query;
import org.hibernate.search.jpa.FullTextEntityManager;
import org.hibernate.search.jpa.FullTextQuery;
import org.hibernate.search.query.dsl.BooleanJunction;
import org.hibernate.search.query.dsl.QueryBuilder;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.springframework.transaction.annotation.Transactional;
import ca.uhn.fhir.jpa.entity.ResourceTable;
import ca.uhn.fhir.model.api.IQueryParameterType;
import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.server.Constants;
public class FhirSearchDao extends BaseHapiFhirDao<IBaseResource> implements ISearchDao {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirSearchDao.class);
@PersistenceContext(type = PersistenceContextType.TRANSACTION)
private EntityManager myEntityManager;
@Transactional()
@Override
public List<Long> search(String theResourceName, SearchParameterMap theParams) {
FullTextEntityManager em = org.hibernate.search.jpa.Search.getFullTextEntityManager(myEntityManager);
QueryBuilder qb = em
.getSearchFactory()
.buildQueryBuilder()
.forEntity(ResourceTable.class).get();
BooleanJunction<?> bool = qb.bool();
List<List<? extends IQueryParameterType>> contentAndTerms = theParams.remove(Constants.PARAM_CONTENT);
addTextSearch(qb, bool, contentAndTerms, "myContentText");
List<List<? extends IQueryParameterType>> textAndTerms = theParams.remove(Constants.PARAM_TEXT);
addTextSearch(qb, bool, textAndTerms, "myNarrativeText");
if (bool.isEmpty()) {
return null;
}
if (isNotBlank(theResourceName)) {
bool.must(qb.keyword().onField("myResourceType").matching(theResourceName).createQuery());
}
Query luceneQuery = bool.createQuery();
// wrap Lucene query in a javax.persistence.Query
FullTextQuery jpaQuery = em.createFullTextQuery(luceneQuery, ResourceTable.class);
jpaQuery.setProjection("myId");
// execute search
List<?> result = jpaQuery.getResultList();
ArrayList<Long> retVal = new ArrayList<Long>();
for (Object object : result) {
Object[] nextArray = (Object[]) object;
retVal.add((Long)nextArray[0]);
}
return retVal;
}
private void addTextSearch(QueryBuilder qb, BooleanJunction<?> bool, List<List<? extends IQueryParameterType>> contentAndTerms, String field) {
if (contentAndTerms == null) {
return;
}
for (List<? extends IQueryParameterType> nextAnd : contentAndTerms) {
Set<String> terms = new HashSet<String>();
for (IQueryParameterType nextOr : nextAnd) {
StringParam nextOrString = (StringParam) nextOr;
String nextValueTrimmed = StringUtils.defaultString(nextOrString.getValue()).trim();
if (isNotBlank(nextValueTrimmed)) {
terms.add(nextValueTrimmed);
}
}
if (terms.isEmpty() == false) {
String joinedTerms = StringUtils.join(terms, ' ');
bool.must(qb.keyword().onField(field).matching(joinedTerms).createQuery());
}
}
}
}
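
For orientation, here is a minimal sketch of invoking the new full-text DAO for a _content search. The example class, the way it obtains ISearchDao (Spring wiring via fhir-spring-search-config-dstu2.xml), and the and/or list construction are illustrative assumptions, not part of this change.

package ca.uhn.fhir.jpa.dao;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

import ca.uhn.fhir.model.api.IQueryParameterType;
import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.server.Constants;

public class FullTextSearchExample {

	private ISearchDao mySearchDao; // assumed to be injected from fhir-spring-search-config-dstu2.xml

	public List<Long> findPatientsMentioning(String theWord) {
		// One AND group containing a single OR term for the _content parameter
		List<List<? extends IQueryParameterType>> contentAnd = new ArrayList<List<? extends IQueryParameterType>>();
		contentAnd.add(Collections.singletonList(new StringParam(theWord)));

		SearchParameterMap params = new SearchParameterMap();
		params.put(Constants.PARAM_CONTENT, contentAnd);

		// Returns matching resource PIDs, or null if neither _content nor _text terms were given
		return mySearchDao.search("Patient", params);
	}
}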

View File

@ -1,5 +1,7 @@
package ca.uhn.fhir.jpa.dao; package ca.uhn.fhir.jpa.dao;
import java.util.Collection;
/* /*
* #%L * #%L
* HAPI FHIR JPA Server * HAPI FHIR JPA Server
@ -34,6 +36,7 @@ import ca.uhn.fhir.model.api.TagList;
import ca.uhn.fhir.model.dstu2.composite.MetaDt; import ca.uhn.fhir.model.dstu2.composite.MetaDt;
import ca.uhn.fhir.rest.api.MethodOutcome; import ca.uhn.fhir.rest.api.MethodOutcome;
import ca.uhn.fhir.rest.api.ValidationModeEnum; import ca.uhn.fhir.rest.api.ValidationModeEnum;
import ca.uhn.fhir.rest.param.DateRangeParam;
import ca.uhn.fhir.rest.server.EncodingEnum; import ca.uhn.fhir.rest.server.EncodingEnum;
import ca.uhn.fhir.rest.server.IBundleProvider; import ca.uhn.fhir.rest.server.IBundleProvider;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException; import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
@ -121,7 +124,7 @@ public interface IFhirResourceDao<T extends IBaseResource> extends IDao {
Set<Long> searchForIds(String theParameterName, IQueryParameterType theValue); Set<Long> searchForIds(String theParameterName, IQueryParameterType theValue);
Set<Long> searchForIdsWithAndOr(SearchParameterMap theParams); Set<Long> searchForIdsWithAndOr(SearchParameterMap theParams, Collection<Long> theInitialPids, DateRangeParam theLastUpdated);
DaoMethodOutcome update(T theResource); DaoMethodOutcome update(T theResource);
@ -144,9 +147,9 @@ public interface IFhirResourceDao<T extends IBaseResource> extends IDao {
*/ */
DaoMethodOutcome deleteByUrl(String theUrl, boolean theTransaction); DaoMethodOutcome deleteByUrl(String theUrl, boolean theTransaction);
/** // /**
* Invoke the everything operation // * Invoke the everything operation
*/ // */
IBundleProvider everything(IIdType theId); // IBundleProvider everything(IIdType theId);
} }

View File

@ -0,0 +1,27 @@
package ca.uhn.fhir.jpa.dao;
/*
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2015 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import org.hl7.fhir.instance.model.api.IBaseResource;
public interface IFhirResourceDaoSearchParameter<T extends IBaseResource> extends IFhirResourceDao<T> {
// nothing yet..
}

View File

@ -27,7 +27,7 @@ import org.hl7.fhir.instance.model.api.IIdType;
public interface IFhirResourceDaoSubscription<T extends IBaseResource> extends IFhirResourceDao<T> { public interface IFhirResourceDaoSubscription<T extends IBaseResource> extends IFhirResourceDao<T> {
void pollForNewUndeliveredResources(); int pollForNewUndeliveredResources();
List<IBaseResource> getUndeliveredResourcesAndPurge(Long theSubscriptionPid); List<IBaseResource> getUndeliveredResourcesAndPurge(Long theSubscriptionPid);

View File

@ -0,0 +1,9 @@
package ca.uhn.fhir.jpa.dao;
import java.util.List;
public interface ISearchDao {
List<Long> search(String theResourceName, SearchParameterMap theParams);
}

View File

@ -46,7 +46,6 @@ public class SearchParameterMap extends LinkedHashMap<String, List<List<? extend
private Set<Include> myIncludes; private Set<Include> myIncludes;
private DateRangeParam myLastUpdated; private DateRangeParam myLastUpdated;
private Set<Include> myRevIncludes; private Set<Include> myRevIncludes;
private SortSpec mySort; private SortSpec mySort;
public void add(String theName, IQueryParameterAnd<?> theAnd) { public void add(String theName, IQueryParameterAnd<?> theAnd) {

View File

@ -30,6 +30,9 @@ import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne; import javax.persistence.ManyToOne;
import javax.persistence.MappedSuperclass; import javax.persistence.MappedSuperclass;
import org.hibernate.search.annotations.ContainedIn;
import org.hibernate.search.annotations.Field;
@MappedSuperclass @MappedSuperclass
public abstract class BaseResourceIndexedSearchParam implements Serializable { public abstract class BaseResourceIndexedSearchParam implements Serializable {
@ -42,16 +45,20 @@ public abstract class BaseResourceIndexedSearchParam implements Serializable {
@Column(name = "SP_ID") @Column(name = "SP_ID")
private Long myId; private Long myId;
@Field
@Column(name = "SP_NAME", length = MAX_SP_NAME, nullable=false) @Column(name = "SP_NAME", length = MAX_SP_NAME, nullable=false)
private String myParamName; private String myParamName;
@ManyToOne(optional = false) @ManyToOne(optional = false)
@JoinColumn(name = "RES_ID", referencedColumnName="RES_ID") @JoinColumn(name = "RES_ID", referencedColumnName="RES_ID")
@ContainedIn
private ResourceTable myResource; private ResourceTable myResource;
@Field
@Column(name = "RES_ID", insertable = false, updatable = false) @Column(name = "RES_ID", insertable = false, updatable = false)
private Long myResourcePid; private Long myResourcePid;
@Field
@Column(name = "RES_TYPE", nullable=false) @Column(name = "RES_TYPE", nullable=false)
private String myResourceType; private String myResourceType;

View File

@ -23,6 +23,7 @@ package ca.uhn.fhir.jpa.entity;
import java.io.Serializable; import java.io.Serializable;
import javax.persistence.Column; import javax.persistence.Column;
import javax.persistence.Embeddable;
import javax.persistence.Entity; import javax.persistence.Entity;
import javax.persistence.GeneratedValue; import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType; import javax.persistence.GenerationType;
@ -31,6 +32,7 @@ import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne; import javax.persistence.ManyToOne;
import javax.persistence.Table; import javax.persistence.Table;
@Embeddable
@Entity @Entity
@Table(name = "HFJ_HISTORY_TAG") @Table(name = "HFJ_HISTORY_TAG")
public class ResourceHistoryTag extends BaseTag implements Serializable { public class ResourceHistoryTag extends BaseTag implements Serializable {

View File

@ -21,6 +21,7 @@ package ca.uhn.fhir.jpa.entity;
*/ */
import javax.persistence.Column; import javax.persistence.Column;
import javax.persistence.Embeddable;
import javax.persistence.Entity; import javax.persistence.Entity;
import javax.persistence.Table; import javax.persistence.Table;
@ -28,8 +29,10 @@ import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder; import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ToStringBuilder; import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle; import org.apache.commons.lang3.builder.ToStringStyle;
import org.hibernate.search.annotations.Field;
//@formatter:off //@formatter:off
@Embeddable
@Entity @Entity
@Table(name = "HFJ_SPIDX_COORDS" /* , indexes = { @Index(name = "IDX_SP_TOKEN", columnList = "SP_SYSTEM,SP_VALUE") } */) @Table(name = "HFJ_SPIDX_COORDS" /* , indexes = { @Index(name = "IDX_SP_TOKEN", columnList = "SP_SYSTEM,SP_VALUE") } */)
@org.hibernate.annotations.Table(appliesTo = "HFJ_SPIDX_COORDS", indexes = { @org.hibernate.annotations.Table(appliesTo = "HFJ_SPIDX_COORDS", indexes = {
@ -43,9 +46,11 @@ public class ResourceIndexedSearchParamCoords extends BaseResourceIndexedSearchP
private static final long serialVersionUID = 1L; private static final long serialVersionUID = 1L;
@Column(name = "SP_LATITUDE") @Column(name = "SP_LATITUDE")
@Field
public double myLatitude; public double myLatitude;
@Column(name = "SP_LONGITUDE") @Column(name = "SP_LONGITUDE")
@Field
public double myLongitude; public double myLongitude;
public ResourceIndexedSearchParamCoords() { public ResourceIndexedSearchParamCoords() {

View File

@ -23,6 +23,7 @@ package ca.uhn.fhir.jpa.entity;
import java.util.Date; import java.util.Date;
import javax.persistence.Column; import javax.persistence.Column;
import javax.persistence.Embeddable;
import javax.persistence.Entity; import javax.persistence.Entity;
import javax.persistence.Table; import javax.persistence.Table;
import javax.persistence.Temporal; import javax.persistence.Temporal;
@ -32,8 +33,10 @@ import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder; import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ToStringBuilder; import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle; import org.apache.commons.lang3.builder.ToStringStyle;
import org.hibernate.search.annotations.Field;
//@formatter:off //@formatter:off
@Embeddable
@Entity @Entity
@Table(name = "HFJ_SPIDX_DATE" /*, indexes= {@Index(name="IDX_SP_DATE", columnList= "SP_VALUE_LOW,SP_VALUE_HIGH")}*/) @Table(name = "HFJ_SPIDX_DATE" /*, indexes= {@Index(name="IDX_SP_DATE", columnList= "SP_VALUE_LOW,SP_VALUE_HIGH")}*/)
@org.hibernate.annotations.Table(appliesTo = "HFJ_SPIDX_DATE", indexes= { @org.hibernate.annotations.Table(appliesTo = "HFJ_SPIDX_DATE", indexes= {
@ -46,15 +49,23 @@ public class ResourceIndexedSearchParamDate extends BaseResourceIndexedSearchPar
@Column(name = "SP_VALUE_HIGH", nullable = true) @Column(name = "SP_VALUE_HIGH", nullable = true)
@Temporal(TemporalType.TIMESTAMP) @Temporal(TemporalType.TIMESTAMP)
@Field
public Date myValueHigh; public Date myValueHigh;
@Column(name = "SP_VALUE_LOW", nullable = true) @Column(name = "SP_VALUE_LOW", nullable = true)
@Temporal(TemporalType.TIMESTAMP) @Temporal(TemporalType.TIMESTAMP)
@Field
public Date myValueLow; public Date myValueLow;
/**
* Constructor
*/
public ResourceIndexedSearchParamDate() { public ResourceIndexedSearchParamDate() {
} }
/**
* Constructor
*/
public ResourceIndexedSearchParamDate(String theName, Date theLow, Date theHigh) { public ResourceIndexedSearchParamDate(String theName, Date theLow, Date theHigh) {
setParamName(theName); setParamName(theName);
setValueLow(theLow); setValueLow(theLow);

View File

@ -23,6 +23,7 @@ package ca.uhn.fhir.jpa.entity;
import java.math.BigDecimal; import java.math.BigDecimal;
import javax.persistence.Column; import javax.persistence.Column;
import javax.persistence.Embeddable;
import javax.persistence.Entity; import javax.persistence.Entity;
import javax.persistence.Table; import javax.persistence.Table;
@ -30,8 +31,15 @@ import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder; import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ToStringBuilder; import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle; import org.apache.commons.lang3.builder.ToStringStyle;
import org.hibernate.search.annotations.Field;
import org.hibernate.search.annotations.FieldBridge;
import org.hibernate.search.annotations.Indexed;
import org.hibernate.search.annotations.NumericField;
import ca.uhn.fhir.jpa.util.BigDecimalNumericFieldBridge;
//@formatter:off //@formatter:off
@Embeddable
@Entity @Entity
@Table(name = "HFJ_SPIDX_NUMBER" /*, indexes= {@Index(name="IDX_SP_NUMBER", columnList="SP_VALUE")}*/ ) @Table(name = "HFJ_SPIDX_NUMBER" /*, indexes= {@Index(name="IDX_SP_NUMBER", columnList="SP_VALUE")}*/ )
@org.hibernate.annotations.Table(appliesTo = "HFJ_SPIDX_NUMBER", indexes= { @org.hibernate.annotations.Table(appliesTo = "HFJ_SPIDX_NUMBER", indexes= {
@ -43,6 +51,9 @@ public class ResourceIndexedSearchParamNumber extends BaseResourceIndexedSearchP
private static final long serialVersionUID = 1L; private static final long serialVersionUID = 1L;
@Column(name = "SP_VALUE", nullable = true) @Column(name = "SP_VALUE", nullable = true)
@Field
@NumericField
@FieldBridge(impl = BigDecimalNumericFieldBridge.class)
public BigDecimal myValue; public BigDecimal myValue;
public ResourceIndexedSearchParamNumber() { public ResourceIndexedSearchParamNumber() {

View File

@ -23,6 +23,7 @@ package ca.uhn.fhir.jpa.entity;
import java.math.BigDecimal; import java.math.BigDecimal;
import javax.persistence.Column; import javax.persistence.Column;
import javax.persistence.Embeddable;
import javax.persistence.Entity; import javax.persistence.Entity;
import javax.persistence.Table; import javax.persistence.Table;
@ -30,8 +31,15 @@ import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder; import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ToStringBuilder; import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle; import org.apache.commons.lang3.builder.ToStringStyle;
import org.hibernate.search.annotations.Field;
import org.hibernate.search.annotations.FieldBridge;
import org.hibernate.search.annotations.Indexed;
import org.hibernate.search.annotations.NumericField;
import ca.uhn.fhir.jpa.util.BigDecimalNumericFieldBridge;
//@formatter:off //@formatter:off
@Embeddable
@Entity @Entity
@Table(name = "HFJ_SPIDX_QUANTITY" /*, indexes= {@Index(name="IDX_SP_NUMBER", columnList="SP_VALUE")}*/ ) @Table(name = "HFJ_SPIDX_QUANTITY" /*, indexes= {@Index(name="IDX_SP_NUMBER", columnList="SP_VALUE")}*/ )
@org.hibernate.annotations.Table(appliesTo = "HFJ_SPIDX_QUANTITY", indexes= { @org.hibernate.annotations.Table(appliesTo = "HFJ_SPIDX_QUANTITY", indexes= {
@ -40,15 +48,22 @@ import org.apache.commons.lang3.builder.ToStringStyle;
//@formatter:on //@formatter:on
public class ResourceIndexedSearchParamQuantity extends BaseResourceIndexedSearchParam { public class ResourceIndexedSearchParamQuantity extends BaseResourceIndexedSearchParam {
private static final int MAX_LENGTH = 200;
private static final long serialVersionUID = 1L; private static final long serialVersionUID = 1L;
@Column(name = "SP_SYSTEM", nullable = true, length = 100) @Column(name = "SP_SYSTEM", nullable = true, length = MAX_LENGTH)
@Field
public String mySystem; public String mySystem;
@Column(name = "SP_UNITS", nullable = true, length = 100) @Column(name = "SP_UNITS", nullable = true, length = MAX_LENGTH)
@Field
public String myUnits; public String myUnits;
@Column(name = "SP_VALUE", nullable = true) @Column(name = "SP_VALUE", nullable = true)
@Field
@NumericField
@FieldBridge(impl = BigDecimalNumericFieldBridge.class)
public BigDecimal myValue; public BigDecimal myValue;
public ResourceIndexedSearchParamQuantity() { public ResourceIndexedSearchParamQuantity() {

View File

@ -21,7 +21,10 @@ package ca.uhn.fhir.jpa.entity;
*/ */
import javax.persistence.Column; import javax.persistence.Column;
import javax.persistence.Embeddable;
import javax.persistence.Entity; import javax.persistence.Entity;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.Table; import javax.persistence.Table;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
@ -29,7 +32,10 @@ import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder; import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ToStringBuilder; import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle; import org.apache.commons.lang3.builder.ToStringStyle;
import org.hibernate.search.annotations.ContainedIn;
import org.hibernate.search.annotations.Field;
@Embeddable
@Entity @Entity
@Table(name = "HFJ_SPIDX_STRING"/* , indexes= {@Index(name="IDX_SP_STRING", columnList="SP_VALUE_NORMALIZED")} */) @Table(name = "HFJ_SPIDX_STRING"/* , indexes= {@Index(name="IDX_SP_STRING", columnList="SP_VALUE_NORMALIZED")} */)
@org.hibernate.annotations.Table(appliesTo = "HFJ_SPIDX_STRING", indexes = { @org.hibernate.annotations.Table(appliesTo = "HFJ_SPIDX_STRING", indexes = {
@ -37,15 +43,21 @@ import org.apache.commons.lang3.builder.ToStringStyle;
}) })
public class ResourceIndexedSearchParamString extends BaseResourceIndexedSearchParam { public class ResourceIndexedSearchParamString extends BaseResourceIndexedSearchParam {
public static final int MAX_LENGTH = 100; public static final int MAX_LENGTH = 200;
private static final long serialVersionUID = 1L; private static final long serialVersionUID = 1L;
@Column(name = "SP_VALUE_EXACT", length = 100, nullable = true) @Column(name = "SP_VALUE_EXACT", length = MAX_LENGTH, nullable = true)
public String myValueExact; private String myValueExact;
@Column(name = "SP_VALUE_NORMALIZED", length = MAX_LENGTH, nullable = true) @Column(name = "SP_VALUE_NORMALIZED", length = MAX_LENGTH, nullable = true)
public String myValueNormalized; private String myValueNormalized;
@ManyToOne(optional = false)
@JoinColumn(name = "RES_ID", referencedColumnName="RES_ID", insertable=false, updatable=false)
@ContainedIn
private ResourceTable myResourceTable;
public ResourceIndexedSearchParamString() { public ResourceIndexedSearchParamString() {
} }

View File

@ -21,6 +21,7 @@ package ca.uhn.fhir.jpa.entity;
*/ */
import javax.persistence.Column; import javax.persistence.Column;
import javax.persistence.Embeddable;
import javax.persistence.Entity; import javax.persistence.Entity;
import javax.persistence.Table; import javax.persistence.Table;
@ -29,21 +30,26 @@ import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder; import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ToStringBuilder; import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle; import org.apache.commons.lang3.builder.ToStringStyle;
import org.hibernate.search.annotations.Field;
import org.hibernate.search.annotations.Indexed;
@Embeddable
@Entity @Entity
@Table(name = "HFJ_SPIDX_TOKEN" /* , indexes = { @Index(name = "IDX_SP_TOKEN", columnList = "SP_SYSTEM,SP_VALUE") } */) @Table(name = "HFJ_SPIDX_TOKEN" /* , indexes = { @Index(name = "IDX_SP_TOKEN", columnList = "SP_SYSTEM,SP_VALUE") } */)
@org.hibernate.annotations.Table(appliesTo = "HFJ_SPIDX_TOKEN", indexes = { @org.hibernate.annotations.Index(name = "IDX_SP_TOKEN", columnNames = { "RES_TYPE", "SP_NAME", "SP_SYSTEM", "SP_VALUE" }), @org.hibernate.annotations.Table(appliesTo = "HFJ_SPIDX_TOKEN", indexes = { @org.hibernate.annotations.Index(name = "IDX_SP_TOKEN", columnNames = { "RES_TYPE", "SP_NAME", "SP_SYSTEM", "SP_VALUE" }),
@org.hibernate.annotations.Index(name = "IDX_SP_TOKEN_UNQUAL", columnNames = { "RES_TYPE", "SP_NAME", "SP_VALUE" }) }) @org.hibernate.annotations.Index(name = "IDX_SP_TOKEN_UNQUAL", columnNames = { "RES_TYPE", "SP_NAME", "SP_VALUE" }) })
public class ResourceIndexedSearchParamToken extends BaseResourceIndexedSearchParam { public class ResourceIndexedSearchParamToken extends BaseResourceIndexedSearchParam {
public static final int MAX_LENGTH = 100; public static final int MAX_LENGTH = 200;
private static final long serialVersionUID = 1L; private static final long serialVersionUID = 1L;
@Field()
@Column(name = "SP_SYSTEM", nullable = true, length = MAX_LENGTH) @Column(name = "SP_SYSTEM", nullable = true, length = MAX_LENGTH)
public String mySystem; public String mySystem;
@Column(name = "SP_VALUE", nullable = true, length = 100) @Field()
@Column(name = "SP_VALUE", nullable = true, length = MAX_LENGTH)
public String myValue; public String myValue;
public ResourceIndexedSearchParamToken() { public ResourceIndexedSearchParamToken() {

View File

@ -21,6 +21,7 @@ package ca.uhn.fhir.jpa.entity;
*/ */
import javax.persistence.Column; import javax.persistence.Column;
import javax.persistence.Embeddable;
import javax.persistence.Entity; import javax.persistence.Entity;
import javax.persistence.Table; import javax.persistence.Table;
@ -28,8 +29,12 @@ import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.builder.EqualsBuilder; import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder; import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ToStringBuilder; import org.apache.commons.lang3.builder.ToStringBuilder;
import org.hibernate.search.annotations.Analyzer;
import org.hibernate.search.annotations.Field;
import org.hibernate.search.annotations.Indexed;
//@formatter:off //@formatter:off
@Embeddable
@Entity @Entity
@Table(name = "HFJ_SPIDX_URI" /* , indexes = { @Index(name = "IDX_SP_TOKEN", columnList = "SP_SYSTEM,SP_VALUE") } */) @Table(name = "HFJ_SPIDX_URI" /* , indexes = { @Index(name = "IDX_SP_TOKEN", columnList = "SP_SYSTEM,SP_VALUE") } */)
@org.hibernate.annotations.Table(appliesTo = "HFJ_SPIDX_URI", indexes = { @org.hibernate.annotations.Table(appliesTo = "HFJ_SPIDX_URI", indexes = {
@ -43,6 +48,7 @@ public class ResourceIndexedSearchParamUri extends BaseResourceIndexedSearchPara
private static final long serialVersionUID = 1L; private static final long serialVersionUID = 1L;
@Column(name = "SP_URI", nullable = true, length = MAX_LENGTH) @Column(name = "SP_URI", nullable = true, length = MAX_LENGTH)
@Field()
public String myUri; public String myUri;
public ResourceIndexedSearchParamUri() { public ResourceIndexedSearchParamUri() {

View File

@ -34,28 +34,29 @@ import javax.persistence.FetchType;
import javax.persistence.GeneratedValue; import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType; import javax.persistence.GenerationType;
import javax.persistence.Id; import javax.persistence.Id;
import javax.persistence.Inheritance; import javax.persistence.Index;
import javax.persistence.InheritanceType; import javax.persistence.Lob;
import javax.persistence.OneToMany; import javax.persistence.OneToMany;
import javax.persistence.Table; import javax.persistence.Table;
import org.hibernate.annotations.Index; import org.hibernate.search.annotations.Field;
import org.hibernate.search.annotations.Indexed;
import org.hibernate.search.annotations.IndexedEmbedded;
import ca.uhn.fhir.jpa.search.IndexNonDeletedInterceptor;
import ca.uhn.fhir.model.primitive.IdDt; import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.rest.server.Constants; import ca.uhn.fhir.rest.server.Constants;
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException; import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
//@formatter:off //@formatter:off
@Indexed(/*interceptor=IndexNonDeletedInterceptor.class*/)
@Entity @Entity
@Table(name = "HFJ_RESOURCE", uniqueConstraints = {}) @Table(name = "HFJ_RESOURCE", uniqueConstraints = {}, indexes= {
@Inheritance(strategy = InheritanceType.JOINED) @Index(name = "IDX_RES_DATE", columnList="RES_UPDATED"),
@org.hibernate.annotations.Table(appliesTo = "HFJ_RESOURCE", @Index(name = "IDX_RES_LANG", columnList="RES_TYPE,RES_LANGUAGE"),
indexes = { @Index(name = "IDX_RES_PROFILE", columnList="RES_PROFILE"),
@Index(name = "IDX_RES_DATE", columnNames = { "RES_UPDATED" }), @Index(name = "IDX_INDEXSTATUS", columnList="SP_INDEX_STATUS")
@Index(name = "IDX_RES_LANG", columnNames = { "RES_TYPE", "RES_LANGUAGE" }), })
@Index(name = "IDX_RES_PROFILE", columnNames = { "RES_PROFILE" }),
@Index(name = "IDX_INDEXSTATUS", columnNames = { "SP_INDEX_STATUS" })
})
//@formatter:on //@formatter:on
public class ResourceTable extends BaseHasResource implements Serializable { public class ResourceTable extends BaseHasResource implements Serializable {
private static final int MAX_LANGUAGE_LENGTH = 20; private static final int MAX_LANGUAGE_LENGTH = 20;
@ -82,6 +83,22 @@ public class ResourceTable extends BaseHasResource implements Serializable {
@Column(name = "RES_LANGUAGE", length = MAX_LANGUAGE_LENGTH, nullable = true) @Column(name = "RES_LANGUAGE", length = MAX_LANGUAGE_LENGTH, nullable = true)
private String myLanguage; private String myLanguage;
/**
* Holds the narrative text only - Used for Fulltext searching but not directly stored in the DB
*/
@Column(name = "SP_NARRATIVE_TEXT", length = Integer.MAX_VALUE - 1, nullable=true)
@Lob
@Field()
private String myNarrativeText;
/**
* Holds the resource content text only - Used for Fulltext searching but not directly stored in the DB
*/
@Column(name = "SP_CONTENT_TEXT", length = Integer.MAX_VALUE - 1, nullable=true)
@Lob
@Field()
private String myContentText;
@OneToMany(mappedBy = "myResource", cascade = {}, fetch = FetchType.LAZY, orphanRemoval = false) @OneToMany(mappedBy = "myResource", cascade = {}, fetch = FetchType.LAZY, orphanRemoval = false)
private Collection<ResourceIndexedSearchParamCoords> myParamsCoords; private Collection<ResourceIndexedSearchParamCoords> myParamsCoords;
@ -131,6 +148,7 @@ public class ResourceTable extends BaseHasResource implements Serializable {
private Collection<ResourceLink> myResourceLinks; private Collection<ResourceLink> myResourceLinks;
@Column(name = "RES_TYPE", length = RESTYPE_LEN) @Column(name = "RES_TYPE", length = RESTYPE_LEN)
@Field
private String myResourceType; private String myResourceType;
@OneToMany(mappedBy = "myResource", cascade = CascadeType.ALL, fetch = FetchType.LAZY, orphanRemoval = true) @OneToMany(mappedBy = "myResource", cascade = CascadeType.ALL, fetch = FetchType.LAZY, orphanRemoval = true)
@ -303,6 +321,14 @@ public class ResourceTable extends BaseHasResource implements Serializable {
myLanguage = theLanguage; myLanguage = theLanguage;
} }
public void setNarrativeTextParsedIntoWords(String theNarrativeText) {
myNarrativeText = theNarrativeText;
}
public void setContentTextParsedIntoWords(String theContentText) {
myContentText = theContentText;
}
public void setParamsCoords(Collection<ResourceIndexedSearchParamCoords> theParamsCoords) { public void setParamsCoords(Collection<ResourceIndexedSearchParamCoords> theParamsCoords) {
if (!isParamsTokenPopulated() && theParamsCoords.isEmpty()) { if (!isParamsTokenPopulated() && theParamsCoords.isEmpty()) {
return; return;
@ -426,10 +452,12 @@ public class ResourceTable extends BaseHasResource implements Serializable {
retVal.setDeleted(getDeleted()); retVal.setDeleted(getDeleted());
retVal.setForcedId(getForcedId()); retVal.setForcedId(getForcedId());
for (ResourceTag next : getTags()) { if (isHasTags()) {
retVal.addTag(next); for (ResourceTag next : getTags()) {
retVal.addTag(next);
}
} }
return retVal; return retVal;
} }

View File

@ -40,7 +40,6 @@ import ca.uhn.fhir.rest.annotation.Operation;
import ca.uhn.fhir.rest.annotation.OperationParam; import ca.uhn.fhir.rest.annotation.OperationParam;
import ca.uhn.fhir.rest.annotation.Transaction; import ca.uhn.fhir.rest.annotation.Transaction;
import ca.uhn.fhir.rest.annotation.TransactionParam; import ca.uhn.fhir.rest.annotation.TransactionParam;
import ca.uhn.fhir.rest.param.NumberParam;
public class JpaSystemProviderDstu2 extends BaseJpaSystemProvider<Bundle> { public class JpaSystemProviderDstu2 extends BaseJpaSystemProvider<Bundle> {
@ -171,20 +170,6 @@ public class JpaSystemProviderDstu2 extends BaseJpaSystemProvider<Bundle> {
return retVal; return retVal;
} }
//@formatter:off
@Operation(name="$perform-reindexing-pass", idempotent=true, returnParameters= {
@OperationParam(name="count", type=IntegerDt.class)
})
//@formatter:on
public Parameters performReindexingPass(@OperationParam(min=0, max=1, name="count") IntegerDt theCount) {
Integer countIn = theCount != null && theCount.getValue()!= null ? theCount.getValue().intValue() : null;
int count = mySystemDao.performReindexingPass(countIn);
Parameters retVal = new Parameters();
retVal.addParameter().setName("count").setValue(new IntegerDt(count));
return retVal;
}
//@formatter:off //@formatter:off
@Operation(name="$meta", idempotent=true, returnParameters= { @Operation(name="$meta", idempotent=true, returnParameters= {
@OperationParam(name="return", type=MetaDt.class) @OperationParam(name="return", type=MetaDt.class)

View File

@ -0,0 +1,38 @@
package ca.uhn.fhir.jpa.search;
import org.hibernate.search.indexes.interceptor.EntityIndexingInterceptor;
import org.hibernate.search.indexes.interceptor.IndexingOverride;
import ca.uhn.fhir.jpa.entity.ResourceTable;
/**
* Only index resources which are not deleted: deleted resources are skipped on add and removed from the index on update
*/
public class IndexNonDeletedInterceptor implements EntityIndexingInterceptor<ResourceTable> {
@Override
public IndexingOverride onAdd(ResourceTable entity) {
if (entity.getDeleted() == null) {
return IndexingOverride.APPLY_DEFAULT;
}
return IndexingOverride.SKIP;
}
@Override
public IndexingOverride onUpdate(ResourceTable entity) {
if (entity.getDeleted() == null) {
return IndexingOverride.UPDATE;
}
return IndexingOverride.REMOVE;
}
@Override
public IndexingOverride onDelete(ResourceTable entity) {
return IndexingOverride.APPLY_DEFAULT;
}
@Override
public IndexingOverride onCollectionUpdate(ResourceTable entity) {
return IndexingOverride.APPLY_DEFAULT;
}
}
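
The interceptor's decisions follow directly from ResourceTable.getDeleted(). Below is a hypothetical standalone demo of those decisions; it is not part of this change and assumes ResourceTable's no-argument constructor and a setDeleted(Date) setter inherited from BaseHasResource.

package ca.uhn.fhir.jpa.search;

import java.util.Date;

import ca.uhn.fhir.jpa.entity.ResourceTable;

public class IndexNonDeletedInterceptorDemo {

	public static void main(String[] args) {
		IndexNonDeletedInterceptor interceptor = new IndexNonDeletedInterceptor();

		ResourceTable live = new ResourceTable();
		// Non-deleted resources are indexed normally
		System.out.println(interceptor.onAdd(live));       // APPLY_DEFAULT
		System.out.println(interceptor.onUpdate(live));    // UPDATE

		ResourceTable deleted = new ResourceTable();
		deleted.setDeleted(new Date());
		// Deleted resources are skipped on add and removed from the index on update
		System.out.println(interceptor.onAdd(deleted));    // SKIP
		System.out.println(interceptor.onUpdate(deleted)); // REMOVE
	}
}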

View File

@ -0,0 +1,42 @@
package ca.uhn.fhir.jpa.util;
import java.math.BigDecimal;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.IndexableField;
import org.hibernate.search.bridge.LuceneOptions;
import org.hibernate.search.bridge.TwoWayFieldBridge;
public class BigDecimalNumericFieldBridge implements TwoWayFieldBridge {
@Override
public void set(String name, Object value, Document document, LuceneOptions luceneOptions) {
if (value == null) {
if (luceneOptions.indexNullAs() != null) {
luceneOptions.addFieldToDocument(name, luceneOptions.indexNullAs(), document);
}
} else {
BigDecimal bdValue = (BigDecimal)value;
applyToLuceneOptions(luceneOptions, name, bdValue.doubleValue(), document);
}
}
@Override
public final String objectToString(final Object object) {
return object == null ? null : object.toString();
}
@Override
public Object get(final String name, final Document document) {
final IndexableField field = document.getField(name);
if (field != null) {
Double doubleVal = (Double)field.numericValue();
return new BigDecimal(doubleVal);
} else {
return null;
}
}
protected void applyToLuceneOptions(LuceneOptions luceneOptions, String name, Number value, Document document) {
luceneOptions.addNumericFieldToDocument(name, value, document);
}
}
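
The bridge writes the value as a double and rebuilds a BigDecimal from that double on read, so scale and any precision beyond a double are lost in the round trip. The small class below (illustrative only, not part of this change) shows just that conversion.

import java.math.BigDecimal;

public class BigDecimalBridgeRoundTripDemo {

	public static void main(String[] args) {
		BigDecimal original = new BigDecimal("123.4500");
		double indexed = original.doubleValue();       // what set() writes to the Lucene document
		BigDecimal readBack = new BigDecimal(indexed); // what get() reconstructs from the field
		// readBack carries the double approximation; the original scale ("123.4500") is not preserved
		System.out.println(original + " -> " + indexed + " -> " + readBack);
	}
}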

View File

@ -0,0 +1,21 @@
<beans
xmlns="http://www.springframework.org/schema/beans"
xmlns:context="http://www.springframework.org/schema/context"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns:websocket="http://www.springframework.org/schema/websocket"
xmlns:task="http://www.springframework.org/schema/task"
xmlns:tx="http://www.springframework.org/schema/tx"
xsi:schemaLocation="
http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context-3.0.xsd
http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd
http://www.springframework.org/schema/websocket http://www.springframework.org/schema/websocket/spring-websocket.xsd
http://www.springframework.org/schema/tx http://www.springframework.org/schema/tx/spring-tx.xsd
http://www.springframework.org/schema/task http://www.springframework.org/schema/task/spring-task.xsd">
<context:annotation-config />
<tx:annotation-driven transaction-manager="myTxManagerDstu2" />
<bean id="mySearchDaoDstu2" class="ca.uhn.fhir.jpa.dao.FhirSearchDao" />
</beans>

View File

@ -168,7 +168,7 @@ public abstract class BaseJpaDstu2Test extends BaseJpaTest {
} }
protected <T extends IBaseResource> T loadResourceFromClasspath(Class<T> type, String resourceName) throws IOException { protected <T extends IBaseResource> T loadResourceFromClasspath(Class<T> type, String resourceName) throws IOException {
InputStream stream = FhirResourceDaoDstu2SearchTest.class.getResourceAsStream(resourceName); InputStream stream = FhirResourceDaoDstu2SearchNoFtTest.class.getResourceAsStream(resourceName);
if (stream == null) { if (stream == null) {
fail("Unable to load resource: " + resourceName); fail("Unable to load resource: " + resourceName);
} }

View File

@ -18,44 +18,30 @@ import ca.uhn.fhir.rest.server.IBundleProvider;
public class BaseJpaTest { public class BaseJpaTest {
public static String loadClasspath(String resource) throws IOException { private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(BaseJpaTest.class);
InputStream bundleRes = SystemProviderDstu2Test.class.getResourceAsStream(resource);
if (bundleRes == null) {
throw new NullPointerException("Can not load " + resource);
}
String bundleStr = IOUtils.toString(bundleRes);
return bundleStr;
}
@AfterClass
public static void afterClassShutdownDerby() throws SQLException {
// try {
// DriverManager.getConnection("jdbc:derby:memory:myUnitTestDB;drop=true");
// } catch (SQLNonTransientConnectionException e) {
// // expected.. for some reason....
// }
}
@SuppressWarnings({ "rawtypes" }) @SuppressWarnings({ "rawtypes" })
protected List toList(IBundleProvider theSearch) { protected List toList(IBundleProvider theSearch) {
return theSearch.getResources(0, theSearch.size()); return theSearch.getResources(0, theSearch.size());
} }
protected List<IIdType> toUnqualifiedVersionlessIds(IBundleProvider theFound) { protected List<IIdType> toUnqualifiedVersionlessIds(Bundle theFound) {
List<IIdType> retVal = new ArrayList<IIdType>(); List<IIdType> retVal = new ArrayList<IIdType>();
List<IBaseResource> resources = theFound.getResources(0, theFound.size()); for (Entry next : theFound.getEntry()) {
for (IBaseResource next : resources) { // if (next.getResource()!= null) {
retVal.add((IIdType) next.getIdElement().toUnqualifiedVersionless()); retVal.add(next.getResource().getId().toUnqualifiedVersionless());
// }
} }
return retVal; return retVal;
} }
protected List<IIdType> toUnqualifiedVersionlessIds(Bundle theFound) { protected List<IIdType> toUnqualifiedVersionlessIds(IBundleProvider theFound) {
List<IIdType> retVal = new ArrayList<IIdType>(); List<IIdType> retVal = new ArrayList<IIdType>();
for (Entry next : theFound.getEntry()) { int size = theFound.size();
// if (next.getResource()!= null) { ourLog.info("Found {} results", size);
retVal.add(next.getResource().getId().toUnqualifiedVersionless()); List<IBaseResource> resources = theFound.getResources(0, size);
// } for (IBaseResource next : resources) {
retVal.add((IIdType) next.getIdElement().toUnqualifiedVersionless());
} }
return retVal; return retVal;
} }
@ -68,5 +54,22 @@ public class BaseJpaTest {
return retVal; return retVal;
} }
@AfterClass
public static void afterClassShutdownDerby() throws SQLException {
// try {
// DriverManager.getConnection("jdbc:derby:memory:myUnitTestDB;drop=true");
// } catch (SQLNonTransientConnectionException e) {
// // expected.. for some reason....
// }
}
public static String loadClasspath(String resource) throws IOException {
InputStream bundleRes = SystemProviderDstu2Test.class.getResourceAsStream(resource);
if (bundleRes == null) {
throw new NullPointerException("Can not load " + resource);
}
String bundleStr = IOUtils.toString(bundleRes);
return bundleStr;
}
} }

View File

@ -1,10 +1,13 @@
package ca.uhn.fhir.jpa.dao; package ca.uhn.fhir.jpa.dao;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.containsString;
import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat; import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail; import static org.junit.Assert.fail;
import java.util.List;
import org.hamcrest.core.StringContains; import org.hamcrest.core.StringContains;
import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.instance.model.api.IIdType;
import org.junit.AfterClass; import org.junit.AfterClass;
@ -22,6 +25,8 @@ import ca.uhn.fhir.model.dstu.resource.Observation;
import ca.uhn.fhir.model.dstu.resource.Organization; import ca.uhn.fhir.model.dstu.resource.Organization;
import ca.uhn.fhir.model.dstu.resource.Patient; import ca.uhn.fhir.model.dstu.resource.Patient;
import ca.uhn.fhir.model.primitive.IdDt; import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.model.primitive.StringDt;
import ca.uhn.fhir.rest.server.Constants;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException; import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException; import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
@ -61,7 +66,7 @@ public class FhirResourceDaoDstu1Test extends BaseJpaTest {
assertThat(e.getMessage(), containsString("Can not create entity with ID[" + methodName + "], a resource with this ID already exists")); assertThat(e.getMessage(), containsString("Can not create entity with ID[" + methodName + "], a resource with this ID already exists"));
} }
} }
@Test @Test
public void testCreateNumericIdFails() { public void testCreateNumericIdFails() {
Patient p = new Patient(); Patient p = new Patient();

View File

@ -0,0 +1,120 @@
package ca.uhn.fhir.jpa.dao;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.empty;
import static org.junit.Assert.assertThat;
import java.util.List;
import org.hibernate.search.jpa.FullTextEntityManager;
import org.hibernate.search.jpa.Search;
import org.hl7.fhir.instance.model.api.IIdType;
import org.junit.Before;
import org.junit.Test;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.transaction.annotation.Transactional;
import ca.uhn.fhir.jpa.entity.ResourceTable;
import ca.uhn.fhir.model.dstu2.resource.Observation;
import ca.uhn.fhir.model.dstu2.resource.Patient;
import ca.uhn.fhir.model.primitive.StringDt;
import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.server.Constants;
@ContextConfiguration(locations = { "classpath:fhir-spring-search-config-dstu2.xml" })
public class FhirResourceDaoDstu2SearchFtTest extends BaseJpaDstu2Test {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirResourceDaoDstu2SearchFtTest.class);
@Before
@Transactional
public void beforeFlushFT() {
FullTextEntityManager ftem = Search.getFullTextEntityManager(myEntityManager);
ftem.purgeAll(ResourceTable.class);
ftem.flushToIndexes();
}
@Test
public void testSearchAndReindex() {
Patient patient;
SearchParameterMap map;
patient = new Patient();
patient.getText().setDiv("<div>DIVAAA</div>");
patient.addName().addGiven("NAMEAAA");
IIdType pId1 = myPatientDao.create(patient).getId().toUnqualifiedVersionless();
map = new SearchParameterMap();
map.add(Constants.PARAM_CONTENT, new StringParam("NAMEAAA"));
assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(map)), contains(pId1));
map = new SearchParameterMap();
map.add(Constants.PARAM_TEXT, new StringParam("DIVAAA"));
assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(map)), contains(pId1));
/*
* Reindex
*/
patient = new Patient();
patient.setId(pId1);
patient.getText().setDiv("<div>DIVBBB</div>");
patient.addName().addGiven("NAMEBBB");
myPatientDao.update(patient);
map = new SearchParameterMap();
map.add(Constants.PARAM_CONTENT, new StringParam("NAMEAAA"));
assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(map)), empty());
map = new SearchParameterMap();
map.add(Patient.SP_NAME, new StringParam("NAMEBBB"));
assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(map)), contains(pId1));
map = new SearchParameterMap();
map.add(Constants.PARAM_CONTENT, new StringParam("NAMEBBB"));
assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(map)), contains(pId1));
map = new SearchParameterMap();
map.add(Constants.PARAM_TEXT, new StringParam("DIVBBB"));
assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(map)), contains(pId1));
}
@Test
public void testSearchWithChainedParams() {
String methodName = "testSearchWithChainedParams";
IIdType pId1;
{
Patient patient = new Patient();
patient.addName().addGiven("methodName");
patient.addAddress().addLine("My fulltext address");
pId1 = myPatientDao.create(patient).getId().toUnqualifiedVersionless();
}
Observation obs = new Observation();
obs.getSubject().setReference(pId1);
obs.setValue(new StringDt("This is the FULLtext of the observation"));
IIdType oId1 = myObservationDao.create(obs).getId().toUnqualifiedVersionless();
obs = new Observation();
obs.getSubject().setReference(pId1);
obs.setValue(new StringDt("Another fullText"));
IIdType oId2 = myObservationDao.create(obs).getId().toUnqualifiedVersionless();
List<IIdType> patients;
SearchParameterMap params;
params = new SearchParameterMap();
params.add(Constants.PARAM_CONTENT, new StringParam("fulltext"));
patients = toUnqualifiedVersionlessIds(myPatientDao.search(params));
assertThat(patients, containsInAnyOrder(pId1));
params = new SearchParameterMap();
params.add(Constants.PARAM_CONTENT, new StringParam("FULLTEXT"));
patients = toUnqualifiedVersionlessIds(myObservationDao.search(params));
assertThat(patients, containsInAnyOrder(oId1, oId2));
}
}
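
The new full-text tests drive Hibernate Search entirely through the generic DAO search API: the _content parameter matches content indexed from the resource body, while _text matches the narrative. The following is only a sketch of that usage, written as if it were one more method of the test class above; it is not part of this changeset, and the fixture fields (myPatientDao, the inherited toUnqualifiedVersionlessIds helper) are assumed from BaseJpaDstu2Test.

	@Test
	public void testContentVersusTextSketch() {
		// _content matches values indexed from the resource body (names, addresses, ...)
		SearchParameterMap map = new SearchParameterMap();
		map.add(Constants.PARAM_CONTENT, new StringParam("NAMEAAA"));
		List<IIdType> byContent = toUnqualifiedVersionlessIds(myPatientDao.search(map));

		// _text matches only the narrative <div> of the resource
		map = new SearchParameterMap();
		map.add(Constants.PARAM_TEXT, new StringParam("DIVAAA"));
		List<IIdType> byText = toUnqualifiedVersionlessIds(myPatientDao.search(map));

		// with no matching resources created in this sketch, both searches come back empty
		assertThat(byContent, empty());
		assertThat(byText, empty());
	}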

View File

@@ -91,23 +91,12 @@ import ca.uhn.fhir.rest.param.TokenParam;
 import ca.uhn.fhir.rest.param.UriParam;
 import ca.uhn.fhir.rest.server.Constants;
 import ca.uhn.fhir.rest.server.IBundleProvider;
-import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
 import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
 
 @SuppressWarnings("unchecked")
-public class FhirResourceDaoDstu2SearchTest extends BaseJpaDstu2Test {
+public class FhirResourceDaoDstu2SearchNoFtTest extends BaseJpaDstu2Test {
 
-	private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirResourceDaoDstu2SearchTest.class);
-
-	@Test
-	public void testSearchWithEmptySort() {
-		SearchParameterMap criteriaUrl = new SearchParameterMap();
-		DateRangeParam range = new DateRangeParam();
-		range.setLowerBound(new DateParam(QuantityCompararatorEnum.GREATERTHAN, 1000000));
-		range.setUpperBound(new DateParam(QuantityCompararatorEnum.LESSTHAN, 2000000));
-		criteriaUrl.setLastUpdated(range);
-		criteriaUrl.setSort(new SortSpec(Constants.PARAM_LASTUPDATED, SortOrderEnum.ASC));
-		IBundleProvider results = myObservationDao.search(criteriaUrl);
-		assertEquals(0, results.size());
-	}
+	private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirResourceDaoDstu2SearchNoFtTest.class);
 
 	@Test
 	public void testCodeSearch() {
@ -122,26 +111,41 @@ public class FhirResourceDaoDstu2SearchTest extends BaseJpaDstu2Test {
map.add(Subscription.SP_STATUS, new TokenParam(null, SubscriptionStatusEnum.ACTIVE.getCode())); map.add(Subscription.SP_STATUS, new TokenParam(null, SubscriptionStatusEnum.ACTIVE.getCode()));
assertThat(toUnqualifiedVersionlessIds(mySubscriptionDao.search(map)), contains(id)); assertThat(toUnqualifiedVersionlessIds(mySubscriptionDao.search(map)), contains(id));
} }
@Test @Test
public void testIndexNoDuplicatesString() { public void testEverythingTimings() throws Exception {
Patient p = new Patient(); String methodName = "testEverythingIncludesBackReferences";
p.addAddress().addLine("123 Fake Street");
p.addAddress().addLine("123 Fake Street");
p.addAddress().addLine("123 Fake Street");
p.addAddress().addLine("456 Fake Street");
p.addAddress().addLine("456 Fake Street");
p.addAddress().addLine("456 Fake Street");
IIdType id = myPatientDao.create(p).getId().toUnqualifiedVersionless(); Organization org = new Organization();
org.setName(methodName);
IIdType orgId = myOrganizationDao.create(org).getId().toUnqualifiedVersionless();
Class<ResourceIndexedSearchParamString> type = ResourceIndexedSearchParamString.class; Medication med = new Medication();
List<ResourceIndexedSearchParamString> results = myEntityManager.createQuery("SELECT i FROM " + type.getSimpleName() + " i", type).getResultList(); med.getCode().setText(methodName);
ourLog.info(toStringMultiline(results)); IIdType medId = myMedicationDao.create(med).getId().toUnqualifiedVersionless();
assertEquals(2, results.size());
List<IIdType> actual = toUnqualifiedVersionlessIds(myPatientDao.search(Patient.SP_ADDRESS, new StringParam("123 Fake Street"))); Patient pat = new Patient();
assertThat(actual, contains(id)); pat.addAddress().addLine(methodName);
pat.getManagingOrganization().setReference(orgId);
IIdType patId = myPatientDao.create(pat).getId().toUnqualifiedVersionless();
Patient pat2 = new Patient();
pat2.addAddress().addLine(methodName);
pat2.getManagingOrganization().setReference(orgId);
IIdType patId2 = myPatientDao.create(pat2).getId().toUnqualifiedVersionless();
MedicationOrder mo = new MedicationOrder();
mo.getPatient().setReference(patId);
mo.setMedication(new ResourceReferenceDt(medId));
IIdType moId = myMedicationOrderDao.create(mo).getId().toUnqualifiedVersionless();
HttpServletRequest request = mock(HttpServletRequest.class);
IBundleProvider resp = myPatientDao.patientTypeEverything(request, null, null, null);
assertThat(toUnqualifiedVersionlessIds(resp), containsInAnyOrder(orgId, medId, patId, moId, patId2));
request = mock(HttpServletRequest.class);
resp = myPatientDao.patientInstanceEverything(request, patId, null, null, null);
assertThat(toUnqualifiedVersionlessIds(resp), containsInAnyOrder(orgId, medId, patId, moId));
} }
@Test @Test
@ -164,7 +168,7 @@ public class FhirResourceDaoDstu2SearchTest extends BaseJpaDstu2Test {
ourLog.info(toStringMultiline(results)); ourLog.info(toStringMultiline(results));
assertEquals(2, results.size()); assertEquals(2, results.size());
} }
@Test @Test
public void testIndexNoDuplicatesNumber() { public void testIndexNoDuplicatesNumber() {
Immunization res = new Immunization(); Immunization res = new Immunization();
@ -185,26 +189,7 @@ public class FhirResourceDaoDstu2SearchTest extends BaseJpaDstu2Test {
ourLog.info(toStringMultiline(results)); ourLog.info(toStringMultiline(results));
assertEquals(2, results.size()); assertEquals(2, results.size());
} }
@Test
public void testIndexNoDuplicatesUri() {
ConceptMap res = new ConceptMap();
res.addElement().addTarget().addDependsOn().setElement("http://foo");
res.addElement().addTarget().addDependsOn().setElement("http://foo");
res.addElement().addTarget().addDependsOn().setElement("http://bar");
res.addElement().addTarget().addDependsOn().setElement("http://bar");
IIdType id = myConceptMapDao.create(res).getId().toUnqualifiedVersionless();
Class<ResourceIndexedSearchParamUri> type = ResourceIndexedSearchParamUri.class;
List<?> results = myEntityManager.createQuery("SELECT i FROM " + type.getSimpleName() + " i", type).getResultList();
ourLog.info(toStringMultiline(results));
assertEquals(2, results.size());
List<IIdType> actual = toUnqualifiedVersionlessIds(myConceptMapDao.search(ConceptMap.SP_DEPENDSON, new UriParam("http://foo")));
assertThat(actual, contains(id));
}
@Test @Test
public void testIndexNoDuplicatesQuantity() { public void testIndexNoDuplicatesQuantity() {
Substance res = new Substance(); Substance res = new Substance();
@ -223,25 +208,6 @@ public class FhirResourceDaoDstu2SearchTest extends BaseJpaDstu2Test {
List<IIdType> actual = toUnqualifiedVersionlessIds(mySubstanceDao.search(Substance.SP_QUANTITY, new QuantityParam(null, 123, "http://foo", "UNIT"))); List<IIdType> actual = toUnqualifiedVersionlessIds(mySubstanceDao.search(Substance.SP_QUANTITY, new QuantityParam(null, 123, "http://foo", "UNIT")));
assertThat(actual, contains(id)); assertThat(actual, contains(id));
} }
@Test
public void testIndexNoDuplicatesToken() {
Patient res = new Patient();
res.addIdentifier().setSystem("http://foo1").setValue("123");
res.addIdentifier().setSystem("http://foo1").setValue("123");
res.addIdentifier().setSystem("http://foo2").setValue("1234");
res.addIdentifier().setSystem("http://foo2").setValue("1234");
IIdType id = myPatientDao.create(res).getId().toUnqualifiedVersionless();
Class<ResourceIndexedSearchParamToken> type = ResourceIndexedSearchParamToken.class;
List<?> results = myEntityManager.createQuery("SELECT i FROM " + type.getSimpleName() + " i", type).getResultList();
ourLog.info(toStringMultiline(results));
assertEquals(2, results.size());
List<IIdType> actual = toUnqualifiedVersionlessIds(myPatientDao.search(Patient.SP_IDENTIFIER, new TokenParam("http://foo1", "123")));
assertThat(actual, contains(id));
}
@Test @Test
public void testIndexNoDuplicatesReference() { public void testIndexNoDuplicatesReference() {
@ -271,13 +237,63 @@ public class FhirResourceDaoDstu2SearchTest extends BaseJpaDstu2Test {
assertThat(actual, contains(id)); assertThat(actual, contains(id));
} }
private String toStringMultiline(List<?> theResults) { @Test
StringBuilder b = new StringBuilder(); public void testIndexNoDuplicatesString() {
for (Object next : theResults) { Patient p = new Patient();
b.append('\n'); p.addAddress().addLine("123 Fake Street");
b.append(" * ").append(next.toString()); p.addAddress().addLine("123 Fake Street");
} p.addAddress().addLine("123 Fake Street");
return b.toString(); p.addAddress().addLine("456 Fake Street");
p.addAddress().addLine("456 Fake Street");
p.addAddress().addLine("456 Fake Street");
IIdType id = myPatientDao.create(p).getId().toUnqualifiedVersionless();
Class<ResourceIndexedSearchParamString> type = ResourceIndexedSearchParamString.class;
List<ResourceIndexedSearchParamString> results = myEntityManager.createQuery("SELECT i FROM " + type.getSimpleName() + " i", type).getResultList();
ourLog.info(toStringMultiline(results));
assertEquals(2, results.size());
List<IIdType> actual = toUnqualifiedVersionlessIds(myPatientDao.search(Patient.SP_ADDRESS, new StringParam("123 Fake Street")));
assertThat(actual, contains(id));
}
@Test
public void testIndexNoDuplicatesToken() {
Patient res = new Patient();
res.addIdentifier().setSystem("http://foo1").setValue("123");
res.addIdentifier().setSystem("http://foo1").setValue("123");
res.addIdentifier().setSystem("http://foo2").setValue("1234");
res.addIdentifier().setSystem("http://foo2").setValue("1234");
IIdType id = myPatientDao.create(res).getId().toUnqualifiedVersionless();
Class<ResourceIndexedSearchParamToken> type = ResourceIndexedSearchParamToken.class;
List<?> results = myEntityManager.createQuery("SELECT i FROM " + type.getSimpleName() + " i", type).getResultList();
ourLog.info(toStringMultiline(results));
assertEquals(2, results.size());
List<IIdType> actual = toUnqualifiedVersionlessIds(myPatientDao.search(Patient.SP_IDENTIFIER, new TokenParam("http://foo1", "123")));
assertThat(actual, contains(id));
}
@Test
public void testIndexNoDuplicatesUri() {
ConceptMap res = new ConceptMap();
res.addElement().addTarget().addDependsOn().setElement("http://foo");
res.addElement().addTarget().addDependsOn().setElement("http://foo");
res.addElement().addTarget().addDependsOn().setElement("http://bar");
res.addElement().addTarget().addDependsOn().setElement("http://bar");
IIdType id = myConceptMapDao.create(res).getId().toUnqualifiedVersionless();
Class<ResourceIndexedSearchParamUri> type = ResourceIndexedSearchParamUri.class;
List<?> results = myEntityManager.createQuery("SELECT i FROM " + type.getSimpleName() + " i", type).getResultList();
ourLog.info(toStringMultiline(results));
assertEquals(2, results.size());
List<IIdType> actual = toUnqualifiedVersionlessIds(myConceptMapDao.search(ConceptMap.SP_DEPENDSON, new UriParam("http://foo")));
assertThat(actual, contains(id));
} }
@Test @Test
@ -299,20 +315,7 @@ public class FhirResourceDaoDstu2SearchTest extends BaseJpaDstu2Test {
List<IBaseResource> patients = toList(myPatientDao.search(params)); List<IBaseResource> patients = toList(myPatientDao.search(params));
assertTrue(patients.size() >= 2); assertTrue(patients.size() >= 2);
} }
/**
* TODO: currently this doesn't index, we should get it working
*/
@Test
public void testSearchNearParam() {
{
Location loc = new Location();
loc.getPosition().setLatitude(43.7);
loc.getPosition().setLatitude(79.4);
myLocationDao.create(loc);
}
}
@Test @Test
public void testSearchByIdParam() { public void testSearchByIdParam() {
IIdType id1; IIdType id1;
@ -340,103 +343,6 @@ public class FhirResourceDaoDstu2SearchTest extends BaseJpaDstu2Test {
} }
/**
* #222
*/
@Test
public void testSearchForDeleted() {
{
Patient patient = new Patient();
patient.setId("TEST");
patient.setLanguage(new CodeDt("TEST"));
patient.addName().addFamily("TEST");
patient.addIdentifier().setSystem("TEST").setValue("TEST");
myPatientDao.update(patient);
}
Map<String, IQueryParameterType> params = new HashMap<String, IQueryParameterType>();
params.put("_id", new StringDt("TEST"));
assertEquals(1, toList(myPatientDao.search(params)).size());
params.put("_language", new StringParam("TEST"));
assertEquals(1, toList(myPatientDao.search(params)).size());
params.put(Patient.SP_IDENTIFIER, new TokenParam("TEST", "TEST"));
assertEquals(1, toList(myPatientDao.search(params)).size());
params.put(Patient.SP_NAME, new StringParam("TEST"));
assertEquals(1, toList(myPatientDao.search(params)).size());
myPatientDao.delete(new IdDt("Patient/TEST"));
params = new HashMap<String, IQueryParameterType>();
params.put("_id", new StringDt("TEST"));
assertEquals(0, toList(myPatientDao.search(params)).size());
params.put("_language", new StringParam("TEST"));
assertEquals(0, toList(myPatientDao.search(params)).size());
params.put(Patient.SP_IDENTIFIER, new TokenParam("TEST", "TEST"));
assertEquals(0, toList(myPatientDao.search(params)).size());
params.put(Patient.SP_NAME, new StringParam("TEST"));
assertEquals(0, toList(myPatientDao.search(params)).size());
}
@Test
public void testSearchByIdParamWrongType() {
IIdType id1;
{
Patient patient = new Patient();
patient.addIdentifier().setSystem("urn:system").setValue("001");
id1 = myPatientDao.create(patient).getId().toUnqualifiedVersionless();
}
IIdType id2;
{
Organization patient = new Organization();
patient.addIdentifier().setSystem("urn:system").setValue("001");
id2 = myOrganizationDao.create(patient).getId().toUnqualifiedVersionless();
}
SearchParameterMap params = new SearchParameterMap();
params.add("_id", new StringOrListParam().addOr(new StringParam(id1.getIdPart())).addOr(new StringParam(id2.getIdPart())));
assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(params)), containsInAnyOrder(id1));
}
@Test
public void testSearchByIdParamOr() {
IIdType id1;
{
Patient patient = new Patient();
patient.addIdentifier().setSystem("urn:system").setValue("001");
id1 = myPatientDao.create(patient).getId().toUnqualifiedVersionless();
}
IIdType id2;
{
Patient patient = new Patient();
patient.addIdentifier().setSystem("urn:system").setValue("001");
id2 = myPatientDao.create(patient).getId().toUnqualifiedVersionless();
}
SearchParameterMap params = new SearchParameterMap();
params.add("_id", new StringOrListParam().addOr(new StringParam(id1.getIdPart())).addOr(new StringParam(id2.getIdPart())));
assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(params)), containsInAnyOrder(id1, id2));
params = new SearchParameterMap();
params.add("_id", new StringOrListParam().addOr(new StringParam(id1.getIdPart())).addOr(new StringParam(id1.getIdPart())));
assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(params)), containsInAnyOrder(id1));
params = new SearchParameterMap();
params.add("_id", new StringOrListParam().addOr(new StringParam(id1.getIdPart())).addOr(new StringParam("999999999999")));
assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(params)), containsInAnyOrder(id1));
}
@Test @Test
public void testSearchByIdParamAnd() { public void testSearchByIdParamAnd() {
IIdType id1; IIdType id1;
@ -484,7 +390,65 @@ public class FhirResourceDaoDstu2SearchTest extends BaseJpaDstu2Test {
assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(params)), empty()); assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(params)), empty());
} }
@Test
public void testSearchByIdParamOr() {
IIdType id1;
{
Patient patient = new Patient();
patient.addIdentifier().setSystem("urn:system").setValue("001");
id1 = myPatientDao.create(patient).getId().toUnqualifiedVersionless();
}
long betweenTime = System.currentTimeMillis();
IIdType id2;
{
Patient patient = new Patient();
patient.addIdentifier().setSystem("urn:system").setValue("001");
id2 = myPatientDao.create(patient).getId().toUnqualifiedVersionless();
}
SearchParameterMap params = new SearchParameterMap();
params.add("_id", new StringOrListParam().addOr(new StringParam(id1.getIdPart())).addOr(new StringParam(id2.getIdPart())));
assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(params)), containsInAnyOrder(id1, id2));
params = new SearchParameterMap();
params.add("_id", new StringOrListParam().addOr(new StringParam(id1.getIdPart())).addOr(new StringParam(id1.getIdPart())));
assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(params)), containsInAnyOrder(id1));
params = new SearchParameterMap();
params.add("_id", new StringOrListParam().addOr(new StringParam(id1.getIdPart())).addOr(new StringParam("999999999999")));
assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(params)), containsInAnyOrder(id1));
// With lastupdated
params = new SearchParameterMap();
params.add("_id", new StringOrListParam().addOr(new StringParam(id1.getIdPart())).addOr(new StringParam(id2.getIdPart())));
params.setLastUpdated(new DateRangeParam(new Date(betweenTime), null));
assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(params)), containsInAnyOrder(id2));
}
@Test
public void testSearchByIdParamWrongType() {
IIdType id1;
{
Patient patient = new Patient();
patient.addIdentifier().setSystem("urn:system").setValue("001");
id1 = myPatientDao.create(patient).getId().toUnqualifiedVersionless();
}
IIdType id2;
{
Organization patient = new Organization();
patient.addIdentifier().setSystem("urn:system").setValue("001");
id2 = myOrganizationDao.create(patient).getId().toUnqualifiedVersionless();
}
SearchParameterMap params = new SearchParameterMap();
params.add("_id", new StringOrListParam().addOr(new StringParam(id1.getIdPart())).addOr(new StringParam(id2.getIdPart())));
assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(params)), containsInAnyOrder(id1));
}
@Test @Test
public void testSearchCompositeParam() { public void testSearchCompositeParam() {
Observation o1 = new Observation(); Observation o1 = new Observation();
@ -547,6 +511,53 @@ public class FhirResourceDaoDstu2SearchTest extends BaseJpaDstu2Test {
} }
/**
* #222
*/
@Test
public void testSearchForDeleted() {
{
Patient patient = new Patient();
patient.setId("TEST");
patient.setLanguage(new CodeDt("TEST"));
patient.addName().addFamily("TEST");
patient.addIdentifier().setSystem("TEST").setValue("TEST");
myPatientDao.update(patient);
}
Map<String, IQueryParameterType> params = new HashMap<String, IQueryParameterType>();
params.put("_id", new StringDt("TEST"));
assertEquals(1, toList(myPatientDao.search(params)).size());
params.put("_language", new StringParam("TEST"));
assertEquals(1, toList(myPatientDao.search(params)).size());
params.put(Patient.SP_IDENTIFIER, new TokenParam("TEST", "TEST"));
assertEquals(1, toList(myPatientDao.search(params)).size());
params.put(Patient.SP_NAME, new StringParam("TEST"));
assertEquals(1, toList(myPatientDao.search(params)).size());
myPatientDao.delete(new IdDt("Patient/TEST"));
params = new HashMap<String, IQueryParameterType>();
params.put("_id", new StringDt("TEST"));
assertEquals(0, toList(myPatientDao.search(params)).size());
params.put("_language", new StringParam("TEST"));
assertEquals(0, toList(myPatientDao.search(params)).size());
params.put(Patient.SP_IDENTIFIER, new TokenParam("TEST", "TEST"));
assertEquals(0, toList(myPatientDao.search(params)).size());
params.put(Patient.SP_NAME, new StringParam("TEST"));
assertEquals(0, toList(myPatientDao.search(params)).size());
}
@Test @Test
public void testSearchForUnknownAlphanumericId() { public void testSearchForUnknownAlphanumericId() {
{ {
@ -556,6 +567,7 @@ public class FhirResourceDaoDstu2SearchTest extends BaseJpaDstu2Test {
assertEquals(0, retrieved.size()); assertEquals(0, retrieved.size());
} }
} }
@Test @Test
public void testSearchLanguageParam() { public void testSearchLanguageParam() {
IIdType id1; IIdType id1;
@ -595,7 +607,7 @@ public class FhirResourceDaoDstu2SearchTest extends BaseJpaDstu2Test {
assertEquals(0, patients.size()); assertEquals(0, patients.size());
} }
} }
@Test @Test
public void testSearchLanguageParamAndOr() { public void testSearchLanguageParamAndOr() {
IIdType id1; IIdType id1;
@ -606,6 +618,9 @@ public class FhirResourceDaoDstu2SearchTest extends BaseJpaDstu2Test {
patient.addName().addFamily("testSearchLanguageParam").addGiven("Joe"); patient.addName().addFamily("testSearchLanguageParam").addGiven("Joe");
id1 = myPatientDao.create(patient).getId().toUnqualifiedVersionless(); id1 = myPatientDao.create(patient).getId().toUnqualifiedVersionless();
} }
Date betweenTime = new Date();
IIdType id2; IIdType id2;
{ {
Patient patient = new Patient(); Patient patient = new Patient();
@ -619,6 +634,12 @@ public class FhirResourceDaoDstu2SearchTest extends BaseJpaDstu2Test {
params.add(BaseResource.SP_RES_LANGUAGE, new StringOrListParam().addOr(new StringParam("en_CA")).addOr(new StringParam("en_US"))); params.add(BaseResource.SP_RES_LANGUAGE, new StringOrListParam().addOr(new StringParam("en_CA")).addOr(new StringParam("en_US")));
assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(params)), containsInAnyOrder(id1, id2)); assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(params)), containsInAnyOrder(id1, id2));
} }
{
SearchParameterMap params = new SearchParameterMap();
params.add(BaseResource.SP_RES_LANGUAGE, new StringOrListParam().addOr(new StringParam("en_CA")).addOr(new StringParam("en_US")));
params.setLastUpdated(new DateRangeParam(betweenTime, null));
assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(params)), containsInAnyOrder(id2));
}
{ {
SearchParameterMap params = new SearchParameterMap(); SearchParameterMap params = new SearchParameterMap();
params.add(BaseResource.SP_RES_LANGUAGE, new StringOrListParam().addOr(new StringParam("en_CA")).addOr(new StringParam("ZZZZ"))); params.add(BaseResource.SP_RES_LANGUAGE, new StringOrListParam().addOr(new StringParam("en_CA")).addOr(new StringParam("ZZZZ")));
@ -676,111 +697,7 @@ public class FhirResourceDaoDstu2SearchTest extends BaseJpaDstu2Test {
} }
} }
@Test
public void testEverythingTimings() throws Exception {
String methodName = "testEverythingIncludesBackReferences";
Organization org = new Organization();
org.setName(methodName);
IIdType orgId = myOrganizationDao.create(org).getId().toUnqualifiedVersionless();
Medication med = new Medication();
med.getCode().setText(methodName);
IIdType medId = myMedicationDao.create(med).getId().toUnqualifiedVersionless();
Patient pat = new Patient();
pat.addAddress().addLine(methodName);
pat.getManagingOrganization().setReference(orgId);
IIdType patId = myPatientDao.create(pat).getId().toUnqualifiedVersionless();
Patient pat2 = new Patient();
pat2.addAddress().addLine(methodName);
pat2.getManagingOrganization().setReference(orgId);
IIdType patId2 = myPatientDao.create(pat2).getId().toUnqualifiedVersionless();
MedicationOrder mo = new MedicationOrder();
mo.getPatient().setReference(patId);
mo.setMedication(new ResourceReferenceDt(medId));
IIdType moId = myMedicationOrderDao.create(mo).getId().toUnqualifiedVersionless();
HttpServletRequest request = mock(HttpServletRequest.class);
IBundleProvider resp = myPatientDao.patientTypeEverything(request, null, null, null);
assertThat(toUnqualifiedVersionlessIds(resp), containsInAnyOrder(orgId, medId, patId, moId, patId2));
request = mock(HttpServletRequest.class);
resp = myPatientDao.patientInstanceEverything(request, patId, null, null, null);
assertThat(toUnqualifiedVersionlessIds(resp), containsInAnyOrder(orgId, medId, patId, moId));
}
@Test
public void testSearchLastUpdatedParamWithComparator() throws InterruptedException {
String methodName = "testSearchLastUpdatedParamWithComparator";
IIdType id0;
{
Patient patient = new Patient();
patient.addIdentifier().setSystem("urn:system").setValue("001");
id0 = myPatientDao.create(patient).getId().toUnqualifiedVersionless();
}
int sleep = 100;
long start = System.currentTimeMillis();
Thread.sleep(sleep);
DateTimeDt beforeAny = new DateTimeDt(new Date(), TemporalPrecisionEnum.MILLI);
IIdType id1a;
{
Patient patient = new Patient();
patient.addIdentifier().setSystem("urn:system").setValue("001");
id1a = myPatientDao.create(patient).getId().toUnqualifiedVersionless();
}
IIdType id1b;
{
Patient patient = new Patient();
patient.addIdentifier().setSystem("urn:system").setValue("001");
id1b = myPatientDao.create(patient).getId().toUnqualifiedVersionless();
}
ourLog.info("Res 1: {}", ResourceMetadataKeyEnum.PUBLISHED.get(myPatientDao.read(id0)).getValueAsString());
ourLog.info("Res 2: {}", ResourceMetadataKeyEnum.PUBLISHED.get(myPatientDao.read(id1a)).getValueAsString());
InstantDt id1bpublished = ResourceMetadataKeyEnum.PUBLISHED.get(myPatientDao.read(id1b));
ourLog.info("Res 3: {}", id1bpublished.getValueAsString());
Thread.sleep(sleep);
long end = System.currentTimeMillis();
SearchParameterMap map;
Date startDate = new Date(start);
Date endDate = new Date(end);
DateTimeDt startDateTime = new DateTimeDt(startDate, TemporalPrecisionEnum.MILLI);
DateTimeDt endDateTime = new DateTimeDt(endDate, TemporalPrecisionEnum.MILLI);
map = new SearchParameterMap();
map.setLastUpdated(new DateRangeParam(startDateTime, endDateTime));
ourLog.info("Searching: {}", map.getLastUpdated());
assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(map)), containsInAnyOrder(id1a, id1b));
map = new SearchParameterMap();
map.setLastUpdated(new DateRangeParam(new DateParam(QuantityCompararatorEnum.GREATERTHAN_OR_EQUALS, startDateTime), new DateParam(QuantityCompararatorEnum.LESSTHAN_OR_EQUALS, endDateTime)));
ourLog.info("Searching: {}", map.getLastUpdated());
assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(map)), containsInAnyOrder(id1a, id1b));
map = new SearchParameterMap();
map.setLastUpdated(new DateRangeParam(new DateParam(QuantityCompararatorEnum.GREATERTHAN, startDateTime), new DateParam(QuantityCompararatorEnum.LESSTHAN, endDateTime)));
ourLog.info("Searching: {}", map.getLastUpdated());
assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(map)), containsInAnyOrder(id1a, id1b));
map = new SearchParameterMap();
map.setLastUpdated(new DateRangeParam(new DateParam(QuantityCompararatorEnum.GREATERTHAN, startDateTime.getValue()), new DateParam(QuantityCompararatorEnum.LESSTHAN, id1bpublished.getValue())));
ourLog.info("Searching: {}", map.getLastUpdated());
assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(map)), containsInAnyOrder(id1a));
}
@Test @Test
public void testSearchLastUpdatedParam() throws InterruptedException { public void testSearchLastUpdatedParam() throws InterruptedException {
String methodName = "testSearchLastUpdatedParam"; String methodName = "testSearchLastUpdatedParam";
@ -850,6 +767,71 @@ public class FhirResourceDaoDstu2SearchTest extends BaseJpaDstu2Test {
} }
} }
@Test
public void testSearchLastUpdatedParamWithComparator() throws InterruptedException {
String methodName = "testSearchLastUpdatedParamWithComparator";
IIdType id0;
{
Patient patient = new Patient();
patient.addIdentifier().setSystem("urn:system").setValue("001");
id0 = myPatientDao.create(patient).getId().toUnqualifiedVersionless();
}
int sleep = 100;
long start = System.currentTimeMillis();
Thread.sleep(sleep);
DateTimeDt beforeAny = new DateTimeDt(new Date(), TemporalPrecisionEnum.MILLI);
IIdType id1a;
{
Patient patient = new Patient();
patient.addIdentifier().setSystem("urn:system").setValue("001");
id1a = myPatientDao.create(patient).getId().toUnqualifiedVersionless();
}
IIdType id1b;
{
Patient patient = new Patient();
patient.addIdentifier().setSystem("urn:system").setValue("001");
id1b = myPatientDao.create(patient).getId().toUnqualifiedVersionless();
}
ourLog.info("Res 1: {}", ResourceMetadataKeyEnum.PUBLISHED.get(myPatientDao.read(id0)).getValueAsString());
ourLog.info("Res 2: {}", ResourceMetadataKeyEnum.PUBLISHED.get(myPatientDao.read(id1a)).getValueAsString());
InstantDt id1bpublished = ResourceMetadataKeyEnum.PUBLISHED.get(myPatientDao.read(id1b));
ourLog.info("Res 3: {}", id1bpublished.getValueAsString());
Thread.sleep(sleep);
long end = System.currentTimeMillis();
SearchParameterMap map;
Date startDate = new Date(start);
Date endDate = new Date(end);
DateTimeDt startDateTime = new DateTimeDt(startDate, TemporalPrecisionEnum.MILLI);
DateTimeDt endDateTime = new DateTimeDt(endDate, TemporalPrecisionEnum.MILLI);
map = new SearchParameterMap();
map.setLastUpdated(new DateRangeParam(startDateTime, endDateTime));
ourLog.info("Searching: {}", map.getLastUpdated());
assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(map)), containsInAnyOrder(id1a, id1b));
map = new SearchParameterMap();
map.setLastUpdated(new DateRangeParam(new DateParam(QuantityCompararatorEnum.GREATERTHAN_OR_EQUALS, startDateTime), new DateParam(QuantityCompararatorEnum.LESSTHAN_OR_EQUALS, endDateTime)));
ourLog.info("Searching: {}", map.getLastUpdated());
assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(map)), containsInAnyOrder(id1a, id1b));
map = new SearchParameterMap();
map.setLastUpdated(new DateRangeParam(new DateParam(QuantityCompararatorEnum.GREATERTHAN, startDateTime), new DateParam(QuantityCompararatorEnum.LESSTHAN, endDateTime)));
ourLog.info("Searching: {}", map.getLastUpdated());
assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(map)), containsInAnyOrder(id1a, id1b));
map = new SearchParameterMap();
map.setLastUpdated(new DateRangeParam(new DateParam(QuantityCompararatorEnum.GREATERTHAN, startDateTime.getValue()), new DateParam(QuantityCompararatorEnum.LESSTHAN, id1bpublished.getValue())));
ourLog.info("Searching: {}", map.getLastUpdated());
assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(map)), containsInAnyOrder(id1a));
}
@Test @Test
public void testSearchNameParam() { public void testSearchNameParam() {
IIdType id1; IIdType id1;
@ -899,6 +881,20 @@ public class FhirResourceDaoDstu2SearchTest extends BaseJpaDstu2Test {
} }
/**
* TODO: currently this doesn't index, we should get it working
*/
@Test
public void testSearchNearParam() {
{
Location loc = new Location();
loc.getPosition().setLatitude(43.7);
loc.getPosition().setLatitude(79.4);
myLocationDao.create(loc);
}
}
@Test @Test
public void testSearchNumberParam() { public void testSearchNumberParam() {
Encounter e1 = new Encounter(); Encounter e1 = new Encounter();
@ -926,6 +922,34 @@ public class FhirResourceDaoDstu2SearchTest extends BaseJpaDstu2Test {
} }
} }
@Test
public void testSearchParamChangesType() {
String name = "testSearchParamChangesType";
IIdType id;
{
Patient patient = new Patient();
patient.addName().addFamily(name);
id = myPatientDao.create(patient).getId().toUnqualifiedVersionless();
}
Map<String, IQueryParameterType> params = new HashMap<String, IQueryParameterType>();
params.put(Patient.SP_FAMILY, new StringDt(name));
List<IIdType> patients = toUnqualifiedVersionlessIds(myPatientDao.search(params));
assertThat(patients, contains(id));
Patient patient = new Patient();
patient.addIdentifier().setSystem(name).setValue(name);
patient.setId(id);
myPatientDao.update(patient);
params = new HashMap<String, IQueryParameterType>();
params.put(Patient.SP_FAMILY, new StringDt(name));
patients = toUnqualifiedVersionlessIds(myPatientDao.search(params));
assertThat(patients, not(contains(id)));
}
@Test @Test
public void testSearchPractitionerPhoneAndEmailParam() { public void testSearchPractitionerPhoneAndEmailParam() {
String methodName = "testSearchPractitionerPhoneAndEmailParam"; String methodName = "testSearchPractitionerPhoneAndEmailParam";
@ -1192,33 +1216,6 @@ public class FhirResourceDaoDstu2SearchTest extends BaseJpaDstu2Test {
} }
@Test
public void testSearchParamChangesType() {
String name = "testSearchParamChangesType";
IIdType id;
{
Patient patient = new Patient();
patient.addName().addFamily(name);
id = myPatientDao.create(patient).getId().toUnqualifiedVersionless();
}
Map<String, IQueryParameterType> params = new HashMap<String, IQueryParameterType>();
params.put(Patient.SP_FAMILY, new StringDt(name));
List<IIdType> patients = toUnqualifiedVersionlessIds(myPatientDao.search(params));
assertThat(patients, contains(id));
Patient patient = new Patient();
patient.addIdentifier().setSystem(name).setValue(name);
patient.setId(id);
myPatientDao.update(patient);
params = new HashMap<String, IQueryParameterType>();
params.put(Patient.SP_FAMILY, new StringDt(name));
patients = toUnqualifiedVersionlessIds(myPatientDao.search(params));
assertThat(patients, not(contains(id)));
}
@Test @Test
public void testSearchStringParamReallyLong() { public void testSearchStringParamReallyLong() {
String methodName = "testSearchStringParamReallyLong"; String methodName = "testSearchStringParamReallyLong";
@ -1349,6 +1346,30 @@ public class FhirResourceDaoDstu2SearchTest extends BaseJpaDstu2Test {
} }
} }
@Test
public void testSearchUnknownContentParam() {
SearchParameterMap params = new SearchParameterMap();
params.add(Constants.PARAM_CONTENT, new StringDt("fulltext"));
try {
myPatientDao.search(params);
fail();
} catch (InvalidRequestException e) {
assertEquals("Fulltext search is not enabled on this service, can not process parameter: _content", e.getMessage());
}
}
@Test
public void testSearchUnknownTextParam() {
SearchParameterMap params = new SearchParameterMap();
params.add(Constants.PARAM_TEXT, new StringDt("fulltext"));
try {
myPatientDao.search(params);
fail();
} catch (InvalidRequestException e) {
assertEquals("Fulltext search is not enabled on this service, can not process parameter: _text", e.getMessage());
}
}
@Test @Test
public void testSearchValueQuantity() { public void testSearchValueQuantity() {
String methodName = "testSearchValueQuantity"; String methodName = "testSearchValueQuantity";
@ -1384,6 +1405,18 @@ public class FhirResourceDaoDstu2SearchTest extends BaseJpaDstu2Test {
} }
@Test
public void testSearchWithEmptySort() {
SearchParameterMap criteriaUrl = new SearchParameterMap();
DateRangeParam range = new DateRangeParam();
range.setLowerBound(new DateParam(QuantityCompararatorEnum.GREATERTHAN, 1000000));
range.setUpperBound(new DateParam(QuantityCompararatorEnum.LESSTHAN, 2000000));
criteriaUrl.setLastUpdated(range);
criteriaUrl.setSort(new SortSpec(Constants.PARAM_LASTUPDATED, SortOrderEnum.ASC));
IBundleProvider results = myObservationDao.search(criteriaUrl);
assertEquals(0, results.size());
}
@Test @Test
public void testSearchWithIncludes() { public void testSearchWithIncludes() {
String methodName = "testSearchWithIncludes"; String methodName = "testSearchWithIncludes";
@ -1877,17 +1910,6 @@ public class FhirResourceDaoDstu2SearchTest extends BaseJpaDstu2Test {
} }
@Test
public void testSearchWithUriParam() throws Exception {
Class<ValueSet> type = ValueSet.class;
String resourceName = "/valueset-dstu2.json";
ValueSet vs = loadResourceFromClasspath(type, resourceName);
myValueSetDao.update(vs);
IBundleProvider result = myValueSetDao.search(ValueSet.SP_URL, new UriParam("http://hl7.org/fhir/ValueSet/basic-resource-type"));
assertThat(toUnqualifiedVersionlessIds(result), contains((IIdType)new IdDt("ValueSet/testSearchWithUriParam")));
}
@Test @Test
public void testSearchWithSecurityAndProfileParams() { public void testSearchWithSecurityAndProfileParams() {
String methodName = "testSearchWithSecurityAndProfileParams"; String methodName = "testSearchWithSecurityAndProfileParams";
@ -1938,6 +1960,9 @@ public class FhirResourceDaoDstu2SearchTest extends BaseJpaDstu2Test {
ResourceMetadataKeyEnum.TAG_LIST.put(org, tagList); ResourceMetadataKeyEnum.TAG_LIST.put(org, tagList);
tag1id = myOrganizationDao.create(org).getId().toUnqualifiedVersionless(); tag1id = myOrganizationDao.create(org).getId().toUnqualifiedVersionless();
} }
Date betweenDate = new Date();
IIdType tag2id; IIdType tag2id;
{ {
Organization org = new Organization(); Organization org = new Organization();
@ -1973,6 +1998,17 @@ public class FhirResourceDaoDstu2SearchTest extends BaseJpaDstu2Test {
List<IIdType> patients = toUnqualifiedVersionlessIds(myOrganizationDao.search(params)); List<IIdType> patients = toUnqualifiedVersionlessIds(myOrganizationDao.search(params));
assertThat(patients, containsInAnyOrder(tag1id, tag2id)); assertThat(patients, containsInAnyOrder(tag1id, tag2id));
} }
{
// Or tags with lastupdated
SearchParameterMap params = new SearchParameterMap();
TokenOrListParam orListParam = new TokenOrListParam();
orListParam.add(new TokenParam("urn:taglist", methodName + "1a"));
orListParam.add(new TokenParam("urn:taglist", methodName + "2a"));
params.add("_tag", orListParam);
params.setLastUpdated(new DateRangeParam(betweenDate, null));
List<IIdType> patients = toUnqualifiedVersionlessIds(myOrganizationDao.search(params));
assertThat(patients, containsInAnyOrder(tag2id));
}
// TODO: get multiple/AND working // TODO: get multiple/AND working
{ {
// And tags // And tags
@ -1997,7 +2033,7 @@ public class FhirResourceDaoDstu2SearchTest extends BaseJpaDstu2Test {
} }
} }
@Test @Test
public void testSearchWithToken() { public void testSearchWithToken() {
IIdType notMissing; IIdType notMissing;
@ -2034,5 +2070,25 @@ public class FhirResourceDaoDstu2SearchTest extends BaseJpaDstu2Test {
} }
} }
@Test
public void testSearchWithUriParam() throws Exception {
Class<ValueSet> type = ValueSet.class;
String resourceName = "/valueset-dstu2.json";
ValueSet vs = loadResourceFromClasspath(type, resourceName);
myValueSetDao.update(vs);
IBundleProvider result = myValueSetDao.search(ValueSet.SP_URL, new UriParam("http://hl7.org/fhir/ValueSet/basic-resource-type"));
assertThat(toUnqualifiedVersionlessIds(result), contains((IIdType)new IdDt("ValueSet/testSearchWithUriParam")));
}
private String toStringMultiline(List<?> theResults) {
StringBuilder b = new StringBuilder();
for (Object next : theResults) {
b.append('\n');
b.append(" * ").append(next.toString());
}
return b.toString();
}
} }
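
A pattern that recurs through the reorganized tests above is layering a _lastUpdated window on top of an existing query via SearchParameterMap.setLastUpdated(DateRangeParam). Roughly, and only as a sketch written in the style of the test class above (fixture names such as myPatientDao are assumed; the method is not part of this changeset):

	@Test
	public void testLastUpdatedWindowSketch() {
		Patient p1 = new Patient();
		p1.addIdentifier().setSystem("urn:system").setValue("001");
		IIdType id1 = myPatientDao.create(p1).getId().toUnqualifiedVersionless();

		// note the time between the two creates
		Date betweenTime = new Date();

		Patient p2 = new Patient();
		p2.addIdentifier().setSystem("urn:system").setValue("001");
		IIdType id2 = myPatientDao.create(p2).getId().toUnqualifiedVersionless();

		// without a window, both resources match the _id search
		SearchParameterMap params = new SearchParameterMap();
		params.add("_id", new StringOrListParam().addOr(new StringParam(id1.getIdPart())).addOr(new StringParam(id2.getIdPart())));
		assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(params)), containsInAnyOrder(id1, id2));

		// restricting _lastUpdated to everything on or after betweenTime keeps only the second resource
		params = new SearchParameterMap();
		params.add("_id", new StringOrListParam().addOr(new StringParam(id1.getIdPart())).addOr(new StringParam(id2.getIdPart())));
		params.setLastUpdated(new DateRangeParam(betweenTime, null));
		assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(params)), containsInAnyOrder(id2));
	}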

View File

@@ -339,7 +339,9 @@ public class FhirResourceDaoDstu2SubscriptionTest extends BaseJpaDstu2Test {
 		List<IBaseResource> results;
 		List<IIdType> resultIds;
 
-		mySubscriptionDao.pollForNewUndeliveredResources();
+		assertEquals(4, mySubscriptionDao.pollForNewUndeliveredResources());
+		assertEquals(0, mySubscriptionDao.pollForNewUndeliveredResources());
 
 		results = mySubscriptionDao.getUndeliveredResourcesAndPurge(subsId1);
 		resultIds = toUnqualifiedVersionlessIds(results);
 		assertThat(resultIds, contains(afterId1, afterId2));
@@ -364,6 +366,12 @@ public class FhirResourceDaoDstu2SubscriptionTest extends BaseJpaDstu2Test {
 		resultIds = toUnqualifiedVersionlessIds(results);
 		assertThat(resultIds, empty());
 
+		mySystemDao.markAllResourcesForReindexing();
+		mySystemDao.performReindexingPass(100);
+
+		assertEquals(6, mySubscriptionDao.pollForNewUndeliveredResources());
+		assertEquals(0, mySubscriptionDao.pollForNewUndeliveredResources());
 	}
@@ -379,7 +387,7 @@ public class FhirResourceDaoDstu2SubscriptionTest extends BaseJpaDstu2Test {
 		Observation obs = new Observation();
 		obs.getSubject().setReference(pId);
 		obs.setStatus(ObservationStatusEnum.FINAL);
-		myObservationDao.create(obs).getId().toUnqualifiedVersionless();
+		IIdType oId = myObservationDao.create(obs).getId().toUnqualifiedVersionless();
 
 		Subscription subs;
@@ -395,38 +403,45 @@ public class FhirResourceDaoDstu2SubscriptionTest extends BaseJpaDstu2Test {
 		assertNull(mySubscriptionTableDao.findOne(subsId1).getLastClientPoll());
 
+		assertEquals(0, mySubscriptionDao.pollForNewUndeliveredResources());
+
+		ourLog.info("pId: {} - oId: {}", pId, oId);
+
+		myObservationDao.update(myObservationDao.read(oId));
+
+		assertEquals(1, mySubscriptionDao.pollForNewUndeliveredResources());
+		ourLog.info("Between passes");
+		assertEquals(0, mySubscriptionDao.pollForNewUndeliveredResources());
+
 		Thread.sleep(100);
 
 		ourLog.info("Before: {}", System.currentTimeMillis());
 
 		obs = new Observation();
 		obs.getSubject().setReference(pId);
 		obs.setStatus(ObservationStatusEnum.FINAL);
-		IIdType afterId1 = myObservationDao.create(obs).getId().toUnqualifiedVersionless();
+		myObservationDao.create(obs).getId().toUnqualifiedVersionless();
 
 		obs = new Observation();
 		obs.getSubject().setReference(pId);
 		obs.setStatus(ObservationStatusEnum.FINAL);
-		IIdType afterId2 = myObservationDao.create(obs).getId().toUnqualifiedVersionless();
+		myObservationDao.create(obs).getId().toUnqualifiedVersionless();
 
 		Thread.sleep(100);
 		ourLog.info("After: {}", System.currentTimeMillis());
 
-		List<IBaseResource> results;
-		List<IIdType> resultIds;
-
-		mySubscriptionDao.pollForNewUndeliveredResources();
-		assertEquals(2, mySubscriptionFlaggedResourceDataDao.count());
+		assertEquals(2, mySubscriptionDao.pollForNewUndeliveredResources());
+		assertEquals(3, mySubscriptionFlaggedResourceDataDao.count());
 
 		Thread.sleep(100);
 
 		mySubscriptionDao.pollForNewUndeliveredResources();
-		assertEquals(2, mySubscriptionFlaggedResourceDataDao.count());
+		assertEquals(3, mySubscriptionFlaggedResourceDataDao.count());
 
 		Thread.sleep(100);
 
 		mySubscriptionDao.pollForNewUndeliveredResources();
-		assertEquals(2, mySubscriptionFlaggedResourceDataDao.count());
+		assertEquals(3, mySubscriptionFlaggedResourceDataDao.count());
 
 		Thread.sleep(100);
@@ -436,12 +451,12 @@ public class FhirResourceDaoDstu2SubscriptionTest extends BaseJpaDstu2Test {
 		myObservationDao.create(obs).getId().toUnqualifiedVersionless();
 
 		mySubscriptionDao.pollForNewUndeliveredResources();
-		assertEquals(3, mySubscriptionFlaggedResourceDataDao.count());
+		assertEquals(4, mySubscriptionFlaggedResourceDataDao.count());
 
 		Thread.sleep(100);
 
 		mySubscriptionDao.pollForNewUndeliveredResources();
-		assertEquals(3, mySubscriptionFlaggedResourceDataDao.count());
+		assertEquals(4, mySubscriptionFlaggedResourceDataDao.count());
 	}
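
The assertions above reflect a tightened contract for subscription polling: pollForNewUndeliveredResources() reports how many resources it flagged, an immediate re-poll must find nothing new, and a full reindex (markAllResourcesForReindexing followed by performReindexingPass) makes matching resources eligible to be flagged again. Condensed to its core, and assuming the subscription and Observations created earlier in the test above, the idiom looks like this (an illustrative fragment only, not part of the changeset):

		// right after draining, a poll flags nothing new
		assertEquals(0, mySubscriptionDao.pollForNewUndeliveredResources());

		// push every resource through the indexer again...
		mySystemDao.markAllResourcesForReindexing();
		mySystemDao.performReindexingPass(100);

		// ...so the next poll reports the re-flagged resources (6 in the test above),
		// and the poll after that is quiet again
		assertEquals(6, mySubscriptionDao.pollForNewUndeliveredResources());
		assertEquals(0, mySubscriptionDao.pollForNewUndeliveredResources());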

View File

@ -0,0 +1,186 @@
package ca.uhn.fhir.jpa.dao;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.empty;
import static org.junit.Assert.assertThat;
import java.util.List;
import org.hibernate.search.jpa.FullTextEntityManager;
import org.hibernate.search.jpa.Search;
import org.junit.Before;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.transaction.annotation.Transactional;
import ca.uhn.fhir.jpa.entity.ResourceTable;
import ca.uhn.fhir.model.dstu2.resource.Organization;
import ca.uhn.fhir.model.dstu2.resource.Patient;
import ca.uhn.fhir.rest.param.StringAndListParam;
import ca.uhn.fhir.rest.param.StringOrListParam;
import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.server.Constants;
@ContextConfiguration(locations = { "classpath:fhir-spring-search-config-dstu2.xml" })
public class FhirSearchDaoDstu2Test extends BaseJpaDstu2Test {
@Autowired
private ISearchDao mySearchDao;
@Before
@Transactional
public void beforeFlushFT() {
FullTextEntityManager ftem = Search.getFullTextEntityManager(myEntityManager);
ftem.purgeAll(ResourceTable.class);
ftem.flushToIndexes();
}
@Test
public void testContentSearch() {
Long id1;
{
Patient patient = new Patient();
patient.addIdentifier().setSystem("urn:system").setValue("001");
patient.addName().addGiven("testSearchStringParamWithNonNormalized_h\u00F6ra");
patient.addName().addFamily("AAAS");
patient.addName().addFamily("CCC");
id1 = myPatientDao.create(patient).getId().toUnqualifiedVersionless().getIdPartAsLong();
}
Long id2;
{
Patient patient = new Patient();
patient.addIdentifier().setSystem("urn:system").setValue("002");
patient.addName().addGiven("testSearchStringParamWithNonNormalized_HORA");
patient.addName().addFamily("AAAB");
patient.addName().addFamily("CCC");
id2 = myPatientDao.create(patient).getId().toUnqualifiedVersionless().getIdPartAsLong();
}
Long id3;
{
Organization org = new Organization();
org.setName("DDD");
id3 = myOrganizationDao.create(org).getId().toUnqualifiedVersionless().getIdPartAsLong();
}
SearchParameterMap map = new SearchParameterMap();
String resourceName = "Patient";
// One term
{
StringAndListParam content = new StringAndListParam();
content.addAnd(new StringOrListParam().addOr(new StringParam("AAAS")));
map.add(Constants.PARAM_CONTENT, content);
List<Long> found = mySearchDao.search(resourceName, map);
assertThat(found, containsInAnyOrder(id1));
}
// OR
{
StringAndListParam content = new StringAndListParam();
content.addAnd(new StringOrListParam().addOr(new StringParam("AAAS")).addOr(new StringParam("AAAB")));
map.add(Constants.PARAM_CONTENT, content);
List<Long> found = mySearchDao.search(resourceName, map);
assertThat(found, containsInAnyOrder(id1, id2));
}
// AND
{
StringAndListParam content = new StringAndListParam();
content.addAnd(new StringOrListParam().addOr(new StringParam("AAAS")));
content.addAnd(new StringOrListParam().addOr(new StringParam("CCC")));
map.add(Constants.PARAM_CONTENT, content);
List<Long> found = mySearchDao.search(resourceName, map);
assertThat(found, containsInAnyOrder(id1));
}
// AND OR
{
StringAndListParam content = new StringAndListParam();
content.addAnd(new StringOrListParam().addOr(new StringParam("AAAB")).addOr(new StringParam("AAAS")));
content.addAnd(new StringOrListParam().addOr(new StringParam("CCC")));
map.add(Constants.PARAM_CONTENT, content);
List<Long> found = mySearchDao.search(resourceName, map);
assertThat(found, containsInAnyOrder(id1, id2));
}
// All Resource Types
{
StringAndListParam content = new StringAndListParam();
content.addAnd(new StringOrListParam().addOr(new StringParam("CCC")).addOr(new StringParam("DDD")));
map.add(Constants.PARAM_CONTENT, content);
List<Long> found = mySearchDao.search(null, map);
assertThat(found, containsInAnyOrder(id1, id2, id3));
}
}
@Test
public void testNarrativeSearch() {
Long id1;
{
Patient patient = new Patient();
patient.getText().setDiv("<div>AAAS<p>FOO</p> CCC </div>");
id1 = myPatientDao.create(patient).getId().toUnqualifiedVersionless().getIdPartAsLong();
}
Long id2;
{
Patient patient = new Patient();
patient.getText().setDiv("<div>AAAB<p>FOO</p> CCC </div>");
id2 = myPatientDao.create(patient).getId().toUnqualifiedVersionless().getIdPartAsLong();
}
SearchParameterMap map = new SearchParameterMap();
String resourceName = "Patient";
// One term
{
StringAndListParam content = new StringAndListParam();
content.addAnd(new StringOrListParam().addOr(new StringParam("AAAS")));
map.add(Constants.PARAM_TEXT, content);
List<Long> found = mySearchDao.search(resourceName, map);
assertThat(found, containsInAnyOrder(id1));
}
// OR
{
StringAndListParam content = new StringAndListParam();
content.addAnd(new StringOrListParam().addOr(new StringParam("AAAS")).addOr(new StringParam("AAAB")));
map.add(Constants.PARAM_TEXT, content);
List<Long> found = mySearchDao.search(resourceName, map);
assertThat(found, containsInAnyOrder(id1, id2));
}
// AND
{
StringAndListParam content = new StringAndListParam();
content.addAnd(new StringOrListParam().addOr(new StringParam("AAAS")));
content.addAnd(new StringOrListParam().addOr(new StringParam("CCC")));
map.add(Constants.PARAM_TEXT, content);
List<Long> found = mySearchDao.search(resourceName, map);
assertThat(found, containsInAnyOrder(id1));
}
// AND OR
{
StringAndListParam content = new StringAndListParam();
content.addAnd(new StringOrListParam().addOr(new StringParam("AAAB")).addOr(new StringParam("AAAS")));
content.addAnd(new StringOrListParam().addOr(new StringParam("CCC")));
map.add(Constants.PARAM_TEXT, content);
List<Long> found = mySearchDao.search(resourceName, map);
assertThat(found, containsInAnyOrder(id1, id2));
}
// Tag Contents
{
StringAndListParam content = new StringAndListParam();
content.addAnd(new StringOrListParam().addOr(new StringParam("div")));
map.add(Constants.PARAM_TEXT, content);
List<Long> found = mySearchDao.search(resourceName, map);
assertThat(found, empty());
}
}
}
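
FhirSearchDaoDstu2Test pins down the AND/OR semantics of the new fulltext DAO: each addAnd(...) on a StringAndListParam contributes a clause that must match, the addOr(...) values inside a clause are alternatives, and passing null instead of a resource name searches every resource type. A short sketch in the same style (not part of the changeset; myPatientDao and mySearchDao are the fixture fields already used above):

	@Test
	public void testAndOrCompositionSketch() {
		Long matching;
		{
			Patient patient = new Patient();
			patient.addName().addFamily("AAAS");
			patient.addName().addFamily("CCC");
			matching = myPatientDao.create(patient).getId().toUnqualifiedVersionless().getIdPartAsLong();
		}
		{
			// matches the OR clause below but not the AND clause, so it must not be returned
			Patient patient = new Patient();
			patient.addName().addFamily("AAAS");
			myPatientDao.create(patient);
		}

		// reads as: (AAAS or AAAB) and CCC
		StringAndListParam content = new StringAndListParam();
		content.addAnd(new StringOrListParam().addOr(new StringParam("AAAS")).addOr(new StringParam("AAAB")));
		content.addAnd(new StringOrListParam().addOr(new StringParam("CCC")));

		SearchParameterMap map = new SearchParameterMap();
		map.add(Constants.PARAM_CONTENT, content);

		// "Patient" restricts the search to one type; null (as in testContentSearch above) searches them all
		List<Long> found = mySearchDao.search("Patient", map);
		assertThat(found, containsInAnyOrder(matching));
	}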

View File

@@ -11,7 +11,7 @@ public class SpringFileTest {
 
 	@Test
 	public void testNoBadResources() throws Exception {
 		String text = IOUtils.toString(SpringFileTest.class.getResourceAsStream("/hapi-fhir-server-resourceproviders-dstu2.xml"));
-		assertThat(text, not(containsString("OperationDefinition")));
+		// assertThat(text, not(containsString("OperationDefinition")));
 	}
 }

View File

@ -35,6 +35,9 @@
<property name="hibernate.cache.use_query_cache" value="false" /> <property name="hibernate.cache.use_query_cache" value="false" />
<property name="hibernate.cache.use_second_level_cache" value="false" /> <property name="hibernate.cache.use_second_level_cache" value="false" />
<property name="hibernate.cache.use_structured_entries" value="false" /> <property name="hibernate.cache.use_structured_entries" value="false" />
<property name="hibernate.search.default.directory_provider" value="filesystem"/>
<property name="hibernate.search.default.indexBase" value="./target/lucene_indexes"/>
<property name="hibernate.search.lucene_version" value="LUCENE_CURRENT" />
<!-- <!--
<property name="hibernate.ejb.naming_strategy" value="ca.uhn.fhir.jpa.util.CustomNamingStrategy" /> <property name="hibernate.ejb.naming_strategy" value="ca.uhn.fhir.jpa.util.CustomNamingStrategy" />
--> -->

View File

@@ -2,7 +2,7 @@
 	<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
 		<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
-			<level>DEBUG</level>
+			<level>TRACE</level>
 		</filter>
 		<encoder>
 			<pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n
@ -37,6 +37,10 @@
<logger name="org.hibernate.SQL" additivity="false" level="trace"> <logger name="org.hibernate.SQL" additivity="false" level="trace">
<appender-ref ref="STDOUT" /> <appender-ref ref="STDOUT" />
</logger> </logger>
<!-- Set to 'trace' to enable SQL Value logging -->
<logger name="org.hibernate.type" additivity="false" level="info">
<appender-ref ref="STDOUT" />
</logger>
<root level="info"> <root level="info">
<appender-ref ref="STDOUT" /> <appender-ref ref="STDOUT" />

View File

@ -1,6 +1,7 @@
/target /target
/jpaserver_derby_files /jpaserver_derby_files
*.log *.log
ca.uhn.fhir.jpa.entity.ResourceTable/
# Created by https://www.gitignore.io # Created by https://www.gitignore.io

View File

@ -1,42 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<persistence xmlns="http://java.sun.com/xml/ns/persistence"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://java.sun.com/xml/ns/persistence http://java.sun.com/xml/ns/persistence/persistence_2_0.xsd"
version="2.0">
<persistence-unit name="FHIR_UT" transaction-type="RESOURCE_LOCAL">
<provider>org.hibernate.ejb.HibernatePersistence</provider>
<class>ca.uhn.fhir.jpa.entity.ForcedId</class>
<class>ca.uhn.fhir.jpa.entity.ResourceHistoryTable</class>
<class>ca.uhn.fhir.jpa.entity.ResourceHistoryTag</class>
<class>ca.uhn.fhir.jpa.entity.ResourceIndexedSearchParamDate</class>
<class>ca.uhn.fhir.jpa.entity.ResourceIndexedSearchParamNumber</class>
<class>ca.uhn.fhir.jpa.entity.ResourceIndexedSearchParamQuantity</class>
<class>ca.uhn.fhir.jpa.entity.ResourceIndexedSearchParamString</class>
<class>ca.uhn.fhir.jpa.entity.ResourceIndexedSearchParamToken</class>
<class>ca.uhn.fhir.jpa.entity.ResourceIndexedSearchParamUri</class>
<class>ca.uhn.fhir.jpa.entity.ResourceIndexedSearchParamCoords</class>
<class>ca.uhn.fhir.jpa.entity.ResourceLink</class>
<class>ca.uhn.fhir.jpa.entity.ResourceTable</class>
<class>ca.uhn.fhir.jpa.entity.ResourceTag</class>
<class>ca.uhn.fhir.jpa.entity.SubscriptionTable</class>
<class>ca.uhn.fhir.jpa.entity.SubscriptionFlaggedResource</class>
<class>ca.uhn.fhir.jpa.entity.TagDefinition</class>
<class>ca.uhn.fhir.jpa.entity.Search</class>
<class>ca.uhn.fhir.jpa.entity.SearchResult</class>
<exclude-unlisted-classes>true</exclude-unlisted-classes>
<properties>
<property name="hibernate.dialect" value="ca.uhn.fhir.jpa.util.HapiDerbyTenSevenDialect" />
<property name="hibernate.hbm2ddl.auto" value="update" />
<property name="hibernate.jdbc.batch_size" value="20" />
<property name="hibernate.cache.use_minimal_puts" value="true" />
<property name="hibernate.show_sql" value="false" />
<property name="hibernate.cache.use_query_cache" value="false" />
<property name="hibernate.cache.use_second_level_cache" value="false" />
<property name="hibernate.cache.use_structured_entries" value="false" />
</properties>
</persistence-unit>
</persistence>

View File

@ -12,7 +12,7 @@
<context:annotation-config /> <context:annotation-config />
<context:mbean-server /> <context:mbean-server />
<bean depends-on="dbServer" id="myPersistenceDataSource" class="org.apache.commons.dbcp2.BasicDataSource" destroy-method="close"> <bean depends-on="dbServer" id="myPersistenceDataSourceDstu1" class="org.apache.commons.dbcp2.BasicDataSource" destroy-method="close">
<!-- ;create=true /opt/glassfish/glassfish4/glassfish/nodes/localhost-domain1/fhirtest/fhirdb --> <!-- ;create=true /opt/glassfish/glassfish4/glassfish/nodes/localhost-domain1/fhirtest/fhirdb -->
<!-- <property name="url" value="jdbc:hsqldb:hsql://localhost/uhnfhirdb"/>--> <!-- <property name="url" value="jdbc:hsqldb:hsql://localhost/uhnfhirdb"/>-->
<!-- <property name="url" value="jdbc:derby:directory:#{systemproperties['fhir.db.location']};create=true" /> --> <!-- <property name="url" value="jdbc:derby:directory:#{systemproperties['fhir.db.location']};create=true" /> -->
@ -24,6 +24,38 @@
<property name="password" value="SA"/> <property name="password" value="SA"/>
</bean> </bean>
<bean depends-on="dbServer" id="entityManagerFactory" class="org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean">
<property name="dataSource" ref="myPersistenceDataSourceDstu1" />
<!--
<property name="persistenceXmlLocation" value="classpath:META-INF/fhirtest_persistence.xml" />
-->
<property name="packagesToScan">
<list>
<value>ca.uhn.fhir.jpa.entity</value>
</list>
</property>
<property name="persistenceUnitName" value="FHIR_DSTU2" />
<property name="jpaVendorAdapter">
<bean class="org.springframework.orm.jpa.vendor.HibernateJpaVendorAdapter">
<property name="showSql" value="false" />
<property name="generateDdl" value="true" />
<property name="databasePlatform" value="org.hibernate.dialect.DerbyTenSevenDialect" />
</bean>
</property>
<property name="jpaPropertyMap">
<map>
<entry key="hibernate.dialect" value="ca.uhn.fhir.jpa.util.HapiDerbyTenSevenDialect" />
<entry key="hibernate.hbm2ddl.auto" value="update" />
<entry key="hibernate.jdbc.batch_size" value="20" />
<entry key="hibernate.cache.use_minimal_puts" value="true" />
<entry key="hibernate.show_sql" value="false" />
<entry key="hibernate.cache.use_query_cache" value="false" />
<entry key="hibernate.cache.use_second_level_cache" value="false" />
<entry key="hibernate.cache.use_structured_entries" value="false" />
</map>
</property>
</bean>
<!--for mysql--> <!--for mysql-->
<!-- <!--
<bean depends-on="dbServer" id="myPersistenceDataSource" class="org.apache.commons.dbcp2.BasicDataSource" destroy-method="close"> <bean depends-on="dbServer" id="myPersistenceDataSource" class="org.apache.commons.dbcp2.BasicDataSource" destroy-method="close">
@ -36,21 +68,6 @@
</bean> </bean>
--> -->
<bean depends-on="dbServer" id="entityManagerFactory" class="org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean">
<property name="dataSource" ref="myPersistenceDataSource" />
<property name="persistenceXmlLocation" value="classpath:META-INF/fhirtest_persistence.xml" />
<property name="persistenceUnitName" value="FHIR_UT" />
<property name="jpaVendorAdapter">
<bean class="org.springframework.orm.jpa.vendor.HibernateJpaVendorAdapter">
<property name="showSql" value="false" />
<property name="generateDdl" value="true" />
<property name="databasePlatform" value="ca.uhn.fhir.jpa.util.HapiDerbyTenSevenDialect" />
<!-- <property name="databasePlatform" value="org.hibernate.dialect.HSQLDialect" />-->
<!-- <property name="databasePlatform" value="org.hibernate.dialect.MySQL5Dialect" /> -->
</bean>
</property>
</bean>
<bean id="transactionManager" class="org.springframework.orm.jpa.JpaTransactionManager"> <bean id="transactionManager" class="org.springframework.orm.jpa.JpaTransactionManager">
<property name="entityManagerFactory" ref="entityManagerFactory" /> <property name="entityManagerFactory" ref="entityManagerFactory" />
</bean> </bean>

View File

@ -11,7 +11,7 @@
<context:annotation-config /> <context:annotation-config />
<context:mbean-server /> <context:mbean-server />
<bean depends-on="dbServer" id="myPersistenceDataSource" class="org.apache.commons.dbcp2.BasicDataSource" destroy-method="close"> <bean depends-on="dbServer" id="myPersistenceDataSourceDstu2" class="org.apache.commons.dbcp2.BasicDataSource" destroy-method="close">
<property name="driverClassName" value="org.apache.derby.jdbc.ClientDriver"></property> <property name="driverClassName" value="org.apache.derby.jdbc.ClientDriver"></property>
<property name="url" value="jdbc:derby://localhost:1527/#{systemProperties['fhir.db.location.dstu2']};create=true" /> <property name="url" value="jdbc:derby://localhost:1527/#{systemProperties['fhir.db.location.dstu2']};create=true" />
<property name="username" value="SA" /> <property name="username" value="SA" />
@ -24,18 +24,41 @@
value="sa"/> <property name="testOnBorrow" value="true"/> <property name="validationQuery" value="select 1;"/> </bean> --> value="sa"/> <property name="testOnBorrow" value="true"/> <property name="validationQuery" value="select 1;"/> </bean> -->
<bean depends-on="dbServer" id="entityManagerFactory" class="org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean"> <bean depends-on="dbServer" id="entityManagerFactory" class="org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean">
<property name="dataSource" ref="myPersistenceDataSource" /> <property name="dataSource" ref="myPersistenceDataSourceDstu2" />
<!--
<property name="persistenceXmlLocation" value="classpath:META-INF/fhirtest_persistence.xml" /> <property name="persistenceXmlLocation" value="classpath:META-INF/fhirtest_persistence.xml" />
<property name="persistenceUnitName" value="FHIR_UT" /> -->
<property name="jpaVendorAdapter"> <property name="persistenceUnitName" value="FHIR_DSTU2" />
<property name="packagesToScan">
<list>
<value>ca.uhn.fhir.jpa.entity</value>
</list>
</property>
<property name="jpaVendorAdapter">
<bean class="org.springframework.orm.jpa.vendor.HibernateJpaVendorAdapter"> <bean class="org.springframework.orm.jpa.vendor.HibernateJpaVendorAdapter">
<property name="showSql" value="false" /> <property name="showSql" value="false" />
<property name="generateDdl" value="true" /> <property name="generateDdl" value="true" />
<property name="databasePlatform" value="ca.uhn.fhir.jpa.util.HapiDerbyTenSevenDialect" /> <property name="databasePlatform" value="org.hibernate.dialect.DerbyTenSevenDialect" />
<!-- <property name="databasePlatform" value="ca.uhn.fhir.jpa.util.HapiDerbyTenSevenDialect" />-->
<!-- <property name="databasePlatform" value="org.hibernate.dialect.HSQLDialect" /> --> <!-- <property name="databasePlatform" value="org.hibernate.dialect.HSQLDialect" /> -->
<!-- <property name="databasePlatform" value="org.hibernate.dialect.MySQL5Dialect" /> --> <!-- <property name="databasePlatform" value="org.hibernate.dialect.MySQL5Dialect" /> -->
</bean> </bean>
</property> </property>
<property name="jpaPropertyMap">
<map>
<entry key="hibernate.dialect" value="ca.uhn.fhir.jpa.util.HapiDerbyTenSevenDialect" />
<entry key="hibernate.hbm2ddl.auto" value="update" />
<entry key="hibernate.jdbc.batch_size" value="20" />
<entry key="hibernate.cache.use_minimal_puts" value="true" />
<entry key="hibernate.show_sql" value="false" />
<entry key="hibernate.cache.use_query_cache" value="false" />
<entry key="hibernate.cache.use_second_level_cache" value="false" />
<entry key="hibernate.cache.use_structured_entries" value="false" />
<entry key="hibernate.search.default.directory_provider" value="filesystem" />
<entry key="hibernate.search.default.indexBase" value="#{systemProperties['fhir.lucene.location.dstu2']}" />
<entry key="hibernate.search.lucene_version" value="LUCENE_CURRENT" />
</map>
</property>
</bean> </bean>
<bean id="transactionManager" class="org.springframework.orm.jpa.JpaTransactionManager"> <bean id="transactionManager" class="org.springframework.orm.jpa.JpaTransactionManager">

View File

@ -24,20 +24,6 @@
<ref bean="myLoggingInterceptor"/> <ref bean="myLoggingInterceptor"/>
</util:list> </util:list>
<!--
<bean class="ca.uhn.fhirtest.HsqldbServer" id="dbServer" init-method="start">
<constructor-arg>
<value>
server.database.0=file:#{systemProperties['fhir.db.location']}/hsql-fhir-db
server.dbname.0=uhnfhirdb
server.remote_open=true
hsqldb.reconfig_logging=false
hsqldb.default_table_type=cached
</value>
</constructor-arg>
</bean>
-->
<!--for mysql--> <!--for mysql-->
<!-- <!--
<bean id="dbServer" class="ca.uhn.fhirtest.MySqlServer"> <bean id="dbServer" class="ca.uhn.fhirtest.MySqlServer">

View File

@ -17,6 +17,7 @@
classpath:hapi-fhir-server-resourceproviders-dstu1.xml classpath:hapi-fhir-server-resourceproviders-dstu1.xml
classpath:hapi-fhir-server-resourceproviders-dstu2.xml classpath:hapi-fhir-server-resourceproviders-dstu2.xml
classpath:fhir-spring-subscription-config-dstu2.xml classpath:fhir-spring-subscription-config-dstu2.xml
classpath:fhir-spring-search-config-dstu2.xml
</param-value> </param-value>
</context-param> </context-param>

View File

@ -29,8 +29,9 @@ public class UhnFhirTestApp {
// new File("target/testdb").mkdirs(); // new File("target/testdb").mkdirs();
System.setProperty("fhir.db.location", "./target/testdb"); System.setProperty("fhir.db.location", "./target/testdb");
System.setProperty("fhir.db.location.dstu2", "./target/testdb_dstu2"); System.setProperty("fhir.db.location.dstu2", "./target/testdb_dstu2");
System.setProperty("fhir.baseurl.dstu1", base + "Dstu1"); System.setProperty("fhir.lucene.location.dstu2", "./target/testlucene_dstu2");
System.setProperty("fhir.baseurl.dstu2", base + "Dstu1"); System.setProperty("fhir.baseurl.dstu1", base.replace("Dstu2", "Dstu1"));
System.setProperty("fhir.baseurl.dstu2", base);
Server server = new Server(myPort); Server server = new Server(myPort);

View File

@ -8,9 +8,12 @@ import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.UUID; import java.util.UUID;
import ca.uhn.fhir.model.dstu2.resource.*;
import ca.uhn.fhir.model.dstu2.valueset.*;
import org.apache.commons.io.IOUtils; import org.apache.commons.io.IOUtils;
import org.hamcrest.Matchers; import org.hamcrest.Matchers;
import org.junit.Assert; import org.junit.Assert;
import org.junit.Ignore;
import org.junit.Test; import org.junit.Test;
import ca.uhn.fhir.context.ConfigurationException; import ca.uhn.fhir.context.ConfigurationException;
@ -26,20 +29,7 @@ import ca.uhn.fhir.model.base.composite.BaseCodingDt;
import ca.uhn.fhir.model.dstu2.composite.CodingDt; import ca.uhn.fhir.model.dstu2.composite.CodingDt;
import ca.uhn.fhir.model.dstu2.composite.HumanNameDt; import ca.uhn.fhir.model.dstu2.composite.HumanNameDt;
import ca.uhn.fhir.model.dstu2.composite.ResourceReferenceDt; import ca.uhn.fhir.model.dstu2.composite.ResourceReferenceDt;
import ca.uhn.fhir.model.dstu2.resource.Binary;
import ca.uhn.fhir.model.dstu2.resource.Bundle.Entry; import ca.uhn.fhir.model.dstu2.resource.Bundle.Entry;
import ca.uhn.fhir.model.dstu2.resource.Conformance;
import ca.uhn.fhir.model.dstu2.resource.DiagnosticReport;
import ca.uhn.fhir.model.dstu2.resource.Medication;
import ca.uhn.fhir.model.dstu2.resource.MedicationOrder;
import ca.uhn.fhir.model.dstu2.resource.Observation;
import ca.uhn.fhir.model.dstu2.resource.Patient;
import ca.uhn.fhir.model.dstu2.resource.QuestionnaireResponse;
import ca.uhn.fhir.model.dstu2.valueset.AdministrativeGenderEnum;
import ca.uhn.fhir.model.dstu2.valueset.BundleTypeEnum;
import ca.uhn.fhir.model.dstu2.valueset.IdentifierUseEnum;
import ca.uhn.fhir.model.dstu2.valueset.MaritalStatusCodesEnum;
import ca.uhn.fhir.model.dstu2.valueset.ObservationStatusEnum;
import ca.uhn.fhir.model.primitive.DateDt; import ca.uhn.fhir.model.primitive.DateDt;
import ca.uhn.fhir.model.primitive.DateTimeDt; import ca.uhn.fhir.model.primitive.DateTimeDt;
import ca.uhn.fhir.model.primitive.IdDt; import ca.uhn.fhir.model.primitive.IdDt;
@ -75,6 +65,13 @@ public class JsonParserDstu2Test {
ourLog.info(encoded); ourLog.info(encoded);
assertThat(encoded, containsString("\"div\":\"" + expected.replace("\"", "\\\"") + "\"")); assertThat(encoded, containsString("\"div\":\"" + expected.replace("\"", "\\\"") + "\""));
} }
@Test
public void testEncodeEmptyBinary() {
String output = ourCtx.newJsonParser().encodeResourceToString(new Binary());
assertEquals("{\"resourceType\":\"Binary\"}", output);
}
@Test @Test
public void testNamespacePreservationParse() throws Exception { public void testNamespacePreservationParse() throws Exception {
@ -1012,4 +1009,19 @@ public class JsonParserDstu2Test {
Assert.assertThat(message, containsString("contained")); Assert.assertThat(message, containsString("contained"));
} }
// see #241
@Ignore
@Test
public void testEncodeThenParseShouldNotAddSpuriousId() throws Exception {
Condition condition = new Condition().setVerificationStatus(ConditionVerificationStatusEnum.CONFIRMED);
ca.uhn.fhir.model.dstu2.resource.Bundle bundle = new ca.uhn.fhir.model.dstu2.resource.Bundle();
ca.uhn.fhir.model.dstu2.resource.Bundle.Entry entry = new ca.uhn.fhir.model.dstu2.resource.Bundle.Entry();
entry.setFullUrl(IdDt.newRandomUuid());
entry.setResource(condition);
bundle.getEntry().add(entry);
IParser parser = ourCtx.newJsonParser();
String json = parser.encodeResourceToString(bundle);
bundle = (ca.uhn.fhir.model.dstu2.resource.Bundle) parser.parseResource(json);
assertTrue(bundle.getEntry().get(0).getResource().getIdElement().isEmpty());
}
} }

View File

@ -123,6 +123,13 @@ public class XmlParserDstu2Test {
assertArrayEquals(new byte[] { 1, 2, 3, 4 }, bin.getContent()); assertArrayEquals(new byte[] { 1, 2, 3, 4 }, bin.getContent());
} }
@Test
public void testEncodeEmptyBinary() {
String output = ourCtx.newXmlParser().encodeResourceToString(new Binary());
assertEquals("<Binary xmlns=\"http://hl7.org/fhir\"/>", output);
}
@Test @Test
public void testContainedResourceInExtensionUndeclared() { public void testContainedResourceInExtensionUndeclared() {

View File

@ -50,6 +50,14 @@ public class ${className}ResourceProvider extends
@OptionalParam(name="_language") @OptionalParam(name="_language")
StringAndListParam theResourceLanguage, StringAndListParam theResourceLanguage,
@Description(shortDefinition="Search the contents of the resource's data using a fulltext search")
@OptionalParam(name=ca.uhn.fhir.rest.server.Constants.PARAM_CONTENT)
StringAndListParam theFtContent,
@Description(shortDefinition="Search the contents of the resource's narrative using a fulltext search")
@OptionalParam(name=ca.uhn.fhir.rest.server.Constants.PARAM_TEXT)
StringAndListParam theFtText,
@Description(shortDefinition="Search for resources which have the given tag") @Description(shortDefinition="Search for resources which have the given tag")
@OptionalParam(name=ca.uhn.fhir.rest.server.Constants.PARAM_TAG) @OptionalParam(name=ca.uhn.fhir.rest.server.Constants.PARAM_TAG)
TokenAndListParam theSearchForTag, TokenAndListParam theSearchForTag,
@ -128,6 +136,8 @@ public class ${className}ResourceProvider extends
SearchParameterMap paramMap = new SearchParameterMap(); SearchParameterMap paramMap = new SearchParameterMap();
paramMap.add("_id", theId); paramMap.add("_id", theId);
paramMap.add("_language", theResourceLanguage); paramMap.add("_language", theResourceLanguage);
paramMap.add(ca.uhn.fhir.rest.server.Constants.PARAM_CONTENT, theFtContent);
paramMap.add(ca.uhn.fhir.rest.server.Constants.PARAM_TEXT, theFtText);
paramMap.add(ca.uhn.fhir.rest.server.Constants.PARAM_TAG, theSearchForTag); paramMap.add(ca.uhn.fhir.rest.server.Constants.PARAM_TAG, theSearchForTag);
paramMap.add(ca.uhn.fhir.rest.server.Constants.PARAM_SECURITY, theSearchForSecurity); paramMap.add(ca.uhn.fhir.rest.server.Constants.PARAM_SECURITY, theSearchForSecurity);
paramMap.add(ca.uhn.fhir.rest.server.Constants.PARAM_PROFILE, theSearchForProfile); paramMap.add(ca.uhn.fhir.rest.server.Constants.PARAM_PROFILE, theSearchForProfile);

View File

@ -44,7 +44,7 @@
#foreach ( $res in $resources ) #foreach ( $res in $resources )
<bean id="my${res.name}Dao${versionCapitalized}" <bean id="my${res.name}Dao${versionCapitalized}"
## Some resource types have customized DAOs for resource specific functionality ## Some resource types have customized DAOs for resource specific functionality
#if ( ${versionCapitalized} == 'Dstu2' && ( ${res.name} == 'Bundle' || ${res.name} == 'Encounter' || ${res.name} == 'Everything' || ${res.name} == 'Patient' || ${res.name} == 'Subscription' || ${res.name} == 'QuestionnaireResponse' || ${res.name} == 'ValueSet')) #if ( ${versionCapitalized} == 'Dstu2' && ( ${res.name} == 'Bundle' || ${res.name} == 'Encounter' || ${res.name} == 'Everything' || ${res.name} == 'Patient' || ${res.name} == 'Subscription' || ${res.name} == 'ValueSet' || ${res.name} == 'QuestionnaireResponse' || ${res.name} == 'SearchParameter'))
class="ca.uhn.fhir.jpa.dao.FhirResourceDao${res.name}${versionCapitalized}"> class="ca.uhn.fhir.jpa.dao.FhirResourceDao${res.name}${versionCapitalized}">
#else #else
class="ca.uhn.fhir.jpa.dao.FhirResourceDao${versionCapitalized}"> class="ca.uhn.fhir.jpa.dao.FhirResourceDao${versionCapitalized}">

View File

@ -204,7 +204,7 @@
<maven.build.timestamp.format>yyyy-MM-dd'T'HH:mm:ss'Z'</maven.build.timestamp.format> <maven.build.timestamp.format>yyyy-MM-dd'T'HH:mm:ss'Z'</maven.build.timestamp.format>
<!-- This property is used in some of the site documentation where the version is shown, so that we can deploy the site even if the project is on a snapshot version. --> <!-- This property is used in some of the site documentation where the version is shown, so that we can deploy the site even if the project is on a snapshot version. -->
<hapi_stable_version>1.1</hapi_stable_version> <hapi_stable_version>1.2</hapi_stable_version>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding> <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
@ -219,7 +219,7 @@
<!-- Note on Hibernate versions: Hibernate 4.3+ uses JPA 2.1, which is too new for a number of platforms including JBoss EAP 6.x and Glassfish 3.0. Upgrade this <!-- Note on Hibernate versions: Hibernate 4.3+ uses JPA 2.1, which is too new for a number of platforms including JBoss EAP 6.x and Glassfish 3.0. Upgrade this
version with caution! Also note that if you change this, you may get a failure in hibernate4-maven-plugin. See the note in hapi-fhir-jpaserver-base/pom.xml's configuration version with caution! Also note that if you change this, you may get a failure in hibernate4-maven-plugin. See the note in hapi-fhir-jpaserver-base/pom.xml's configuration
for that plugin... --> for that plugin... -->
<hibernate_version>5.0.1.Final</hibernate_version> <hibernate_version>5.0.2.Final</hibernate_version>
<hibernate_validator_version>5.2.1.Final</hibernate_validator_version> <hibernate_validator_version>5.2.1.Final</hibernate_validator_version>
<jetty_version>9.2.6.v20141205</jetty_version> <jetty_version>9.2.6.v20141205</jetty_version>
<maven_build_helper_plugin_version>1.9.1</maven_build_helper_plugin_version> <maven_build_helper_plugin_version>1.9.1</maven_build_helper_plugin_version>

View File

@ -162,6 +162,18 @@
meaning that the same operation can also be invoked meaning that the same operation can also be invoked
at the type level. at the type level.
</action> </action>
<action type="add">
Make JPA search queries with the _lastUpdated parameter a bit more efficient
</action>
<action type="add" issue="239">
Clean up Android project to make it more lightweight and remove a
number of unneeded dependencies. Thanks to Thomas Andersen
for the pull request!
</action>
<action type="fix">
Fix a crash when encoding a Binary resource to JSON
if the resource has no content type
</action>
</release> </release>
<release version="1.2" date="2015-09-18"> <release version="1.2" date="2015-09-18">
<action type="add"> <action type="add">

Binary image file added (82 KiB); content not shown.

Binary image file added (70 KiB); content not shown.

View File

@ -74,6 +74,7 @@
<menu name="Using HAPI" inherit="bottom" > <menu name="Using HAPI" inherit="bottom" >
<item name="Documentation Index" href="./docindex.html"/> <item name="Documentation Index" href="./docindex.html"/>
<item name="Command Line Tool (cli)" href="./doc_cli.html" />
</menu> </menu>
<menu name="Documentation" inherit="top" > <menu name="Documentation" inherit="top" >
@ -112,6 +113,7 @@
<item name="Model API (RI DSTU2)" href="./apidocs-hl7org-dstu2/index.html" /> <item name="Model API (RI DSTU2)" href="./apidocs-hl7org-dstu2/index.html" />
<item name="JPA Server API" href="./apidocs-jpaserver/index.html" /> <item name="JPA Server API" href="./apidocs-jpaserver/index.html" />
</item> </item>
<item name="Command Line Tool (hapi-fhir-cli)" href="./doc_cli.html" />
<item name="Maven Plugin (hapi-tinder-plugin)" href="./doc_tinder.html" /> <item name="Maven Plugin (hapi-tinder-plugin)" href="./doc_tinder.html" />
</menu> </menu>

97
src/site/xdoc/doc_cli.xml Normal file
View File

@ -0,0 +1,97 @@
<?xml version="1.0" encoding="UTF-8"?>
<document xmlns="http://maven.apache.org/XDOC/2.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/XDOC/2.0 http://maven.apache.org/xsd/xdoc-2.0.xsd">
<properties>
<title>Command Line Tool</title>
<author email="jamesagnew@users.sourceforge.net">James Agnew</author>
</properties>
<body>
<section name="Command Line Tool for HAPI FHIR">
<p>
<b>hapi-fhir-cli</b> is the HAPI FHIR Command Line tool. It exposes a number of HAPI's
built-in features as easy-to-use command line options.
</p>
<subsection name="Download and Installation">
<p>
You can get the tool by downloading it from our
<a href="https://github.com/jamesagnew/hapi-fhir/releases">GitHub Releases</a> page
(look for the archive named <code>hapi-fhir-[version]-cli.tar.bz2</code> or <code>.zip</code> on Windows).
</p>
<p>
When you have downloaded the archive (either ZIP or tar.bz2), expand it into a directory
where you will keep it, and add this directory to your path.
</p>
<p>
You can now try the tool out by executing the following command: <code>hapi-fhir-cli</code>.
This command should show a help screen, as shown in the screenshot below.
</p>
<img src="./images/hapi-fhir-cli.png" alt="Basic screen shot" style="margin-left: 40px;"/>
</subsection>
<subsection name="Troubleshooting">
<p>
The tool should work correctly on any system that has Java 6 (or newer) installed. If
it is not working correctly, first try the following command to test if Java is installed:<br/>
<code>$ java -version</code>
</p>
<p>
If this command does not produce output similar to the following, you should install/reinstall
Java.<br/>
<pre><![CDATA[$ java -version
java version "1.8.0_60"
Java(TM) SE Runtime Environment (build 1.8.0_60-b27)
Java HotSpot(TM) 64-Bit Server VM (build 25.60-b23, mixed mode)]]></pre>
</p>
<p>
If this does not help, please post a question on our
<a href="https://groups.google.com/d/forum/hapi-fhir">Google Group</a>.
</p>
</subsection>
</section>
<section name="Server (run-server)">
<p>
The CLI tool can be used to start a local, fully functional FHIR server which you can use
for testing. To start this server, simply issue the command <code>hapi-fhir-cli run-server</code>
as shown in the example below:
</p>
<img src="./images/hapi-fhir-cli-run-server.png" alt="Run Server" style="margin-left: 40px;"/>
<p>
Once the server has started, you can access the testing webpage by pointing your
browser at <a href="http://localhost:8080/">http://localhost:8080/</a>. The FHIR
server base URL will be <a href="http://localhost:8080/baseDstu2/">http://localhost:8080/baseDstu2/</a>.
</p>
<p>
Note that by default this server will not be populated with any resources at all. You can
easily populate it with the FHIR example resources by <b>leaving it running</b> and opening
a second terminal window, then using the <code>hapi-fhir-cli upload-examples</code> command
(see the section below).
</p>
<p>
The server uses a local Derby database instance for storage. You may want to execute
this command in an empty directory, which you can clear if you want to reset the server.
</p>
</section>
<section name="Upload Example Resources (upload-examples)">
<p>
The <b>upload-examples</b> command downloads the complete set of FHIR example resources from
the HL7 website, and uploads them to a server of your choice. This can be useful to
populate a server with test data.
</p>
<p>
To execute this command, uploading test resources to a local CLI server, issue
the following: <code>hapi-fhir-cli upload-examples -t http://localhost:8080/baseDstu2</code>
</p>
<p>
Note that this command may take a surprisingly long time to complete because of the
large number of examples.
</p>
</section>
</body>
</document>
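Once a server started with run-server (and optionally populated via upload-examples) is running, it can be exercised from Java with the HAPI client. The following is a sketch only, assuming the default base URL shown above:

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.model.dstu2.resource.Patient;
import ca.uhn.fhir.rest.api.MethodOutcome;
import ca.uhn.fhir.rest.client.IGenericClient;

public class CliServerClientSketch {
   public static void main(String[] args) {
      FhirContext ctx = FhirContext.forDstu2();
      // Default base URL of the server started by "hapi-fhir-cli run-server"
      IGenericClient client = ctx.newRestfulGenericClient("http://localhost:8080/baseDstu2");

      Patient patient = new Patient();
      patient.addName().addFamily("Example").addGiven("Cli");

      // Create the resource on the local test server and print its new ID
      MethodOutcome outcome = client.create().resource(patient).execute();
      System.out.println("Created: " + outcome.getId().getValue());
   }
}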

View File

@ -54,6 +54,8 @@
<li><a href="./doc_rest_etag.html">ETags</a></li> <li><a href="./doc_rest_etag.html">ETags</a></li>
<li><a href="./doc_jpa.html">JPA/Database Server</a></li> <li><a href="./doc_jpa.html">JPA/Database Server</a></li>
<li><a href="./doc_tinder.html">Maven Plugin (hapi-tinder-plugin)</a></li> <li><a href="./doc_tinder.html">Maven Plugin (hapi-tinder-plugin)</a></li>
<li><a href="./doc_cli.html">Command Line Tool (hapi-fhir-cli)</a></li>
</ul> </ul>
<h4>JavaDocs</h4> <h4>JavaDocs</h4>