Add support for Cache-Control header in JPA server and client
This commit is contained in:
parent 4d1ab2734f
commit ce720f5601
ClientExamples.java
@@ -1,5 +1,6 @@
 package example;
 
+import ca.uhn.fhir.rest.api.CacheControlDirective;
 import org.hl7.fhir.dstu3.model.Bundle;
 
 import ca.uhn.fhir.context.FhirContext;
@@ -8,6 +9,7 @@ import ca.uhn.fhir.rest.api.EncodingEnum;
 import ca.uhn.fhir.rest.client.apache.GZipContentInterceptor;
 import ca.uhn.fhir.rest.client.api.*;
 import ca.uhn.fhir.rest.client.interceptor.*;
+import org.hl7.fhir.r4.model.Patient;
 
 public class ClientExamples {
 
@@ -52,6 +54,26 @@ public class ClientExamples {
       // END SNIPPET: processMessage
    }
 
+   @SuppressWarnings("unused")
+   public void cacheControl() {
+      FhirContext ctx = FhirContext.forDstu3();
+
+      // Create the client
+      IGenericClient client = ctx.newRestfulGenericClient("http://localhost:9999/fhir");
+
+      Bundle bundle = new Bundle();
+      // ..populate the bundle..
+
+      // START SNIPPET: cacheControl
+      Bundle response = client
+         .search()
+         .forResource(Patient.class)
+         .returnBundle(Bundle.class)
+         .cacheControl(new CacheControlDirective().setNoCache(true)) // <-- add a directive
+         .execute();
+      // END SNIPPET: cacheControl
+   }
+
    @SuppressWarnings("unused")
    public void createOkHttp() {
      // START SNIPPET: okhttp
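Example (editorial note, not part of the commit): the same fluent call also accepts the no-store directive together with the HAPI-specific max-results extension. The base URL and the value 20 below are placeholders.

   FhirContext ctx = FhirContext.forDstu3();
   IGenericClient client = ctx.newRestfulGenericClient("http://localhost:9999/fhir");

   // Ask the server not to store this search, and to stop fetching after 20 results
   Bundle response = client
      .search()
      .forResource(Patient.class)
      .returnBundle(Bundle.class)
      .cacheControl(new CacheControlDirective().setNoStore(true).setMaxResults(20))
      .execute();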
CacheControlDirective.java (new file)
@@ -0,0 +1,108 @@
+package ca.uhn.fhir.rest.api;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.List;
+import java.util.StringTokenizer;
+
+import static org.apache.commons.lang3.StringUtils.trim;
+
+/**
+ * Parses and stores the value(s) within HTTP Cache-Control headers
+ */
+public class CacheControlDirective {
+
+   private static final String MAX_RESULTS_EQUALS = Constants.CACHE_CONTROL_MAX_RESULTS + "=";
+   private static final Logger ourLog = LoggerFactory.getLogger(CacheControlDirective.class);
+   private boolean myNoCache;
+   private boolean myNoStore;
+   private Integer myMaxResults;
+
+   /**
+    * Constructor
+    */
+   public CacheControlDirective() {
+      super();
+   }
+
+   /**
+    * If the {@link #isNoStore() no-store} directive is set, this HAPI FHIR extension
+    * to the <code>Cache-Control</code> header called <code>max-results=123</code>
+    * specifies the maximum number of results which will be fetched from the
+    * database before returning.
+    */
+   public Integer getMaxResults() {
+      return myMaxResults;
+   }
+
+   /**
+    * If the {@link #isNoStore() no-store} directive is set, this HAPI FHIR extension
+    * to the <code>Cache-Control</code> header called <code>max-results=123</code>
+    * specifies the maximum number of results which will be fetched from the
+    * database before returning.
+    */
+   public CacheControlDirective setMaxResults(Integer theMaxResults) {
+      myMaxResults = theMaxResults;
+      return this;
+   }
+
+   /**
+    * If <code>true</code>, adds the <code>no-cache</code> directive to the
+    * request. This directive indicates that the cache should not be used to
+    * serve this request.
+    */
+   public boolean isNoCache() {
+      return myNoCache;
+   }
+
+   /**
+    * If <code>true</code>, adds the <code>no-cache</code> directive to the
+    * request. This directive indicates that the cache should not be used to
+    * serve this request.
+    */
+   public CacheControlDirective setNoCache(boolean theNoCache) {
+      myNoCache = theNoCache;
+      return this;
+   }
+
+   public boolean isNoStore() {
+      return myNoStore;
+   }
+
+   public CacheControlDirective setNoStore(boolean theNoStore) {
+      myNoStore = theNoStore;
+      return this;
+   }
+
+   /**
+    * Parses a list of <code>Cache-Control</code> header values
+    *
+    * @param theValues The <code>Cache-Control</code> header values
+    */
+   public CacheControlDirective parse(List<String> theValues) {
+      if (theValues != null) {
+         for (String nextValue : theValues) {
+            StringTokenizer tok = new StringTokenizer(nextValue, ",");
+            while (tok.hasMoreTokens()) {
+               String next = trim(tok.nextToken());
+               if (Constants.CACHE_CONTROL_NO_CACHE.equals(next)) {
+                  myNoCache = true;
+               } else if (Constants.CACHE_CONTROL_NO_STORE.equals(next)) {
+                  myNoStore = true;
+               } else if (next.startsWith(MAX_RESULTS_EQUALS)) {
+                  String valueString = trim(next.substring(MAX_RESULTS_EQUALS.length()));
+                  try {
+                     myMaxResults = Integer.parseInt(valueString);
+                  } catch (NumberFormatException e) {
+                     ourLog.warn("Invalid {} value: {}", Constants.CACHE_CONTROL_MAX_RESULTS, valueString);
+                  }
+               }
+            }
+         }
+      }
+      return this;
+   }
+}
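A short usage sketch of the parser added above; the header value is hypothetical and would normally come from the incoming request (see the DAO changes later in this commit).

   import ca.uhn.fhir.rest.api.CacheControlDirective;
   import java.util.Arrays;
   import java.util.List;

   public class CacheControlParseExample {
      public static void main(String[] args) {
         // One raw Cache-Control header value as received from a client
         List<String> headerValues = Arrays.asList("no-store, max-results=50");
         CacheControlDirective directive = new CacheControlDirective().parse(headerValues);
         System.out.println(directive.isNoCache());     // false
         System.out.println(directive.isNoStore());     // true
         System.out.println(directive.getMaxResults()); // 50
      }
   }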
Constants.java
@@ -25,6 +25,9 @@ import java.util.*;
 
 public class Constants {
 
+   public static final String CACHE_CONTROL_MAX_RESULTS = "max-results";
+   public static final String CACHE_CONTROL_NO_CACHE = "no-cache";
+   public static final String CACHE_CONTROL_NO_STORE = "no-store";
    public static final String CHARSET_NAME_UTF8 = "UTF-8";
    public static final Charset CHARSET_UTF8;
    public static final String CHARSET_UTF8_CTSUFFIX = "; charset=" + CHARSET_NAME_UTF8;
@@ -67,6 +70,7 @@ public class Constants {
    public static final String HEADER_AUTHORIZATION = "Authorization";
    public static final String HEADER_AUTHORIZATION_VALPREFIX_BASIC = "Basic ";
    public static final String HEADER_AUTHORIZATION_VALPREFIX_BEARER = "Bearer ";
+   public static final String HEADER_CACHE_CONTROL = "Cache-Control";
    public static final String HEADER_CONTENT_DISPOSITION = "Content-Disposition";
    public static final String HEADER_CONTENT_ENCODING = "Content-Encoding";
    public static final String HEADER_CONTENT_LOCATION = "Content-Location";
IClientExecutable.java
@@ -1,5 +1,6 @@
 package ca.uhn.fhir.rest.gclient;
 
+import ca.uhn.fhir.rest.api.CacheControlDirective;
 import ca.uhn.fhir.rest.api.EncodingEnum;
 import ca.uhn.fhir.rest.api.SummaryEnum;
 import org.hl7.fhir.instance.model.api.IBaseResource;
@@ -38,6 +39,12 @@ public interface IClientExecutable<T extends IClientExecutable<?,Y>, Y> {
    @Deprecated
    T andLogRequestAndResponse(boolean theLogRequestAndResponse);
 
+   /**
+    * Sets the <code>Cache-Control</code> header value, which advises the server (or any cache in front of it)
+    * how to behave in terms of cached requests
+    */
+   T cacheControl(CacheControlDirective theCacheControlDirective);
+
    /**
    * Request that the server return subsetted resources, containing only the elements specified in the given parameters.
    * For example: <code>subsetElements("name", "identifier")</code> requests that the server only return
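Because cacheControl(..) is declared on IClientExecutable, it should be available on every fluent client operation, not only search. A hypothetical read call, assuming a client built as in the earlier example and a placeholder resource id:

   Patient patient = client
      .read()
      .resource(Patient.class)
      .withId("123")
      .cacheControl(new CacheControlDirective().setNoCache(true))
      .execute();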
CacheControlDirectiveTest.java (new file)
@@ -0,0 +1,58 @@
+package ca.uhn.fhir.rest.api;
+
+import org.junit.Test;
+
+import java.util.Arrays;
+import java.util.List;
+
+import static org.junit.Assert.*;
+
+public class CacheControlDirectiveTest {
+
+   @Test
+   public void testParseNoCache() {
+      List<String> values = Arrays.asList(Constants.CACHE_CONTROL_NO_CACHE);
+      CacheControlDirective ccd = new CacheControlDirective();
+      ccd.parse(values);
+      assertTrue(ccd.isNoCache());
+      assertFalse(ccd.isNoStore());
+   }
+
+   @Test
+   public void testParseNoCacheNoStore() {
+      List<String> values = Arrays.asList(Constants.CACHE_CONTROL_NO_CACHE + " , " + Constants.CACHE_CONTROL_NO_STORE);
+      CacheControlDirective ccd = new CacheControlDirective();
+      ccd.parse(values);
+      assertTrue(ccd.isNoCache());
+      assertTrue(ccd.isNoStore());
+      assertEquals(null, ccd.getMaxResults());
+   }
+
+   @Test
+   public void testParseNoCacheNoStoreMaxResults() {
+      List<String> values = Arrays.asList(Constants.CACHE_CONTROL_NO_STORE + ", " + Constants.CACHE_CONTROL_MAX_RESULTS + "=5");
+      CacheControlDirective ccd = new CacheControlDirective();
+      ccd.parse(values);
+      assertFalse(ccd.isNoCache());
+      assertTrue(ccd.isNoStore());
+      assertEquals(5, ccd.getMaxResults().intValue());
+   }
+
+   @Test
+   public void testParseNoCacheNoStoreMaxResultsInvalid() {
+      List<String> values = Arrays.asList(Constants.CACHE_CONTROL_NO_STORE + ", " + Constants.CACHE_CONTROL_MAX_RESULTS + "=A");
+      CacheControlDirective ccd = new CacheControlDirective();
+      ccd.parse(values);
+      assertFalse(ccd.isNoCache());
+      assertTrue(ccd.isNoStore());
+      assertEquals(null, ccd.getMaxResults());
+   }
+
+   @Test
+   public void testParseNull() {
+      CacheControlDirective ccd = new CacheControlDirective();
+      ccd.parse(null);
+      assertFalse(ccd.isNoCache());
+      assertFalse(ccd.isNoStore());
+   }
+}
BaseClient.java
@@ -34,6 +34,7 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
+import ca.uhn.fhir.rest.api.CacheControlDirective;
 import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.lang3.Validate;
@@ -135,7 +136,7 @@ public abstract class BaseClient implements IRestfulClient {
    public <T extends IBaseResource> T fetchResourceFromUrl(Class<T> theResourceType, String theUrl) {
       BaseHttpClientInvocation clientInvocation = new HttpGetClientInvocation(getFhirContext(), theUrl);
       ResourceResponseHandler<T> binding = new ResourceResponseHandler<T>(theResourceType);
-      return invokeClient(getFhirContext(), binding, clientInvocation, null, false, false, null, null);
+      return invokeClient(getFhirContext(), binding, clientInvocation, null, false, false, null, null, null);
    }
 
    void forceConformanceCheck() {
@@ -198,11 +199,11 @@ public abstract class BaseClient implements IRestfulClient {
    }
 
    <T> T invokeClient(FhirContext theContext, IClientResponseHandler<T> binding, BaseHttpClientInvocation clientInvocation, boolean theLogRequestAndResponse) {
-      return invokeClient(theContext, binding, clientInvocation, null, null, theLogRequestAndResponse, null, null);
+      return invokeClient(theContext, binding, clientInvocation, null, null, theLogRequestAndResponse, null, null, null);
    }
 
    <T> T invokeClient(FhirContext theContext, IClientResponseHandler<T> binding, BaseHttpClientInvocation clientInvocation, EncodingEnum theEncoding, Boolean thePrettyPrint,
-         boolean theLogRequestAndResponse, SummaryEnum theSummaryMode, Set<String> theSubsetElements) {
+         boolean theLogRequestAndResponse, SummaryEnum theSummaryMode, Set<String> theSubsetElements, CacheControlDirective theCacheControlDirective) {
 
       if (!myDontValidateConformance) {
          myFactory.validateServerBaseIfConfiguredToDoSo(myUrlBase, myClient, this);
@@ -244,6 +245,18 @@ public abstract class BaseClient implements IRestfulClient {
 
       httpRequest = clientInvocation.asHttpRequest(myUrlBase, params, encoding, thePrettyPrint);
 
+      if (theCacheControlDirective != null) {
+         StringBuilder b = new StringBuilder();
+         addToCacheControlHeader(b, Constants.CACHE_CONTROL_NO_CACHE, theCacheControlDirective.isNoCache());
+         addToCacheControlHeader(b, Constants.CACHE_CONTROL_NO_STORE, theCacheControlDirective.isNoStore());
+         if (theCacheControlDirective.getMaxResults() != null) {
+            addToCacheControlHeader(b, Constants.CACHE_CONTROL_MAX_RESULTS + "=" + Integer.toString(theCacheControlDirective.getMaxResults().intValue()), true);
+         }
+         if (b.length() > 0) {
+            httpRequest.addHeader(Constants.HEADER_CACHE_CONTROL, b.toString());
+         }
+      }
+
      if (theLogRequestAndResponse) {
          ourLog.info("Client invoking: {}", httpRequest);
          String body = httpRequest.getRequestBodyFromStream();
@@ -366,6 +379,15 @@ public abstract class BaseClient implements IRestfulClient {
       }
    }
 
+   private void addToCacheControlHeader(StringBuilder theBuilder, String theDirective, boolean theActive) {
+      if (theActive) {
+         if (theBuilder.length() > 0) {
+            theBuilder.append(", ");
+         }
+         theBuilder.append(theDirective);
+      }
+   }
+
    /**
    * For now, this is a part of the internal API of HAPI - Use with caution as this method may change!
    */
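The helper above joins the active directives with ", ", so a fully populated directive yields one combined header value. An illustrative sketch of the resulting request header:

   CacheControlDirective directive = new CacheControlDirective()
      .setNoCache(true)
      .setNoStore(true)
      .setMaxResults(100);
   // BaseClient adds a single header to the outgoing request:
   // Cache-Control: no-cache, no-store, max-results=100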
GenericClient.java
@@ -120,10 +120,10 @@ public class GenericClient extends BaseClient implements IGenericClient {
       ResourceResponseHandler<T> binding = new ResourceResponseHandler<T>(theType, (Class<? extends IBaseResource>) null, id, allowHtmlResponse);
 
       if (theNotModifiedHandler == null) {
-         return invokeClient(myContext, binding, invocation, theEncoding, thePrettyPrint, myLogRequestAndResponse, theSummary, theSubsetElements);
+         return invokeClient(myContext, binding, invocation, theEncoding, thePrettyPrint, myLogRequestAndResponse, theSummary, theSubsetElements, null);
       }
       try {
-         return invokeClient(myContext, binding, invocation, theEncoding, thePrettyPrint, myLogRequestAndResponse, theSummary, theSubsetElements);
+         return invokeClient(myContext, binding, invocation, theEncoding, thePrettyPrint, myLogRequestAndResponse, theSummary, theSubsetElements, null);
       } catch (NotModifiedException e) {
          return theNotModifiedHandler.call();
       }
@@ -373,6 +373,7 @@ public class GenericClient extends BaseClient implements IGenericClient {
       private boolean myQueryLogRequestAndResponse;
       private HashSet<String> mySubsetElements;
       protected SummaryEnum mySummaryMode;
+      protected CacheControlDirective myCacheControlDirective;
 
       @Deprecated // override deprecated method
       @SuppressWarnings("unchecked")
@@ -382,6 +383,12 @@ public class GenericClient extends BaseClient implements IGenericClient {
          return (T) this;
       }
 
+      @Override
+      public T cacheControl(CacheControlDirective theCacheControlDirective) {
+         myCacheControlDirective = theCacheControlDirective;
+         return (T) this;
+      }
+
       @SuppressWarnings("unchecked")
       @Override
       public T elementsSubset(String... theElements) {
@@ -434,19 +441,11 @@ public class GenericClient extends BaseClient implements IGenericClient {
       }
 
       protected <Z> Z invoke(Map<String, List<String>> theParams, IClientResponseHandler<Z> theHandler, BaseHttpClientInvocation theInvocation) {
-         // if (myParamEncoding != null) {
-         // theParams.put(Constants.PARAM_FORMAT, Collections.singletonList(myParamEncoding.getFormatContentType()));
-         // }
-         //
-         // if (myPrettyPrint != null) {
-         // theParams.put(Constants.PARAM_PRETTY, Collections.singletonList(myPrettyPrint.toString()));
-         // }
-
         if (isKeepResponses()) {
            myLastRequest = theInvocation.asHttpRequest(getServerBase(), theParams, getEncoding(), myPrettyPrint);
         }
 
-         Z resp = invokeClient(myContext, theHandler, theInvocation, myParamEncoding, myPrettyPrint, myQueryLogRequestAndResponse || myLogRequestAndResponse, mySummaryMode, mySubsetElements);
+         Z resp = invokeClient(myContext, theHandler, theInvocation, myParamEncoding, myPrettyPrint, myQueryLogRequestAndResponse || myLogRequestAndResponse, mySummaryMode, mySubsetElements, myCacheControlDirective);
         return resp;
      }
BaseHapiFhirResourceDao.java
@@ -36,10 +36,7 @@ import ca.uhn.fhir.jpa.util.jsonpatch.JsonPatchUtils;
 import ca.uhn.fhir.jpa.util.xmlpatch.XmlPatchUtils;
 import ca.uhn.fhir.model.api.*;
 import ca.uhn.fhir.model.primitive.IdDt;
-import ca.uhn.fhir.rest.api.PatchTypeEnum;
-import ca.uhn.fhir.rest.api.QualifiedParamList;
-import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
-import ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum;
+import ca.uhn.fhir.rest.api.*;
 import ca.uhn.fhir.rest.api.server.IBundleProvider;
 import ca.uhn.fhir.rest.api.server.RequestDetails;
 import ca.uhn.fhir.rest.param.ParameterUtil;
@@ -928,7 +925,12 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource>
          }
       }
 
-      return mySearchCoordinatorSvc.registerSearch(this, theParams, getResourceName());
+      CacheControlDirective cacheControlDirective = new CacheControlDirective();
+      if (theRequestDetails != null) {
+         cacheControlDirective.parse(theRequestDetails.getHeaders(Constants.HEADER_CACHE_CONTROL));
+      }
+
+      return mySearchCoordinatorSvc.registerSearch(this, theParams, getResourceName(), cacheControlDirective);
    }
 
    @Override
DaoConfig.java
@@ -107,6 +107,7 @@ public class DaoConfig {
    private Set<String> myTreatBaseUrlsAsLocal = new HashSet<String>();
    private Set<String> myTreatReferencesAsLogical = new HashSet<String>(DEFAULT_LOGICAL_BASE_URLS);
    private boolean myAutoCreatePlaceholderReferenceTargets;
+   private Integer myCacheControlNoStoreMaxResultsUpperLimit = 1000;
 
    /**
    * Constructor
@@ -131,6 +132,26 @@ public class DaoConfig {
       myTreatReferencesAsLogical.add(theTreatReferencesAsLogical);
    }
 
+   /**
+    * Specifies the highest number that a client is permitted to use in a
+    * <code>Cache-Control: no-store, max-results=NNN</code>
+    * directive. If the client tries to exceed this limit, the
+    * request will be denied. Defaults to 1000.
+    */
+   public Integer getCacheControlNoStoreMaxResultsUpperLimit() {
+      return myCacheControlNoStoreMaxResultsUpperLimit;
+   }
+
+   /**
+    * Specifies the highest number that a client is permitted to use in a
+    * <code>Cache-Control: no-store, max-results=NNN</code>
+    * directive. If the client tries to exceed this limit, the
+    * request will be denied. Defaults to 1000.
+    */
+   public void setCacheControlNoStoreMaxResultsUpperLimit(Integer theCacheControlNoStoreMaxResults) {
+      myCacheControlNoStoreMaxResultsUpperLimit = theCacheControlNoStoreMaxResults;
+   }
+
    /**
    * When a code system is added that contains more than this number of codes,
    * the code system will be indexed later in an incremental process in order to
@@ -336,8 +357,11 @@ public class DaoConfig {
    /**
    * This may be used to optionally register server interceptors directly against the DAOs.
    */
-   public void setInterceptors(List<IServerInterceptor> theInterceptors) {
-      myInterceptors = theInterceptors;
+   public void setInterceptors(IServerInterceptor... theInterceptor) {
+      setInterceptors(new ArrayList<IServerInterceptor>());
+      if (theInterceptor != null && theInterceptor.length != 0) {
+         getInterceptors().addAll(Arrays.asList(theInterceptor));
+      }
    }
 
    /**
@@ -434,6 +458,11 @@ public class DaoConfig {
    * This approach can improve performance, especially under heavy load, but can also mean that
    * searches may potentially return slightly out-of-date results.
    * </p>
+   * <p>
+   * Note that if this is set to a non-null value, clients may override this setting by using
+   * the <code>Cache-Control</code> header. If this is set to <code>null</code>, the Cache-Control
+   * header will be ignored.
+   * </p>
    */
    public Long getReuseCachedSearchResultsForMillis() {
       return myReuseCachedSearchResultsForMillis;
@@ -449,6 +478,11 @@ public class DaoConfig {
    * This approach can improve performance, especially under heavy load, but can also mean that
    * searches may potentially return slightly out-of-date results.
    * </p>
+   * <p>
+   * Note that if this is set to a non-null value, clients may override this setting by using
+   * the <code>Cache-Control</code> header. If this is set to <code>null</code>, the Cache-Control
+   * header will be ignored.
+   * </p>
    */
    public void setReuseCachedSearchResultsForMillis(Long theReuseCachedSearchResultsForMillis) {
       myReuseCachedSearchResultsForMillis = theReuseCachedSearchResultsForMillis;
@@ -925,11 +959,8 @@ public class DaoConfig {
    /**
    * This may be used to optionally register server interceptors directly against the DAOs.
    */
-   public void setInterceptors(IServerInterceptor... theInterceptor) {
-      setInterceptors(new ArrayList<IServerInterceptor>());
-      if (theInterceptor != null && theInterceptor.length != 0) {
-         getInterceptors().addAll(Arrays.asList(theInterceptor));
-      }
+   public void setInterceptors(List<IServerInterceptor> theInterceptors) {
+      myInterceptors = theInterceptors;
    }
 
    /**
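A minimal server-side configuration sketch for the new DaoConfig property (the value 500 is arbitrary):

   DaoConfig daoConfig = new DaoConfig();
   // Reject any "Cache-Control: no-store, max-results=NNN" request where NNN exceeds 500
   daoConfig.setCacheControlNoStoreMaxResultsUpperLimit(500);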
FhirResourceDaoPatientDstu2.java
@@ -65,7 +65,7 @@ public class FhirResourceDaoPatientDstu2 extends FhirResourceDaoDstu2<Patient>
          paramMap.setLoadSynchronous(true);
       }
 
-      return mySearchCoordinatorSvc.registerSearch(this, paramMap, getResourceName());
+      return mySearchCoordinatorSvc.registerSearch(this, paramMap, getResourceName(), new CacheControlDirective().parse(theRequestDetails.getHeaders(Constants.HEADER_CACHE_CONTROL)));
    }
 
    @Override
FhirResourceDaoPatientDstu3.java
@@ -24,6 +24,7 @@ import java.util.Collections;
 
 import javax.servlet.http.HttpServletRequest;
 
+import ca.uhn.fhir.rest.api.CacheControlDirective;
 import org.hl7.fhir.dstu3.model.Patient;
 import org.hl7.fhir.instance.model.api.IIdType;
 import org.hl7.fhir.instance.model.api.IPrimitiveType;
@@ -66,7 +67,7 @@ public class FhirResourceDaoPatientDstu3 extends FhirResourceDaoDstu3<Patient>
          paramMap.setLoadSynchronous(true);
       }
 
-      return mySearchCoordinatorSvc.registerSearch(this, paramMap, getResourceName());
+      return mySearchCoordinatorSvc.registerSearch(this, paramMap, getResourceName(), new CacheControlDirective().parse(theRequestDetails.getHeaders(Constants.HEADER_CACHE_CONTROL)));
    }
 
    @Override
FhirResourceDaoPatientR4.java
@@ -24,6 +24,7 @@ import java.util.Collections;
 
 import javax.servlet.http.HttpServletRequest;
 
+import ca.uhn.fhir.rest.api.CacheControlDirective;
 import org.hl7.fhir.r4.model.Patient;
 import org.hl7.fhir.instance.model.api.IIdType;
 import org.hl7.fhir.instance.model.api.IPrimitiveType;
@@ -66,7 +67,7 @@ public class FhirResourceDaoPatientR4 extends FhirResourceDaoR4<Patient>
          paramMap.setLoadSynchronous(true);
       }
 
-      return mySearchCoordinatorSvc.registerSearch(this, paramMap, getResourceName());
+      return mySearchCoordinatorSvc.registerSearch(this, paramMap, getResourceName(), new CacheControlDirective().parse(theRequestDetails.getHeaders(Constants.HEADER_CACHE_CONTROL)));
    }
 
    @Override
ISearchCoordinatorSvc.java
@@ -20,18 +20,19 @@ package ca.uhn.fhir.jpa.search;
  * #L%
  */
 
-import java.util.List;
-
 import ca.uhn.fhir.jpa.dao.IDao;
 import ca.uhn.fhir.jpa.dao.SearchParameterMap;
+import ca.uhn.fhir.rest.api.CacheControlDirective;
 import ca.uhn.fhir.rest.api.server.IBundleProvider;
 
+import java.util.List;
+
 public interface ISearchCoordinatorSvc {
 
-   List<Long> getResources(String theUuid, int theFrom, int theTo);
-
-   IBundleProvider registerSearch(IDao theCallingDao, SearchParameterMap theParams, String theResourceType);
-
    void cancelAllActiveSearches();
 
+   List<Long> getResources(String theUuid, int theFrom, int theTo);
+
+   IBundleProvider registerSearch(IDao theCallingDao, SearchParameterMap theParams, String theResourceType, CacheControlDirective theCacheControlDirective);
+
 }
SearchCoordinatorSvcImpl.java
@@ -24,6 +24,8 @@ import java.util.concurrent.*;
 
 import javax.persistence.EntityManager;
 
+import ca.uhn.fhir.rest.api.CacheControlDirective;
+import ca.uhn.fhir.rest.api.Constants;
 import org.apache.commons.lang3.ObjectUtils;
 import org.apache.commons.lang3.Validate;
 import org.apache.commons.lang3.exception.ExceptionUtils;
@@ -55,7 +57,7 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
    public static final int DEFAULT_SYNC_SIZE = 250;
 
    private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(SearchCoordinatorSvcImpl.class);
+   private final ConcurrentHashMap<String, SearchTask> myIdToSearchTask = new ConcurrentHashMap<String, SearchTask>();
    @Autowired
    private FhirContext myContext;
    @Autowired
@@ -63,7 +65,6 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
    @Autowired
    private EntityManager myEntityManager;
    private ExecutorService myExecutor;
-   private final ConcurrentHashMap<String, SearchTask> myIdToSearchTask = new ConcurrentHashMap<String, SearchTask>();
    private Integer myLoadingThrottleForUnitTests = null;
    private long myMaxMillisToWaitForRemoteResults = DateUtils.MILLIS_PER_MINUTE;
    private boolean myNeverUseLocalSearchForUnitTests;
@@ -186,7 +187,7 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
    }
 
    @Override
-   public IBundleProvider registerSearch(final IDao theCallingDao, final SearchParameterMap theParams, String theResourceType) {
+   public IBundleProvider registerSearch(final IDao theCallingDao, final SearchParameterMap theParams, String theResourceType, CacheControlDirective theCacheControlDirective) {
       StopWatch w = new StopWatch();
       final String searchUuid = UUID.randomUUID().toString();
 
@@ -194,7 +195,21 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
       final ISearchBuilder sb = theCallingDao.newSearchBuilder();
       sb.setType(resourceTypeClass, theResourceType);
 
-      if (theParams.isLoadSynchronous()) {
+      final Integer loadSynchronousUpTo;
+      if (theCacheControlDirective != null && theCacheControlDirective.isNoStore()) {
+         if (theCacheControlDirective.getMaxResults() != null) {
+            loadSynchronousUpTo = theCacheControlDirective.getMaxResults();
+            if (loadSynchronousUpTo > myDaoConfig.getCacheControlNoStoreMaxResultsUpperLimit()) {
+               throw new InvalidRequestException(Constants.HEADER_CACHE_CONTROL + " header " + Constants.CACHE_CONTROL_MAX_RESULTS + " value must not exceed " + myDaoConfig.getCacheControlNoStoreMaxResultsUpperLimit());
+            }
+         } else {
+            loadSynchronousUpTo = 100;
+         }
+      } else {
+         loadSynchronousUpTo = null;
+      }
+
+      if (theParams.isLoadSynchronous() || loadSynchronousUpTo != null) {
 
          // Execute the query and make sure we return distinct results
         TransactionTemplate txTemplate = new TransactionTemplate(myManagedTxManager);
@@ -209,6 +224,9 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
            Iterator<Long> resultIter = sb.createQuery(theParams, searchUuid);
            while (resultIter.hasNext()) {
               pids.add(resultIter.next());
+              if (loadSynchronousUpTo != null && pids.size() >= loadSynchronousUpTo) {
+                 break;
+              }
              if (theParams.getLoadSynchronousUpTo() != null && pids.size() >= theParams.getLoadSynchronousUpTo()) {
                 break;
              }
@@ -238,9 +256,13 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
       * See if there are any cached searches whose results we can return
       * instead
       */
+      boolean useCache = true;
+      if (theCacheControlDirective != null && theCacheControlDirective.isNoCache() == true) {
+         useCache = false;
+      }
      final String queryString = theParams.toNormalizedQueryString(myContext);
      if (theParams.getEverythingMode() == null) {
-         if (myDaoConfig.getReuseCachedSearchResultsForMillis() != null) {
+         if (myDaoConfig.getReuseCachedSearchResultsForMillis() != null && useCache) {
 
            final Date createdCutoff = new Date(System.currentTimeMillis() - myDaoConfig.getReuseCachedSearchResultsForMillis());
            final String resourceType = theResourceType;
@@ -401,16 +423,16 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
 
    public class SearchTask implements Callable<Void> {
 
-      private boolean myAbortRequested;
       private final IDao myCallingDao;
       private final CountDownLatch myCompletionLatch;
-      private int myCountSaved = 0;
       private final CountDownLatch myInitialCollectionLatch = new CountDownLatch(1);
       private final SearchParameterMap myParams;
       private final String myResourceType;
       private final Search mySearch;
       private final ArrayList<Long> mySyncedPids = new ArrayList<Long>();
       private final ArrayList<Long> myUnsyncedPids = new ArrayList<Long>();
+      private boolean myAbortRequested;
+      private int myCountSaved = 0;
       private String mySearchUuid;
 
       public SearchTask(Search theSearch, IDao theCallingDao, SearchParameterMap theParams, String theResourceType, String theSearchUuid) {
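Editorial summary of the branches above: no-cache skips reuse of cached search results; no-store runs the search synchronously without persisting a Search entity, capped at 100 results unless max-results is supplied; and a max-results value above DaoConfig#getCacheControlNoStoreMaxResultsUpperLimit() is rejected with an InvalidRequestException.

   // Request headers the JPA server now honours (illustrative values):
   // Cache-Control: no-cache                 -> do not reuse a cached search result for this query
   // Cache-Control: no-store                 -> do not persist the search; return up to 100 results
   // Cache-Control: no-store, max-results=20 -> do not persist the search; return up to 20 results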
ResourceProviderR4CacheTest.java (new file)
@@ -0,0 +1,164 @@
+package ca.uhn.fhir.jpa.provider.r4;
+
+import ca.uhn.fhir.jpa.dao.DaoConfig;
+import ca.uhn.fhir.jpa.search.SearchCoordinatorSvcImpl;
+import ca.uhn.fhir.parser.StrictErrorHandler;
+import ca.uhn.fhir.rest.api.CacheControlDirective;
+import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
+import ca.uhn.fhir.util.TestUtil;
+import org.hl7.fhir.r4.model.Bundle;
+import org.hl7.fhir.r4.model.Patient;
+import org.junit.After;
+import org.junit.AfterClass;
+import org.junit.Test;
+import org.springframework.test.util.AopTestUtils;
+
+import java.io.IOException;
+
+import static org.junit.Assert.*;
+
+public class ResourceProviderR4CacheTest extends BaseResourceProviderR4Test {
+
+   private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(ResourceProviderR4CacheTest.class);
+   private SearchCoordinatorSvcImpl mySearchCoordinatorSvcRaw;
+
+   @Override
+   @After
+   public void after() throws Exception {
+      super.after();
+      myDaoConfig.setReuseCachedSearchResultsForMillis(new DaoConfig().getReuseCachedSearchResultsForMillis());
+      myDaoConfig.setCacheControlNoStoreMaxResultsUpperLimit(new DaoConfig().getCacheControlNoStoreMaxResultsUpperLimit());
+   }
+
+   @Override
+   public void before() throws Exception {
+      super.before();
+      myFhirCtx.setParserErrorHandler(new StrictErrorHandler());
+      mySearchCoordinatorSvcRaw = AopTestUtils.getTargetObject(mySearchCoordinatorSvc);
+   }
+
+   @Test
+   public void testCacheNoStore() throws IOException {
+
+      Patient pt1 = new Patient();
+      pt1.addName().setFamily("FAM");
+      ourClient.create().resource(pt1).execute();
+
+      Bundle results = ourClient
+         .search()
+         .forResource("Patient")
+         .where(Patient.FAMILY.matches().value("FAM"))
+         .returnBundle(Bundle.class)
+         .cacheControl(new CacheControlDirective().setNoStore(true))
+         .execute();
+      assertEquals(1, results.getEntry().size());
+      assertEquals(0, mySearchEntityDao.count());
+
+      Patient pt2 = new Patient();
+      pt2.addName().setFamily("FAM");
+      ourClient.create().resource(pt2).execute();
+
+      results = ourClient
+         .search()
+         .forResource("Patient")
+         .where(Patient.FAMILY.matches().value("FAM"))
+         .returnBundle(Bundle.class)
+         .cacheControl(new CacheControlDirective().setNoStore(true))
+         .execute();
+      assertEquals(2, results.getEntry().size());
+      assertEquals(0, mySearchEntityDao.count());
+
+   }
+
+   @Test
+   public void testCacheNoStoreMaxResults() throws IOException {
+
+      for (int i = 0; i < 10; i++) {
+         Patient pt1 = new Patient();
+         pt1.addName().setFamily("FAM" + i);
+         ourClient.create().resource(pt1).execute();
+      }
+
+      Bundle results = ourClient
+         .search()
+         .forResource("Patient")
+         .where(Patient.FAMILY.matches().value("FAM"))
+         .returnBundle(Bundle.class)
+         .cacheControl(new CacheControlDirective().setNoStore(true).setMaxResults(5))
+         .execute();
+      assertEquals(5, results.getEntry().size());
+      assertEquals(0, mySearchEntityDao.count());
+
+   }
+
+   @Test
+   public void testCacheNoStoreMaxResultsWithIllegalValue() throws IOException {
+      myDaoConfig.setCacheControlNoStoreMaxResultsUpperLimit(123);
+      try {
+         ourClient
+            .search()
+            .forResource("Patient")
+            .where(Patient.FAMILY.matches().value("FAM"))
+            .returnBundle(Bundle.class)
+            .cacheControl(new CacheControlDirective().setNoStore(true).setMaxResults(5000))
+            .execute();
+         fail();
+      } catch (InvalidRequestException e) {
+         assertEquals("HTTP 400 Bad Request: Cache-Control header max-results value must not exceed 123", e.getMessage());
+      }
+   }
+
+   @Test
+   public void testCacheSuppressed() throws IOException {
+
+      Patient pt1 = new Patient();
+      pt1.addName().setFamily("FAM");
+      ourClient.create().resource(pt1).execute();
+
+      Bundle results = ourClient.search().forResource("Patient").where(Patient.FAMILY.matches().value("FAM")).returnBundle(Bundle.class).execute();
+      assertEquals(1, results.getEntry().size());
+      assertEquals(1, mySearchEntityDao.count());
+
+      Patient pt2 = new Patient();
+      pt2.addName().setFamily("FAM");
+      ourClient.create().resource(pt2).execute();
+
+      results = ourClient
+         .search()
+         .forResource("Patient")
+         .where(Patient.FAMILY.matches().value("FAM"))
+         .returnBundle(Bundle.class)
+         .cacheControl(new CacheControlDirective().setNoCache(true))
+         .execute();
+      assertEquals(2, results.getEntry().size());
+      assertEquals(2, mySearchEntityDao.count());
+
+   }
+
+   @Test
+   public void testCacheUsedNormally() throws IOException {
+
+      Patient pt1 = new Patient();
+      pt1.addName().setFamily("FAM");
+      ourClient.create().resource(pt1).execute();
+
+      Bundle results = ourClient.search().forResource("Patient").where(Patient.FAMILY.matches().value("FAM")).returnBundle(Bundle.class).execute();
+      assertEquals(1, results.getEntry().size());
+      assertEquals(1, mySearchEntityDao.count());
+
+      Patient pt2 = new Patient();
+      pt2.addName().setFamily("FAM");
+      ourClient.create().resource(pt2).execute();
+
+      results = ourClient.search().forResource("Patient").where(Patient.FAMILY.matches().value("FAM")).returnBundle(Bundle.class).execute();
+      assertEquals(1, results.getEntry().size());
+      assertEquals(1, mySearchEntityDao.count());
+
+   }
+
+   @AfterClass
+   public static void afterClassClearContext() {
+      TestUtil.clearAllStaticFieldsForUnitTest();
+   }
+
+}
SearchCoordinatorSvcImplTest.java
@@ -1,41 +1,60 @@
 package ca.uhn.fhir.jpa.search;
 
-import static org.junit.Assert.*;
-import static org.mockito.Matchers.*;
-import static org.mockito.Mockito.*;
-
-import java.util.*;
-
-import javax.persistence.EntityManager;
-
-import org.hl7.fhir.instance.model.api.IBaseResource;
-import org.junit.*;
-import org.junit.runner.RunWith;
-import org.mockito.*;
-import org.mockito.invocation.InvocationOnMock;
-import org.mockito.runners.MockitoJUnitRunner;
-import org.mockito.stubbing.Answer;
-import org.springframework.data.domain.*;
-import org.springframework.transaction.PlatformTransactionManager;
-
-import com.google.common.collect.Lists;
-
 import ca.uhn.fhir.context.FhirContext;
-import ca.uhn.fhir.jpa.dao.*;
-import ca.uhn.fhir.jpa.dao.data.*;
-import ca.uhn.fhir.jpa.entity.*;
+import ca.uhn.fhir.jpa.dao.DaoConfig;
+import ca.uhn.fhir.jpa.dao.IDao;
+import ca.uhn.fhir.jpa.dao.ISearchBuilder;
+import ca.uhn.fhir.jpa.dao.SearchParameterMap;
+import ca.uhn.fhir.jpa.dao.data.ISearchDao;
+import ca.uhn.fhir.jpa.dao.data.ISearchIncludeDao;
+import ca.uhn.fhir.jpa.dao.data.ISearchResultDao;
+import ca.uhn.fhir.jpa.entity.Search;
+import ca.uhn.fhir.jpa.entity.SearchResult;
+import ca.uhn.fhir.jpa.entity.SearchStatusEnum;
+import ca.uhn.fhir.jpa.entity.SearchTypeEnum;
 import ca.uhn.fhir.jpa.util.BaseIterator;
 import ca.uhn.fhir.model.dstu2.resource.Patient;
+import ca.uhn.fhir.rest.api.CacheControlDirective;
 import ca.uhn.fhir.rest.api.server.IBundleProvider;
 import ca.uhn.fhir.rest.param.StringParam;
 import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
 import ca.uhn.fhir.util.TestUtil;
+import com.google.common.collect.Lists;
+import org.hl7.fhir.instance.model.api.IBaseResource;
+import org.junit.After;
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.ArgumentCaptor;
+import org.mockito.Captor;
+import org.mockito.Mock;
+import org.mockito.Mockito;
+import org.mockito.invocation.InvocationOnMock;
+import org.mockito.runners.MockitoJUnitRunner;
+import org.mockito.stubbing.Answer;
+import org.springframework.data.domain.Page;
+import org.springframework.data.domain.PageImpl;
+import org.springframework.data.domain.Pageable;
+import org.springframework.transaction.PlatformTransactionManager;
+
+import javax.persistence.EntityManager;
+import java.util.*;
+
+import static org.junit.Assert.*;
+import static org.mockito.Matchers.any;
+import static org.mockito.Matchers.anyBoolean;
+import static org.mockito.Matchers.eq;
+import static org.mockito.Matchers.same;
+import static org.mockito.Mockito.*;
 
-@SuppressWarnings({ "unchecked" })
+@SuppressWarnings({"unchecked"})
 @RunWith(MockitoJUnitRunner.class)
 public class SearchCoordinatorSvcImplTest {
 
    private static FhirContext ourCtx = FhirContext.forDstu3();
+   @Captor
+   ArgumentCaptor<Iterable<SearchResult>> mySearchResultIterCaptor;
    @Mock
    private IDao myCallingDao;
    @Mock
@@ -49,10 +68,6 @@ public class SearchCoordinatorSvcImplTest {
    private ISearchIncludeDao mySearchIncludeDao;
    @Mock
    private ISearchResultDao mySearchResultDao;
-   @Captor
-   ArgumentCaptor<Iterable<SearchResult>> mySearchResultIterCaptor;
-
-
    private SearchCoordinatorSvcImpl mySvc;
 
    @Mock
@@ -63,9 +78,10 @@ public class SearchCoordinatorSvcImplTest {
    public void after() {
       verify(myCallingDao, atMost(myExpectedNumberOfSearchBuildersCreated)).newSearchBuilder();
    }
 
    @Before
    public void before() {
+
      mySvc = new SearchCoordinatorSvcImpl();
      mySvc.setEntityManagerForUnitTest(myEntityManager);
      mySvc.setTransactionManagerForUnitTest(myTxManager);
@@ -76,9 +92,9 @@ public class SearchCoordinatorSvcImplTest {
 
      myDaoConfig = new DaoConfig();
      mySvc.setDaoConfigForUnitTest(myDaoConfig);
 
      when(myCallingDao.newSearchBuilder()).thenReturn(mySearchBuider);
 
      doAnswer(new Answer<Void>() {
         @Override
         public Void answer(InvocationOnMock theInvocation) throws Throwable {
@@ -89,7 +105,8 @@ public class SearchCoordinatorSvcImplTest {
            provider.setEntityManager(myEntityManager);
            provider.setContext(ourCtx);
            return null;
-         }}).when(myCallingDao).injectDependenciesIntoBundleProvider(any(PersistedJpaBundleProvider.class));
+         }
+      }).when(myCallingDao).injectDependenciesIntoBundleProvider(any(PersistedJpaBundleProvider.class));
    }
 
    private List<Long> createPidSequence(int from, int to) {
@@ -128,7 +145,7 @@ public class SearchCoordinatorSvcImplTest {
 
      doAnswer(loadPids()).when(mySearchBuider).loadResourcesByPid(any(List.class), any(List.class), any(Set.class), anyBoolean(), any(EntityManager.class), any(FhirContext.class), same(myCallingDao));
 
-     IBundleProvider result = mySvc.registerSearch(myCallingDao, params, "Patient");
+     IBundleProvider result = mySvc.registerSearch(myCallingDao, params, "Patient", new CacheControlDirective());
     assertNotNull(result.getUuid());
     assertEquals(null, result.size());
 
@@ -151,12 +168,12 @@ public class SearchCoordinatorSvcImplTest {
 
     doAnswer(loadPids()).when(mySearchBuider).loadResourcesByPid(any(List.class), any(List.class), any(Set.class), anyBoolean(), any(EntityManager.class), any(FhirContext.class), same(myCallingDao));
 
-    IBundleProvider result = mySvc.registerSearch(myCallingDao, params, "Patient");
+    IBundleProvider result = mySvc.registerSearch(myCallingDao, params, "Patient", new CacheControlDirective());
     assertNotNull(result.getUuid());
     assertEquals(null, result.size());
 
     List<IBaseResource> resources;
 
     resources = result.getResources(0, 100000);
     assertEquals(790, resources.size());
     assertEquals("10", resources.get(0).getIdElement().getValueAsString());
@@ -164,18 +181,18 @@ public class SearchCoordinatorSvcImplTest {
 
     ArgumentCaptor<Search> searchCaptor = ArgumentCaptor.forClass(Search.class);
     verify(mySearchDao, atLeastOnce()).save(searchCaptor.capture());
 
     verify(mySearchResultDao, atLeastOnce()).save(mySearchResultIterCaptor.capture());
-    List<SearchResult> allResults= new ArrayList<SearchResult>();
+    List<SearchResult> allResults = new ArrayList<SearchResult>();
     for (Iterable<SearchResult> next : mySearchResultIterCaptor.getAllValues()) {
        allResults.addAll(Lists.newArrayList(next));
     }
 
     assertEquals(790, allResults.size());
     assertEquals(10, allResults.get(0).getResourcePid().longValue());
     assertEquals(799, allResults.get(789).getResourcePid().longValue());
   }
 
   @Test
   public void testAsyncSearchLargeResultSetSameCoordinator() {
      SearchParameterMap params = new SearchParameterMap();
@@ -187,12 +204,12 @@ public class SearchCoordinatorSvcImplTest {
 
     doAnswer(loadPids()).when(mySearchBuider).loadResourcesByPid(any(List.class), any(List.class), any(Set.class), anyBoolean(), any(EntityManager.class), any(FhirContext.class), same(myCallingDao));
 
-    IBundleProvider result = mySvc.registerSearch(myCallingDao, params, "Patient");
+    IBundleProvider result = mySvc.registerSearch(myCallingDao, params, "Patient", new CacheControlDirective());
     assertNotNull(result.getUuid());
     assertEquals(null, result.size());
 
     List<IBaseResource> resources;
 
     resources = result.getResources(0, 30);
     assertEquals(30, resources.size());
     assertEquals("10", resources.get(0).getIdElement().getValueAsString());
@@ -202,7 +219,7 @@ public class SearchCoordinatorSvcImplTest {
 
   /**
    * Subsequent requests for the same search (i.e. a request for the next
    * page) within the same JVM will not use the original bundle provider
    */
   @Test
   public void testAsyncSearchLargeResultSetSecondRequestSameCoordinator() {
@@ -215,7 +232,7 @@ public class SearchCoordinatorSvcImplTest {
 
     doAnswer(loadPids()).when(mySearchBuider).loadResourcesByPid(any(List.class), any(List.class), any(Set.class), anyBoolean(), any(EntityManager.class), any(FhirContext.class), same(myCallingDao));
 
-    IBundleProvider result = mySvc.registerSearch(myCallingDao, params, "Patient");
|
IBundleProvider result = mySvc.registerSearch(myCallingDao, params, "Patient", new CacheControlDirective());
|
||||||
assertNotNull(result.getUuid());
|
assertNotNull(result.getUuid());
|
||||||
assertEquals(null, result.size());
|
assertEquals(null, result.size());
|
||||||
|
|
||||||
|
@ -223,10 +240,10 @@ public class SearchCoordinatorSvcImplTest {
|
||||||
verify(mySearchDao, atLeast(1)).save(searchCaptor.capture());
|
verify(mySearchDao, atLeast(1)).save(searchCaptor.capture());
|
||||||
Search search = searchCaptor.getValue();
|
Search search = searchCaptor.getValue();
|
||||||
assertEquals(SearchTypeEnum.SEARCH, search.getSearchType());
|
assertEquals(SearchTypeEnum.SEARCH, search.getSearchType());
|
||||||
|
|
||||||
List<IBaseResource> resources;
|
List<IBaseResource> resources;
|
||||||
PersistedJpaBundleProvider provider;
|
PersistedJpaBundleProvider provider;
|
||||||
|
|
||||||
resources = result.getResources(0, 10);
|
resources = result.getResources(0, 10);
|
||||||
assertNull(result.size());
|
assertNull(result.size());
|
||||||
assertEquals(10, resources.size());
|
assertEquals(10, resources.size());
|
||||||
|
@ -244,7 +261,7 @@ public class SearchCoordinatorSvcImplTest {
|
||||||
assertEquals(10, resources.size());
|
assertEquals(10, resources.size());
|
||||||
assertEquals("20", resources.get(0).getIdElement().getValueAsString());
|
assertEquals("20", resources.get(0).getIdElement().getValueAsString());
|
||||||
assertEquals("29", resources.get(9).getIdElement().getValueAsString());
|
assertEquals("29", resources.get(9).getIdElement().getValueAsString());
|
||||||
|
|
||||||
provider = new PersistedJpaBundleProvider(result.getUuid(), myCallingDao);
|
provider = new PersistedJpaBundleProvider(result.getUuid(), myCallingDao);
|
||||||
resources = provider.getResources(20, 99999);
|
resources = provider.getResources(20, 99999);
|
||||||
assertEquals(770, resources.size());
|
assertEquals(770, resources.size());
|
||||||
|
@ -265,7 +282,7 @@ public class SearchCoordinatorSvcImplTest {
|
||||||
|
|
||||||
doAnswer(loadPids()).when(mySearchBuider).loadResourcesByPid(any(List.class), any(List.class), any(Set.class), anyBoolean(), any(EntityManager.class), any(FhirContext.class), same(myCallingDao));
|
doAnswer(loadPids()).when(mySearchBuider).loadResourcesByPid(any(List.class), any(List.class), any(Set.class), anyBoolean(), any(EntityManager.class), any(FhirContext.class), same(myCallingDao));
|
||||||
|
|
||||||
IBundleProvider result = mySvc.registerSearch(myCallingDao, params, "Patient");
|
IBundleProvider result = mySvc.registerSearch(myCallingDao, params, "Patient", new CacheControlDirective());
|
||||||
assertNotNull(result.getUuid());
|
assertNotNull(result.getUuid());
|
||||||
assertEquals(90, result.size().intValue());
|
assertEquals(90, result.size().intValue());
|
||||||
|
|
||||||
|
@ -285,18 +302,18 @@ public class SearchCoordinatorSvcImplTest {
|
||||||
@Test
|
@Test
|
||||||
public void testLoadSearchResultsFromDifferentCoordinator() {
|
public void testLoadSearchResultsFromDifferentCoordinator() {
|
||||||
final String uuid = UUID.randomUUID().toString();
|
final String uuid = UUID.randomUUID().toString();
|
||||||
|
|
||||||
final Search search = new Search();
|
final Search search = new Search();
|
||||||
search.setUuid(uuid);
|
search.setUuid(uuid);
|
||||||
search.setSearchType(SearchTypeEnum.SEARCH);
|
search.setSearchType(SearchTypeEnum.SEARCH);
|
||||||
search.setResourceType("Patient");
|
search.setResourceType("Patient");
|
||||||
|
|
||||||
when(mySearchDao.findByUuid(eq(uuid))).thenReturn(search);
|
when(mySearchDao.findByUuid(eq(uuid))).thenReturn(search);
|
||||||
doAnswer(loadPids()).when(mySearchBuider).loadResourcesByPid(any(List.class), any(List.class), any(Set.class), anyBoolean(), any(EntityManager.class), any(FhirContext.class), same(myCallingDao));
|
doAnswer(loadPids()).when(mySearchBuider).loadResourcesByPid(any(List.class), any(List.class), any(Set.class), anyBoolean(), any(EntityManager.class), any(FhirContext.class), same(myCallingDao));
|
||||||
|
|
||||||
PersistedJpaBundleProvider provider;
|
PersistedJpaBundleProvider provider;
|
||||||
List<IBaseResource> resources;
|
List<IBaseResource> resources;
|
||||||
|
|
||||||
new Thread() {
|
new Thread() {
|
||||||
@Override
|
@Override
|
||||||
public void run() {
|
public void run() {
|
||||||
|
@ -305,20 +322,21 @@ public class SearchCoordinatorSvcImplTest {
|
||||||
} catch (InterruptedException e) {
|
} catch (InterruptedException e) {
|
||||||
// ignore
|
// ignore
|
||||||
}
|
}
|
||||||
|
|
||||||
when(mySearchResultDao.findWithSearchUuid(any(Search.class), any(Pageable.class))).thenAnswer(new Answer<Page<SearchResult>>() {
|
when(mySearchResultDao.findWithSearchUuid(any(Search.class), any(Pageable.class))).thenAnswer(new Answer<Page<SearchResult>>() {
|
||||||
@Override
|
@Override
|
||||||
public Page<SearchResult> answer(InvocationOnMock theInvocation) throws Throwable {
|
public Page<SearchResult> answer(InvocationOnMock theInvocation) throws Throwable {
|
||||||
Pageable page = (Pageable) theInvocation.getArguments()[1];
|
Pageable page = (Pageable) theInvocation.getArguments()[1];
|
||||||
|
|
||||||
ArrayList<SearchResult> results = new ArrayList<SearchResult>();
|
ArrayList<SearchResult> results = new ArrayList<SearchResult>();
|
||||||
int max = (page.getPageNumber() * page.getPageSize()) + page.getPageSize();
|
int max = (page.getPageNumber() * page.getPageSize()) + page.getPageSize();
|
||||||
for (int i = page.getOffset(); i < max; i++) {
|
for (int i = page.getOffset(); i < max; i++) {
|
||||||
results.add(new SearchResult().setResourcePid(i + 10L));
|
results.add(new SearchResult().setResourcePid(i + 10L));
|
||||||
}
|
}
|
||||||
|
|
||||||
return new PageImpl<SearchResult>(results);
|
return new PageImpl<SearchResult>(results);
|
||||||
}});
|
}
|
||||||
|
});
|
||||||
search.setStatus(SearchStatusEnum.FINISHED);
|
search.setStatus(SearchStatusEnum.FINISHED);
|
||||||
}
|
}
|
||||||
}.start();
|
}.start();
|
||||||
|
@ -332,7 +350,7 @@ public class SearchCoordinatorSvcImplTest {
|
||||||
assertEquals(10, resources.size());
|
assertEquals(10, resources.size());
|
||||||
assertEquals("20", resources.get(0).getIdElement().getValueAsString());
|
assertEquals("20", resources.get(0).getIdElement().getValueAsString());
|
||||||
assertEquals("29", resources.get(9).getIdElement().getValueAsString());
|
assertEquals("29", resources.get(9).getIdElement().getValueAsString());
|
||||||
|
|
||||||
provider = new PersistedJpaBundleProvider(uuid, myCallingDao);
|
provider = new PersistedJpaBundleProvider(uuid, myCallingDao);
|
||||||
resources = provider.getResources(20, 40);
|
resources = provider.getResources(20, 40);
|
||||||
assertEquals(20, resources.size());
|
assertEquals(20, resources.size());
|
||||||
|
@ -353,7 +371,7 @@ public class SearchCoordinatorSvcImplTest {
|
||||||
|
|
||||||
doAnswer(loadPids()).when(mySearchBuider).loadResourcesByPid(eq(pids), any(List.class), any(Set.class), anyBoolean(), any(EntityManager.class), any(FhirContext.class), same(myCallingDao));
|
doAnswer(loadPids()).when(mySearchBuider).loadResourcesByPid(eq(pids), any(List.class), any(Set.class), anyBoolean(), any(EntityManager.class), any(FhirContext.class), same(myCallingDao));
|
||||||
|
|
||||||
IBundleProvider result = mySvc.registerSearch(myCallingDao, params, "Patient");
|
IBundleProvider result = mySvc.registerSearch(myCallingDao, params, "Patient", new CacheControlDirective());
|
||||||
assertNull(result.getUuid());
|
assertNull(result.getUuid());
|
||||||
assertEquals(790, result.size().intValue());
|
assertEquals(790, result.size().intValue());
|
||||||
|
|
||||||
|
@ -375,7 +393,7 @@ public class SearchCoordinatorSvcImplTest {
|
||||||
pids = createPidSequence(10, 110);
|
pids = createPidSequence(10, 110);
|
||||||
doAnswer(loadPids()).when(mySearchBuider).loadResourcesByPid(eq(pids), any(List.class), any(Set.class), anyBoolean(), any(EntityManager.class), any(FhirContext.class), same(myCallingDao));
|
doAnswer(loadPids()).when(mySearchBuider).loadResourcesByPid(eq(pids), any(List.class), any(Set.class), anyBoolean(), any(EntityManager.class), any(FhirContext.class), same(myCallingDao));
|
||||||
|
|
||||||
IBundleProvider result = mySvc.registerSearch(myCallingDao, params, "Patient");
|
IBundleProvider result = mySvc.registerSearch(myCallingDao, params, "Patient", new CacheControlDirective());
|
||||||
assertNull(result.getUuid());
|
assertNull(result.getUuid());
|
||||||
assertEquals(100, result.size().intValue());
|
assertEquals(100, result.size().intValue());
|
||||||
|
|
||||||
|
@ -394,7 +412,7 @@ public class SearchCoordinatorSvcImplTest {
|
||||||
|
|
||||||
private int myCount;
|
private int myCount;
|
||||||
private Iterator<T> myWrap;
|
private Iterator<T> myWrap;
|
||||||
|
|
||||||
public FailAfterNIterator(Iterator<T> theWrap, int theCount) {
|
public FailAfterNIterator(Iterator<T> theWrap, int theCount) {
|
||||||
myWrap = theWrap;
|
myWrap = theWrap;
|
||||||
myCount = theCount;
|
myCount = theCount;
|
||||||
|
@ -416,7 +434,7 @@ public class SearchCoordinatorSvcImplTest {
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
public static class SlowIterator<T> extends BaseIterator<T> implements Iterator<T> {
|
public static class SlowIterator<T> extends BaseIterator<T> implements Iterator<T> {
|
||||||
|
|
||||||
private int myDelay;
|
private int myDelay;
|
||||||
|
|
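The tests above now pass a CacheControlDirective into registerSearch; conceptually, that object is just a parsed view of the request's Cache-Control header. Below is a small, self-contained sketch of that parsing idea. It is illustrative only: the ParsedCacheControl class and its parse method are hypothetical stand-ins, not part of HAPI FHIR, and they cover just the three directives this feature uses (no-cache, no-store, and the max-results extension).

// Illustrative only: a hypothetical stand-in for the idea behind CacheControlDirective.
class ParsedCacheControl {
	boolean noCache;
	boolean noStore;
	Integer maxResults;

	// Parse a raw header value such as "no-store, max-results=20"
	static ParsedCacheControl parse(String theHeaderValue) {
		ParsedCacheControl retVal = new ParsedCacheControl();
		if (theHeaderValue == null) {
			return retVal;
		}
		for (String token : theHeaderValue.split(",")) {
			String directive = token.trim().toLowerCase();
			if ("no-cache".equals(directive)) {
				retVal.noCache = true;
			} else if ("no-store".equals(directive)) {
				retVal.noStore = true;
			} else if (directive.startsWith("max-results=")) {
				retVal.maxResults = Integer.valueOf(directive.substring("max-results=".length()).trim());
			}
		}
		return retVal;
	}

	public static void main(String[] args) {
		ParsedCacheControl cc = parse("no-store, max-results=20");
		// Prints: noStore=true maxResults=20
		System.out.println("noStore=" + cc.noStore + " maxResults=" + cc.maxResults);
	}
}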
@@ -27,6 +27,16 @@
 				has been changed to only accept "url".
 				Thanks to Avinash Shanbhag for reporting!
 			</action>
+			<action type="add">
+				JPA server now supports the use of the
+				<![CDATA[<code>Cache-Control</code>]]>
+				header in order to allow the client to selectively disable the
+				search result cache. This directive can also be used to disable result paging
+				and return results faster when only a small number of results is needed.
+				See the
+				<![CDATA[<a href="http://hapifhir.io/doc_jpa.html">JPA Page</a>]]>
+				for more information.
+			</action>
 		</release>
 		<release version="3.0.0" date="2017-09-27">
 			<action type="add">
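To make the changelog entry above concrete, the request-level directives can be attached to a search through the generic client. This is a hedged sketch only: it assumes CacheControlDirective exposes chained setNoStore and setMaxResults setters mirroring its getters, and the endpoint URL is a placeholder.

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.api.CacheControlDirective;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import org.hl7.fhir.dstu3.model.Bundle;
import org.hl7.fhir.dstu3.model.Patient;

public class CacheControlPagingSketch {

	public static void main(String[] args) {
		FhirContext ctx = FhirContext.forDstu3();
		// Placeholder base URL - point this at your own JPA server
		IGenericClient client = ctx.newRestfulGenericClient("http://localhost:8080/fhir");

		// Ask the server not to store the results of this search, and to stop
		// fetching once 20 matches have been found (the HAPI-specific
		// max-results extension described in the changelog entry above).
		// NOTE: setNoStore/setMaxResults are assumed to chain like setNoCache does.
		Bundle results = client
			.search()
			.forResource(Patient.class)
			.returnBundle(Bundle.class)
			.cacheControl(new CacheControlDirective().setNoStore(true).setMaxResults(20))
			.execute();

		System.out.println("Matches returned: " + results.getEntry().size());
	}
}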
@@ -1,315 +1,372 @@
 <?xml version="1.0" encoding="UTF-8"?>
 <document xmlns="http://maven.apache.org/XDOC/2.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/XDOC/2.0 http://maven.apache.org/xsd/xdoc-2.0.xsd">
 
 	<properties>
 		<title>JPA Server</title>
 		<author email="jamesagnew@users.sourceforge.net">James Agnew</author>
 	</properties>
 
 	<body>
 
 		<section name="JPA Server">
 
 			<p>
 				The HAPI FHIR
 				<a href="./doc_rest_server.html">RestfulServer</a>
 				module can be used to create a FHIR server endpoint against an arbitrary
 				data source, which could be a database of your own design, an existing
 				clinical system, a set of files, or anything else you come up with.
 			</p>
 			<p>
 				HAPI also provides a persistence module which can be used to
 				provide a complete RESTful server implementation, backed by a database of
 				your choosing. This module uses the <a href="http://en.wikipedia.org/wiki/Java_Persistence_API">JPA 2.0</a>
 				API to store data in a database without depending on any specific database technology.
 			</p>
 			<p>
 				<b>Important Note: </b>
 				This implementation uses a fairly simple table design, with a
 				single table being used to hold resource bodies (which are stored as
 				CLOBs, optionally GZipped to save space) and a set of tables to hold search indexes, tags,
 				history details, etc. This design is only one of many possible ways
 				of designing a FHIR server so it is worth considering whether it
 				is appropriate for the problem you are trying to solve.
 			</p>
 
 			<subsection name="Getting Started">
 
 				<p>
 					The easiest way to get started with HAPI's JPA server module is
 					to begin with the example project. There is a complete sample project
 					found in our GitHub repo here: <a href="https://github.com/jamesagnew/hapi-fhir/tree/master/hapi-fhir-jpaserver-example">hapi-fhir-jpaserver-example</a>
 				</p>
 
 				<p>
 					This example is a fully contained FHIR server, supporting all standard operations (read/create/delete/etc).
 					It bundles an embedded instance of the <a href="http://db.apache.org/derby/">Apache Derby</a> Java database
 					so that the server can run without depending on any external database, but it can also be
 					configured to use an installation of Oracle, Postgres, etc.
 				</p>
 
 				<p>
 					To take this project for a spin, check out the sources from GitHub (or download a snapshot),
 					and then build the project:
 				</p>
 
 				<source><![CDATA[$ cd hapi-fhir-jpaserver-example
$ mvn install]]></source>
 
 				<p>
 					You now have two options for starting the server:
 				</p>
 				<ul>
 					<li>
 						<b>Deploy to Tomcat/JBoss/Websphere/etc: </b> You will now have a file
 						in your <code>target</code> directory called <code>hapi-fhir-jpaserver-example.war</code>.
 						This WAR file can be deployed to any Servlet container, at which point you could
 						access the server by pointing your browser at a URL similar to the following
 						(you may need to adjust the
 						port depending on which port your container is configured to listen on):
 						<a href="http://localhost:8080/hapi-fhir-jpaserver-example/">http://localhost:8080/hapi-fhir-jpaserver-example/</a>
 					</li>
 					<li>
 						<b>Run with Maven and Embedded Jetty: </b> To start the server
 						directly within Maven, you can execute the following command:<br/>
 						<source>$ mvn jetty:run</source>
 						You can then access the server by pointing your browser at the following URL:
 						<a href="http://localhost:8080/hapi-fhir-jpaserver-example/">http://localhost:8080/hapi-fhir-jpaserver-example/</a>
 					</li>
 				</ul>
 			</subsection>
 		</section>
 
 		<section name="Configuring The JPA Server">
 
 			<p>
 				The JPA server is configured through a series of configuration files, most
 				of which are documented inline.
 			</p>
 			<ul>
 				<li>
 					<a href="https://github.com/jamesagnew/hapi-fhir/blob/master/hapi-fhir-jpaserver-example/src/main/java/ca/uhn/fhir/jpa/demo/FhirServerConfig.java"><b>FhirServerConfig.java</b></a>:
 					Configures the database connection settings
 				</li>
 			</ul>
 
 		</section>
 
 		<section name="DaoConfig">
 
 			<p>
 				The Spring configuration contains a definition for a bean called <code>daoConfig</code>,
 				which will look something like the following:
 			</p>
 			<source><![CDATA[@Bean()
public DaoConfig daoConfig() {
	DaoConfig retVal = new DaoConfig();
	retVal.setAllowMultipleDelete(true);
	retVal.setAllowInlineMatchUrlReferences(true);
	return retVal;
}]]></source>
 
 			<p>
 				You can use this method to change various configuration settings on the DaoConfig bean
 				which define the way that the JPA server will behave.
 				See the <a href="./apidocs-jpaserver/ca/uhn/fhir/jpa/dao/DaoConfig.html">DaoConfig JavaDoc</a>
 				for information about the available settings.
 			</p>
 
 			<subsection name="External/Absolute Resource References">
 
 				<p>
 					Clients may sometimes post resources to your server that contain
 					absolute resource references. For example, consider the following resource:
 				</p>
 				<source><![CDATA[<Patient xmlns="http://hl7.org/fhir">
	<id value="patient-infant-01"/>
	<name>
		<use value="official"/>
		<family value="Miller"/>
		<given value="Samuel"/>
	</name>
	<managingOrganization>
		<reference value="http://example.com/fhir/Organization/123"/>
	</managingOrganization>
</Patient>]]></source>
 
 				<p>
 					By default, the server will reject this reference, as only
 					local references are permitted by the server. This can be changed
 					however.
 				</p>
 				<p>
 					If you want the server to recognize that this URL is actually a local
 					reference (i.e. because the server will be deployed to the base URL
 					<code>http://example.com/fhir/</code>) you can
 					configure the server to recognize this URL via the following DaoConfig
 					setting:
 				</p>
 				<source><![CDATA[@Bean()
public DaoConfig daoConfig() {
	DaoConfig retVal = new DaoConfig();
	// ... other config ...
	retVal.getTreatBaseUrlsAsLocal().add("http://example.com/fhir/");
	return retVal;
}]]></source>
 
 				<p>
 					On the other hand, if you want the server to be configurable to
 					allow remote references, you can set this with the configuration below.
 					Using the <code>setAllowExternalReferences</code> means that
 					it will be possible to search for references that refer to these
 					external references.
 				</p>
 
 				<source><![CDATA[@Bean()
public DaoConfig daoConfig() {
	DaoConfig retVal = new DaoConfig();
	// Allow external references
	retVal.setAllowExternalReferences(true);

	// If you are allowing external references, it is recommended to
	// also tell the server which references actually will be local
	retVal.getTreatBaseUrlsAsLocal().add("http://mydomain.com/fhir");
	return retVal;
}]]></source>
 			</subsection>
 
 			<subsection name="Logical References">
 
 				<p>
 					In some cases, you may have references which are <i>Logical References</i>,
 					which means that they act as an identifier and not necessarily as a literal
 					web address.
 				</p>
 				<p>
 					A common use for logical references is in references to conformance
 					resources, such as ValueSets, StructureDefinitions, etc. For example,
 					you might refer to the ValueSet
 					<code>http://hl7.org/fhir/ValueSet/quantity-comparator</code>
 					from your own resources. In this case, you are not necessarily telling
 					the server that this is a real address that it should resolve, but
 					rather that this is an identifier for a ValueSet where
 					<code>ValueSet.url</code> has the given URI/URL.
 				</p>
 				<p>
 					HAPI can be configured to treat certain URI/URL patterns as
 					logical by using the DaoConfig#setTreatReferencesAsLogical property
 					(see <a href="./apidocs-jpaserver/ca/uhn/fhir/jpa/dao/DaoConfig.html#setTreatReferencesAsLogical-java.util.Set-">JavaDoc</a>).
 					For example:
 				</p>
-				<code>
-					// Treat specific URL as logical
-					myDaoConfig.getTreatReferencesAsLogical().add("http://mysystem.com/ValueSet/cats-and-dogs");
-
-					// Treat all references with given prefix as logical
-					myDaoConfig.getTreatReferencesAsLogical().add("http://mysystem.com/mysystem-vs-*");
-				</code>
+				<div class="source">
+					<pre>
+// Treat specific URL as logical
+myDaoConfig.getTreatReferencesAsLogical().add("http://mysystem.com/ValueSet/cats-and-dogs");
+
+// Treat all references with given prefix as logical
+myDaoConfig.getTreatReferencesAsLogical().add("http://mysystem.com/mysystem-vs-*");
+					</pre>
+				</div>
+				<a name="search_result caching"/>
 			</subsection>
 
+			<subsection name="Search Result Caching">
+
+				<p>
+					By default, search results will be cached for one minute. This means that
+					if a client performs a search for <code>Patient?name=smith</code> and gets back
+					500 results, and then performs the same search again within 60000 milliseconds,
+					the previously loaded search results will be returned again. This also means that
+					any new Patient resources named "Smith" created within the last minute will not be
+					reflected in the results.
+				</p>
+				<p>
+					Under many normal scenarios this is an acceptable performance tradeoff,
+					but in some cases it is not. If you want to disable caching, you have two
+					options:
+				</p>
+				<p><b>Globally Disable / Change Caching Timeout</b></p>
+				<p>
+					You can change the global cache using the following setting:
+				</p>
+				<div class="source">
+					<pre>
+myDaoConfig.setReuseCachedSearchResultsForMillis(null);
+					</pre>
+				</div>
+				<p><b>Disable Cache at the Request Level</b></p>
+				<p>
+					Clients can selectively disable caching for an individual request
+					using the Cache-Control header:
+				</p>
+				<div class="source">
+					<pre>
+Cache-Control: no-cache
+					</pre>
+				</div>
+				<p><b>Disable Paging at the Request Level</b></p>
+				<p>
+					If the client knows that it will only want a small number of results
+					(for example, a UI containing 20 results is being shown and the client
+					knows that it will never load the next page of results) the client
+					may also use the <code>no-store</code> directive along with a HAPI FHIR
+					extension called <code>max-results</code> in order to specify that
+					only the given number of results should be fetched. This directive
+					disables paging entirely for the request and causes the request to
+					return immediately when the given number of results is found. This
+					can cause a noticeable performance improvement in some cases.
+				</p>
+				<div class="source">
+					<pre>
+Cache-Control: no-store, max-results=20
+					</pre>
+				</div>
+
+			</subsection>
+
 		</section>
 
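If disabling the result cache outright (the null value shown in the new subsection above) is heavier-handed than needed, the same DaoConfig property can instead be given a longer expiry. A minimal sketch, assuming the setter takes the timeout in milliseconds as a Long, in the same @Bean style used elsewhere on this page:

import ca.uhn.fhir.jpa.dao.DaoConfig;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
public class CacheTuningConfig {

	@Bean
	public DaoConfig daoConfig() {
		DaoConfig retVal = new DaoConfig();
		// Reuse cached search results for ten minutes instead of the default
		// one minute (assumption: the property is a millisecond timeout)
		retVal.setReuseCachedSearchResultsForMillis(10 * 60 * 1000L);
		return retVal;
	}
}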
 		<section name="Architecture">
 
 			<img src="images/jpa_architecture.png" alt="Architecture" align="right"/>
 
 			<p>
 				The HAPI JPA Server has the following components:
 			</p>
 
 			<ul>
 				<li>
 					<b>Resource Providers: </b>
 					A RESTful server <a href="./doc_rest_server.html#resource_providers">Resource Provider</a> is
 					provided for each resource type in a given release of FHIR. Each resource provider implements
 					a
 					<a href="./apidocs/ca/uhn/fhir/rest/annotation/Search.html">@Search</a>
 					method implementing the complete set of search parameters defined in the FHIR
 					specification for the given resource type.<br/><br/>
 					The resource providers also extend a superclass which implements all of the
 					other FHIR methods, such as Read, Create, Delete, etc.<br/><br/>
 					Note that these resource providers are generated as a part of the HAPI build process,
 					so they are not checked into Git. You can see their source
 					in the <a href="./xref-jpaserver/">JXR Report</a>,
 					for example the
 					<a href="./xref-jpaserver/ca/uhn/fhir/jpa/rp/dstu2/PatientResourceProvider.html">PatientResourceProvider</a>.
 					<br/><br/>
 					The resource providers do not actually implement any of the logic
 					in searching, updating, etc. They simply receive the incoming HTTP calls (via the RestfulServer)
 					and pass along the incoming requests to the DAOs.
 					<br/><br/>
 				</li>
 				<li>
 					<b>HAPI DAOs: </b>
 					The DAOs actually implement all of the database business logic relating to
 					the storage, indexing, and retrieval of FHIR resources, using the underlying JPA
 					API.
 					<br/><br/>
 				</li>
 				<li>
 					<b>Hibernate: </b>
 					The HAPI JPA Server uses the JPA library, implemented by Hibernate. No Hibernate
 					specific features are used, so the library should also work with other
 					providers (e.g. Eclipselink) but it is not tested regularly with them.
 					<br/><br/>
 				</li>
 				<li>
 					<b>Database: </b>
 					The RESTful server uses an embedded Derby database, but can be configured to
 					talk to
 					<a href="https://developer.jboss.org/wiki/SupportedDatabases2?_sscc=t">any database supported by Hibernate</a>.
 				</li>
 
 			</ul>
 
 		</section>
 
 		<section name="Additional Information">
 
 			<ul>
 				<li>
 					<a href="https://www.openhealthhub.org/t/hapi-terminology-server-uk-snomed-ct-import/592">This page</a>
 					has information on loading national editions (UK specifically) of SNOMED CT files into
 					the database.
 				</li>
 			</ul>
 
 		</section>
 
 		<!--
 		alter table hfj_res_link ALTER COLUMN "TARGET_RESOURCE_ID" NULL;
 
 		select sp_index_status, count(*) from hfj_resource group by sp_index_status
 		delete from hfj_history_tag where res_id in (select res_id from hfj_resource where sp_index_status = 2);
 		delete from hfj_res_tag where res_id in (select res_id from hfj_resource where sp_index_status = 2);
 		delete from hfj_spidx_coords where res_id in (select res_id from hfj_resource where sp_index_status = 2);
 		delete from hfj_spidx_number where res_id in (select res_id from hfj_resource where sp_index_status = 2);
 		delete from hfj_spidx_quantity where res_id in (select res_id from hfj_resource where sp_index_status = 2);
 		delete from hfj_spidx_string where res_id in (select res_id from hfj_resource where sp_index_status = 2);
 		delete from hfj_spidx_token where res_id in (select res_id from hfj_resource where sp_index_status = 2);
 		delete from hfj_spidx_uri where res_id in (select res_id from hfj_resource where sp_index_status = 2);
 		delete from hfj_search_result where resource_pid in (select res_id from hfj_resource where sp_index_status = 2);
 		delete from hfj_res_link where src_resource_id in (select res_id from hfj_resource where sp_index_status = 2);
 		delete from hfj_res_link where target_resource_id in (select res_id from hfj_resource where sp_index_status = 2);
 		delete from hfj_subscription where res_id in (select res_id from hfj_resource where sp_index_status = 2);
 		delete from hfj_subscription_flag_res where res_id in (select res_id from hfj_resource where sp_index_status = 2);
 
 		delete from trm_concept_pc_link where pid in (select pid from trm_concept where codesystem_pid in (select pid from trm_codesystem_ver where res_id in (select res_id from hfj_resource where sp_index_status = 2)));
 		delete from trm_concept where codesystem_pid in (select pid from trm_codesystem_ver where res_id in (select res_id from hfj_resource where sp_index_status = 2));
 		delete from trm_codesystem_ver where res_id in (select res_id from hfj_resource where sp_index_status = 2);
 		delete from trm_codesystem where res_id in (select res_id from hfj_resource where sp_index_status = 2);
 
 		update hfj_resource set forced_id_pid = null where res_id in (select res_id from hfj_resource where sp_index_status = 2);
 		update hfj_res_ver set forced_id_pid = null where res_id in (select res_id from hfj_resource where sp_index_status = 2);
 		delete from hfj_forced_id where resource_pid in (select res_id from hfj_resource where sp_index_status = 2);
 		delete from hfj_resource where res_id in (select res_id from hfj_resource where sp_index_status = 2);
 		delete from hfj_res_ver where res_id in (select res_id from hfj_resource where sp_index_status = 2);
 		-->
 
 	</body>
 
 </document>