Merge branch 'master' of github.com:jamesagnew/hapi-fhir
This commit is contained in:
commit b4127674e4
ClientExamples.java:

@@ -1,5 +1,6 @@
package example;

import ca.uhn.fhir.rest.api.CacheControlDirective;
import org.hl7.fhir.dstu3.model.Bundle;

import ca.uhn.fhir.context.FhirContext;
@@ -8,6 +9,7 @@ import ca.uhn.fhir.rest.api.EncodingEnum;
import ca.uhn.fhir.rest.client.apache.GZipContentInterceptor;
import ca.uhn.fhir.rest.client.api.*;
import ca.uhn.fhir.rest.client.interceptor.*;
import org.hl7.fhir.r4.model.Patient;

public class ClientExamples {

@@ -52,6 +54,26 @@ public class ClientExamples {
      // END SNIPPET: processMessage
   }

   @SuppressWarnings("unused")
   public void cacheControl() {
      FhirContext ctx = FhirContext.forDstu3();

      // Create the client
      IGenericClient client = ctx.newRestfulGenericClient("http://localhost:9999/fhir");

      Bundle bundle = new Bundle();
      // ..populate the bundle..

      // START SNIPPET: cacheControl
      Bundle response = client
         .search()
         .forResource(Patient.class)
         .returnBundle(Bundle.class)
         .cacheControl(new CacheControlDirective().setNoCache(true)) // <-- add a directive
         .execute();
      // END SNIPPET: cacheControl
   }

   @SuppressWarnings("unused")
   public void createOkHttp() {
      // START SNIPPET: okhttp
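For reference, a sketch of what this snippet is expected to put on the wire, given the header construction added to BaseClient later in this commit (the base URL is the placeholder from the example above):

   GET http://localhost:9999/fhir/Patient
   Cache-Control: no-cache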
CacheControlDirective.java (new file):

@@ -0,0 +1,108 @@
package ca.uhn.fhir.rest.api;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.List;
import java.util.StringTokenizer;

import static org.apache.commons.lang3.StringUtils.trim;

/**
 * Parses and stores the value(s) within HTTP Cache-Control headers
 */
public class CacheControlDirective {

   private static final String MAX_RESULTS_EQUALS = Constants.CACHE_CONTROL_MAX_RESULTS + "=";
   private static final Logger ourLog = LoggerFactory.getLogger(CacheControlDirective.class);
   private boolean myNoCache;
   private boolean myNoStore;
   private Integer myMaxResults;

   /**
    * Constructor
    */
   public CacheControlDirective() {
      super();
   }

   /**
    * If the {@link #isNoStore() no-store} directive is set, this HAPI FHIR extension
    * to the <code>Cache-Control</code> header called <code>max-results=123</code>
    * specifies the maximum number of results which will be fetched from the
    * database before returning.
    */
   public Integer getMaxResults() {
      return myMaxResults;
   }

   /**
    * If the {@link #isNoStore() no-store} directive is set, this HAPI FHIR extension
    * to the <code>Cache-Control</code> header called <code>max-results=123</code>
    * specifies the maximum number of results which will be fetched from the
    * database before returning.
    */
   public CacheControlDirective setMaxResults(Integer theMaxResults) {
      myMaxResults = theMaxResults;
      return this;
   }

   /**
    * If <code>true</code>, adds the <code>no-cache</code> directive to the
    * request. This directive indicates that the cache should not be used to
    * serve this request.
    */
   public boolean isNoCache() {
      return myNoCache;
   }

   /**
    * If <code>true</code>, adds the <code>no-cache</code> directive to the
    * request. This directive indicates that the cache should not be used to
    * serve this request.
    */
   public CacheControlDirective setNoCache(boolean theNoCache) {
      myNoCache = theNoCache;
      return this;
   }

   public boolean isNoStore() {
      return myNoStore;
   }

   public CacheControlDirective setNoStore(boolean theNoStore) {
      myNoStore = theNoStore;
      return this;
   }

   /**
    * Parses a list of <code>Cache-Control</code> header values
    *
    * @param theValues The <code>Cache-Control</code> header values
    */
   public CacheControlDirective parse(List<String> theValues) {
      if (theValues != null) {
         for (String nextValue : theValues) {
            StringTokenizer tok = new StringTokenizer(nextValue, ",");
            while (tok.hasMoreTokens()) {
               String next = trim(tok.nextToken());
               if (Constants.CACHE_CONTROL_NO_CACHE.equals(next)) {
                  myNoCache = true;
               } else if (Constants.CACHE_CONTROL_NO_STORE.equals(next)) {
                  myNoStore = true;
               } else if (next.startsWith(MAX_RESULTS_EQUALS)) {
                  String valueString = trim(next.substring(MAX_RESULTS_EQUALS.length()));
                  try {
                     myMaxResults = Integer.parseInt(valueString);
                  } catch (NumberFormatException e) {
                     ourLog.warn("Invalid {} value: {}", Constants.CACHE_CONTROL_MAX_RESULTS, valueString);
                  }

               }
            }
         }
      }

      return this;
   }
}
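A minimal usage sketch for the parser above (the header value here is illustrative; in the JPA server the list comes from the request's Cache-Control headers, as shown in BaseHapiFhirResourceDao below):

   // Parse a raw header value into a directive
   CacheControlDirective directive = new CacheControlDirective();
   directive.parse(java.util.Arrays.asList("no-store, max-results=20"));
   // directive.isNoStore()     --> true
   // directive.getMaxResults() --> 20
   // directive.isNoCache()     --> false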
Constants.java:

@@ -25,6 +25,9 @@ import java.util.*;

public class Constants {

   public static final String CACHE_CONTROL_MAX_RESULTS = "max-results";
   public static final String CACHE_CONTROL_NO_CACHE = "no-cache";
   public static final String CACHE_CONTROL_NO_STORE = "no-store";
   public static final String CHARSET_NAME_UTF8 = "UTF-8";
   public static final Charset CHARSET_UTF8;
   public static final String CHARSET_UTF8_CTSUFFIX = "; charset=" + CHARSET_NAME_UTF8;
@@ -67,6 +70,7 @@ public class Constants {
   public static final String HEADER_AUTHORIZATION = "Authorization";
   public static final String HEADER_AUTHORIZATION_VALPREFIX_BASIC = "Basic ";
   public static final String HEADER_AUTHORIZATION_VALPREFIX_BEARER = "Bearer ";
   public static final String HEADER_CACHE_CONTROL = "Cache-Control";
   public static final String HEADER_CONTENT_DISPOSITION = "Content-Disposition";
   public static final String HEADER_CONTENT_ENCODING = "Content-Encoding";
   public static final String HEADER_CONTENT_LOCATION = "Content-Location";
IClientExecutable.java:

@@ -1,5 +1,6 @@
package ca.uhn.fhir.rest.gclient;

import ca.uhn.fhir.rest.api.CacheControlDirective;
import ca.uhn.fhir.rest.api.EncodingEnum;
import ca.uhn.fhir.rest.api.SummaryEnum;
import org.hl7.fhir.instance.model.api.IBaseResource;
@@ -38,6 +39,12 @@ public interface IClientExecutable<T extends IClientExecutable<?,Y>, Y> {
   @Deprecated
   T andLogRequestAndResponse(boolean theLogRequestAndResponse);

   /**
    * Sets the <code>Cache-Control</code> header value, which advises the server (or any cache in front of it)
    * how to behave in terms of cached requests
    */
   T cacheControl(CacheControlDirective theCacheControlDirective);

   /**
    * Request that the server return subsetted resources, containing only the elements specified in the given parameters.
    * For example: <code>subsetElements("name", "identifier")</code> requests that the server only return
CacheControlDirectiveTest.java (new file):

@@ -0,0 +1,58 @@
package ca.uhn.fhir.rest.api;

import org.junit.Test;

import java.util.Arrays;
import java.util.List;

import static org.junit.Assert.*;

public class CacheControlDirectiveTest {

   @Test
   public void testParseNoCache() {
      List<String> values = Arrays.asList(Constants.CACHE_CONTROL_NO_CACHE);
      CacheControlDirective ccd = new CacheControlDirective();
      ccd.parse(values);
      assertTrue(ccd.isNoCache());
      assertFalse(ccd.isNoStore());
   }

   @Test
   public void testParseNoCacheNoStore() {
      List<String> values = Arrays.asList(Constants.CACHE_CONTROL_NO_CACHE + " , " + Constants.CACHE_CONTROL_NO_STORE);
      CacheControlDirective ccd = new CacheControlDirective();
      ccd.parse(values);
      assertTrue(ccd.isNoCache());
      assertTrue(ccd.isNoStore());
      assertEquals(null, ccd.getMaxResults());
   }

   @Test
   public void testParseNoCacheNoStoreMaxResults() {
      List<String> values = Arrays.asList(Constants.CACHE_CONTROL_NO_STORE + ", " + Constants.CACHE_CONTROL_MAX_RESULTS + "=5");
      CacheControlDirective ccd = new CacheControlDirective();
      ccd.parse(values);
      assertFalse(ccd.isNoCache());
      assertTrue(ccd.isNoStore());
      assertEquals(5, ccd.getMaxResults().intValue());
   }

   @Test
   public void testParseNoCacheNoStoreMaxResultsInvalid() {
      List<String> values = Arrays.asList(Constants.CACHE_CONTROL_NO_STORE + ", " + Constants.CACHE_CONTROL_MAX_RESULTS + "=A");
      CacheControlDirective ccd = new CacheControlDirective();
      ccd.parse(values);
      assertFalse(ccd.isNoCache());
      assertTrue(ccd.isNoStore());
      assertEquals(null, ccd.getMaxResults());
   }

   @Test
   public void testParseNull() {
      CacheControlDirective ccd = new CacheControlDirective();
      ccd.parse(null);
      assertFalse(ccd.isNoCache());
      assertFalse(ccd.isNoStore());
   }
}
BaseClient.java:

@@ -34,6 +34,7 @@ import java.util.List;
import java.util.Map;
import java.util.Set;

import ca.uhn.fhir.rest.api.CacheControlDirective;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.Validate;
@@ -135,7 +136,7 @@ public abstract class BaseClient implements IRestfulClient {
   public <T extends IBaseResource> T fetchResourceFromUrl(Class<T> theResourceType, String theUrl) {
      BaseHttpClientInvocation clientInvocation = new HttpGetClientInvocation(getFhirContext(), theUrl);
      ResourceResponseHandler<T> binding = new ResourceResponseHandler<T>(theResourceType);
      return invokeClient(getFhirContext(), binding, clientInvocation, null, false, false, null, null);
      return invokeClient(getFhirContext(), binding, clientInvocation, null, false, false, null, null, null);
   }

   void forceConformanceCheck() {
@@ -198,11 +199,11 @@ public abstract class BaseClient implements IRestfulClient {
   }

   <T> T invokeClient(FhirContext theContext, IClientResponseHandler<T> binding, BaseHttpClientInvocation clientInvocation, boolean theLogRequestAndResponse) {
      return invokeClient(theContext, binding, clientInvocation, null, null, theLogRequestAndResponse, null, null);
      return invokeClient(theContext, binding, clientInvocation, null, null, theLogRequestAndResponse, null, null, null);
   }

   <T> T invokeClient(FhirContext theContext, IClientResponseHandler<T> binding, BaseHttpClientInvocation clientInvocation, EncodingEnum theEncoding, Boolean thePrettyPrint,
         boolean theLogRequestAndResponse, SummaryEnum theSummaryMode, Set<String> theSubsetElements) {
         boolean theLogRequestAndResponse, SummaryEnum theSummaryMode, Set<String> theSubsetElements, CacheControlDirective theCacheControlDirective) {

      if (!myDontValidateConformance) {
         myFactory.validateServerBaseIfConfiguredToDoSo(myUrlBase, myClient, this);
@@ -244,6 +245,18 @@ public abstract class BaseClient implements IRestfulClient {

      httpRequest = clientInvocation.asHttpRequest(myUrlBase, params, encoding, thePrettyPrint);

      if (theCacheControlDirective != null) {
         StringBuilder b = new StringBuilder();
         addToCacheControlHeader(b, Constants.CACHE_CONTROL_NO_CACHE, theCacheControlDirective.isNoCache());
         addToCacheControlHeader(b, Constants.CACHE_CONTROL_NO_STORE, theCacheControlDirective.isNoStore());
         if (theCacheControlDirective.getMaxResults() != null) {
            addToCacheControlHeader(b, Constants.CACHE_CONTROL_MAX_RESULTS + "=" + Integer.toString(theCacheControlDirective.getMaxResults().intValue()), true);
         }
         if (b.length() > 0) {
            httpRequest.addHeader(Constants.HEADER_CACHE_CONTROL, b.toString());
         }
      }

      if (theLogRequestAndResponse) {
         ourLog.info("Client invoking: {}", httpRequest);
         String body = httpRequest.getRequestBodyFromStream();
@@ -366,6 +379,15 @@ public abstract class BaseClient implements IRestfulClient {
      }
   }

   private void addToCacheControlHeader(StringBuilder theBuilder, String theDirective, boolean theActive) {
      if (theActive) {
         if (theBuilder.length() > 0) {
            theBuilder.append(", ");
         }
         theBuilder.append(theDirective);
      }
   }

   /**
    * For now, this is a part of the internal API of HAPI - Use with caution as this method may change!
    */
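As a worked example of the header construction above: a directive with no-store set and max-results=5 produces a single comma-joined header value, so the request carries

   Cache-Control: no-store, max-results=5

while a directive with nothing set leaves the builder empty and adds no header at all.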
GenericClient.java:

@@ -120,10 +120,10 @@ public class GenericClient extends BaseClient implements IGenericClient {
      ResourceResponseHandler<T> binding = new ResourceResponseHandler<T>(theType, (Class<? extends IBaseResource>) null, id, allowHtmlResponse);

      if (theNotModifiedHandler == null) {
         return invokeClient(myContext, binding, invocation, theEncoding, thePrettyPrint, myLogRequestAndResponse, theSummary, theSubsetElements);
         return invokeClient(myContext, binding, invocation, theEncoding, thePrettyPrint, myLogRequestAndResponse, theSummary, theSubsetElements, null);
      }
      try {
         return invokeClient(myContext, binding, invocation, theEncoding, thePrettyPrint, myLogRequestAndResponse, theSummary, theSubsetElements);
         return invokeClient(myContext, binding, invocation, theEncoding, thePrettyPrint, myLogRequestAndResponse, theSummary, theSubsetElements, null);
      } catch (NotModifiedException e) {
         return theNotModifiedHandler.call();
      }
@@ -373,6 +373,7 @@ public class GenericClient extends BaseClient implements IGenericClient {
      private boolean myQueryLogRequestAndResponse;
      private HashSet<String> mySubsetElements;
      protected SummaryEnum mySummaryMode;
      protected CacheControlDirective myCacheControlDirective;

      @Deprecated // override deprecated method
      @SuppressWarnings("unchecked")
@@ -382,6 +383,12 @@ public class GenericClient extends BaseClient implements IGenericClient {
         return (T) this;
      }

      @Override
      public T cacheControl(CacheControlDirective theCacheControlDirective) {
         myCacheControlDirective = theCacheControlDirective;
         return (T) this;
      }

      @SuppressWarnings("unchecked")
      @Override
      public T elementsSubset(String... theElements) {
@@ -434,19 +441,11 @@ public class GenericClient extends BaseClient implements IGenericClient {
      }

      protected <Z> Z invoke(Map<String, List<String>> theParams, IClientResponseHandler<Z> theHandler, BaseHttpClientInvocation theInvocation) {
         // if (myParamEncoding != null) {
         // theParams.put(Constants.PARAM_FORMAT, Collections.singletonList(myParamEncoding.getFormatContentType()));
         // }
         //
         // if (myPrettyPrint != null) {
         // theParams.put(Constants.PARAM_PRETTY, Collections.singletonList(myPrettyPrint.toString()));
         // }

         if (isKeepResponses()) {
            myLastRequest = theInvocation.asHttpRequest(getServerBase(), theParams, getEncoding(), myPrettyPrint);
         }

         Z resp = invokeClient(myContext, theHandler, theInvocation, myParamEncoding, myPrettyPrint, myQueryLogRequestAndResponse || myLogRequestAndResponse, mySummaryMode, mySubsetElements);
         Z resp = invokeClient(myContext, theHandler, theInvocation, myParamEncoding, myPrettyPrint, myQueryLogRequestAndResponse || myLogRequestAndResponse, mySummaryMode, mySubsetElements, myCacheControlDirective);
         return resp;
      }
BaseHapiFhirResourceDao.java:

@@ -36,10 +36,7 @@ import ca.uhn.fhir.jpa.util.jsonpatch.JsonPatchUtils;
import ca.uhn.fhir.jpa.util.xmlpatch.XmlPatchUtils;
import ca.uhn.fhir.model.api.*;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.rest.api.PatchTypeEnum;
import ca.uhn.fhir.rest.api.QualifiedParamList;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum;
import ca.uhn.fhir.rest.api.*;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.param.ParameterUtil;
@@ -928,7 +925,12 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
         }
      }

      return mySearchCoordinatorSvc.registerSearch(this, theParams, getResourceName());
      CacheControlDirective cacheControlDirective = new CacheControlDirective();
      if (theRequestDetails != null) {
         cacheControlDirective.parse(theRequestDetails.getHeaders(Constants.HEADER_CACHE_CONTROL));
      }

      return mySearchCoordinatorSvc.registerSearch(this, theParams, getResourceName(), cacheControlDirective);
   }

   @Override
DaoConfig.java:

@@ -107,6 +107,7 @@ public class DaoConfig {
   private Set<String> myTreatBaseUrlsAsLocal = new HashSet<String>();
   private Set<String> myTreatReferencesAsLogical = new HashSet<String>(DEFAULT_LOGICAL_BASE_URLS);
   private boolean myAutoCreatePlaceholderReferenceTargets;
   private Integer myCacheControlNoStoreMaxResultsUpperLimit = 1000;

   /**
    * Constructor
@@ -131,6 +132,26 @@ public class DaoConfig {
      myTreatReferencesAsLogical.add(theTreatReferencesAsLogical);
   }

   /**
    * Specifies the highest number that a client is permitted to use in a
    * <code>Cache-Control: no-store, max-results=NNN</code>
    * directive. If the client tries to exceed this limit, the
    * request will be denied. Defaults to 1000.
    */
   public Integer getCacheControlNoStoreMaxResultsUpperLimit() {
      return myCacheControlNoStoreMaxResultsUpperLimit;
   }

   /**
    * Specifies the highest number that a client is permitted to use in a
    * <code>Cache-Control: no-store, max-results=NNN</code>
    * directive. If the client tries to exceed this limit, the
    * request will be denied. Defaults to 1000.
    */
   public void setCacheControlNoStoreMaxResultsUpperLimit(Integer theCacheControlNoStoreMaxResults) {
      myCacheControlNoStoreMaxResultsUpperLimit = theCacheControlNoStoreMaxResults;
   }

   /**
    * When a code system is added that contains more than this number of codes,
    * the code system will be indexed later in an incremental process in order to
@@ -336,8 +357,11 @@ public class DaoConfig {
   /**
    * This may be used to optionally register server interceptors directly against the DAOs.
    */
   public void setInterceptors(List<IServerInterceptor> theInterceptors) {
      myInterceptors = theInterceptors;
   public void setInterceptors(IServerInterceptor... theInterceptor) {
      setInterceptors(new ArrayList<IServerInterceptor>());
      if (theInterceptor != null && theInterceptor.length != 0) {
         getInterceptors().addAll(Arrays.asList(theInterceptor));
      }
   }

   /**
@@ -434,6 +458,11 @@ public class DaoConfig {
    * This approach can improve performance, especially under heavy load, but can also mean that
    * searches may potentially return slightly out-of-date results.
    * </p>
    * <p>
    * Note that if this is set to a non-null value, clients may override this setting by using
    * the <code>Cache-Control</code> header. If this is set to <code>null</code>, the Cache-Control
    * header will be ignored.
    * </p>
    */
   public Long getReuseCachedSearchResultsForMillis() {
      return myReuseCachedSearchResultsForMillis;
@@ -449,6 +478,11 @@ public class DaoConfig {
    * This approach can improve performance, especially under heavy load, but can also mean that
    * searches may potentially return slightly out-of-date results.
    * </p>
    * <p>
    * Note that if this is set to a non-null value, clients may override this setting by using
    * the <code>Cache-Control</code> header. If this is set to <code>null</code>, the Cache-Control
    * header will be ignored.
    * </p>
    */
   public void setReuseCachedSearchResultsForMillis(Long theReuseCachedSearchResultsForMillis) {
      myReuseCachedSearchResultsForMillis = theReuseCachedSearchResultsForMillis;
@@ -925,11 +959,8 @@ public class DaoConfig {
   /**
    * This may be used to optionally register server interceptors directly against the DAOs.
    */
   public void setInterceptors(IServerInterceptor... theInterceptor) {
      setInterceptors(new ArrayList<IServerInterceptor>());
      if (theInterceptor != null && theInterceptor.length != 0) {
         getInterceptors().addAll(Arrays.asList(theInterceptor));
      }
   public void setInterceptors(List<IServerInterceptor> theInterceptors) {
      myInterceptors = theInterceptors;
   }

   /**
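A minimal configuration sketch for the new upper limit (the 500 here is an arbitrary illustrative value):

   DaoConfig daoConfig = new DaoConfig();
   // Clients may then request at most "Cache-Control: no-store, max-results=500"
   daoConfig.setCacheControlNoStoreMaxResultsUpperLimit(500);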
FhirResourceDaoPatientDstu2.java:

@@ -65,7 +65,7 @@ public class FhirResourceDaoPatientDstu2 extends FhirResourceDaoDstu2<Patient>im
         paramMap.setLoadSynchronous(true);
      }

      return mySearchCoordinatorSvc.registerSearch(this, paramMap, getResourceName());
      return mySearchCoordinatorSvc.registerSearch(this, paramMap, getResourceName(), new CacheControlDirective().parse(theRequestDetails.getHeaders(Constants.HEADER_CACHE_CONTROL)));
   }

   @Override
FhirResourceDaoPatientDstu3.java:

@@ -24,6 +24,7 @@ import java.util.Collections;

import javax.servlet.http.HttpServletRequest;

import ca.uhn.fhir.rest.api.CacheControlDirective;
import org.hl7.fhir.dstu3.model.Patient;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
@@ -66,7 +67,7 @@ public class FhirResourceDaoPatientDstu3 extends FhirResourceDaoDstu3<Patient>im
         paramMap.setLoadSynchronous(true);
      }

      return mySearchCoordinatorSvc.registerSearch(this, paramMap, getResourceName());
      return mySearchCoordinatorSvc.registerSearch(this, paramMap, getResourceName(), new CacheControlDirective().parse(theRequestDetails.getHeaders(Constants.HEADER_CACHE_CONTROL)));
   }

   @Override
FhirResourceDaoPatientR4.java:

@@ -24,6 +24,7 @@ import java.util.Collections;

import javax.servlet.http.HttpServletRequest;

import ca.uhn.fhir.rest.api.CacheControlDirective;
import org.hl7.fhir.r4.model.Patient;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
@@ -66,7 +67,7 @@ public class FhirResourceDaoPatientR4 extends FhirResourceDaoR4<Patient>implemen
         paramMap.setLoadSynchronous(true);
      }

      return mySearchCoordinatorSvc.registerSearch(this, paramMap, getResourceName());
      return mySearchCoordinatorSvc.registerSearch(this, paramMap, getResourceName(), new CacheControlDirective().parse(theRequestDetails.getHeaders(Constants.HEADER_CACHE_CONTROL)));
   }

   @Override
ISearchCoordinatorSvc.java:

@@ -20,18 +20,19 @@ package ca.uhn.fhir.jpa.search;
 * #L%
 */

import java.util.List;

import ca.uhn.fhir.jpa.dao.IDao;
import ca.uhn.fhir.jpa.dao.SearchParameterMap;
import ca.uhn.fhir.rest.api.CacheControlDirective;
import ca.uhn.fhir.rest.api.server.IBundleProvider;

import java.util.List;

public interface ISearchCoordinatorSvc {

   List<Long> getResources(String theUuid, int theFrom, int theTo);

   IBundleProvider registerSearch(IDao theCallingDao, SearchParameterMap theParams, String theResourceType);

   void cancelAllActiveSearches();

   List<Long> getResources(String theUuid, int theFrom, int theTo);

   IBundleProvider registerSearch(IDao theCallingDao, SearchParameterMap theParams, String theResourceType, CacheControlDirective theCacheControlDirective);

}
SearchCoordinatorSvcImpl.java:

@@ -24,6 +24,8 @@ import java.util.concurrent.*;

import javax.persistence.EntityManager;

import ca.uhn.fhir.rest.api.CacheControlDirective;
import ca.uhn.fhir.rest.api.Constants;
import org.apache.commons.lang3.ObjectUtils;
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.exception.ExceptionUtils;
@@ -55,7 +57,7 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
   public static final int DEFAULT_SYNC_SIZE = 250;

   private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(SearchCoordinatorSvcImpl.class);

   private final ConcurrentHashMap<String, SearchTask> myIdToSearchTask = new ConcurrentHashMap<String, SearchTask>();
   @Autowired
   private FhirContext myContext;
   @Autowired
@@ -63,7 +65,6 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
   @Autowired
   private EntityManager myEntityManager;
   private ExecutorService myExecutor;
   private final ConcurrentHashMap<String, SearchTask> myIdToSearchTask = new ConcurrentHashMap<String, SearchTask>();
   private Integer myLoadingThrottleForUnitTests = null;
   private long myMaxMillisToWaitForRemoteResults = DateUtils.MILLIS_PER_MINUTE;
   private boolean myNeverUseLocalSearchForUnitTests;
@@ -179,7 +180,7 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
   }

   @Override
   public IBundleProvider registerSearch(final IDao theCallingDao, final SearchParameterMap theParams, String theResourceType) {
   public IBundleProvider registerSearch(final IDao theCallingDao, final SearchParameterMap theParams, String theResourceType, CacheControlDirective theCacheControlDirective) {
      StopWatch w = new StopWatch();
      final String searchUuid = UUID.randomUUID().toString();

@@ -187,7 +188,21 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
      final ISearchBuilder sb = theCallingDao.newSearchBuilder();
      sb.setType(resourceTypeClass, theResourceType);

      if (theParams.isLoadSynchronous()) {
      final Integer loadSynchronousUpTo;
      if (theCacheControlDirective != null && theCacheControlDirective.isNoStore()) {
         if (theCacheControlDirective.getMaxResults() != null) {
            loadSynchronousUpTo = theCacheControlDirective.getMaxResults();
            if (loadSynchronousUpTo > myDaoConfig.getCacheControlNoStoreMaxResultsUpperLimit()) {
               throw new InvalidRequestException(Constants.HEADER_CACHE_CONTROL + " header " + Constants.CACHE_CONTROL_MAX_RESULTS + " value must not exceed " + myDaoConfig.getCacheControlNoStoreMaxResultsUpperLimit());
            }
         } else {
            loadSynchronousUpTo = 100;
         }
      } else {
         loadSynchronousUpTo = null;
      }

      if (theParams.isLoadSynchronous() || loadSynchronousUpTo != null) {

         // Execute the query and make sure we return distinct results
         TransactionTemplate txTemplate = new TransactionTemplate(myManagedTxManager);
@@ -202,6 +217,9 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
         Iterator<Long> resultIter = sb.createQuery(theParams, searchUuid);
         while (resultIter.hasNext()) {
            pids.add(resultIter.next());
            if (loadSynchronousUpTo != null && pids.size() >= loadSynchronousUpTo) {
               break;
            }
            if (theParams.getLoadSynchronousUpTo() != null && pids.size() >= theParams.getLoadSynchronousUpTo()) {
               break;
            }
@@ -231,9 +249,13 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
       * See if there are any cached searches whose results we can return
       * instead
       */
      boolean useCache = true;
      if (theCacheControlDirective != null && theCacheControlDirective.isNoCache() == true) {
         useCache = false;
      }
      final String queryString = theParams.toNormalizedQueryString(myContext);
      if (theParams.getEverythingMode() == null) {
         if (myDaoConfig.getReuseCachedSearchResultsForMillis() != null) {
         if (myDaoConfig.getReuseCachedSearchResultsForMillis() != null && useCache) {

            final Date createdCutoff = new Date(System.currentTimeMillis() - myDaoConfig.getReuseCachedSearchResultsForMillis());
            final String resourceType = theResourceType;
@@ -394,16 +416,16 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {

   public class SearchTask implements Callable<Void> {

      private boolean myAbortRequested;
      private final IDao myCallingDao;
      private final CountDownLatch myCompletionLatch;
      private int myCountSaved = 0;
      private final CountDownLatch myInitialCollectionLatch = new CountDownLatch(1);
      private final SearchParameterMap myParams;
      private final String myResourceType;
      private final Search mySearch;
      private final ArrayList<Long> mySyncedPids = new ArrayList<Long>();
      private final ArrayList<Long> myUnsyncedPids = new ArrayList<Long>();
      private boolean myAbortRequested;
      private int myCountSaved = 0;
      private String mySearchUuid;

      public SearchTask(Search theSearch, IDao theCallingDao, SearchParameterMap theParams, String theResourceType, String theSearchUuid) {
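Read together, the changes above give the Cache-Control header the following server-side semantics in this commit: no-cache skips the lookup of previously cached search results; no-store runs the search synchronously without persisting a Search entity, fetching at most 100 results unless a max-results value is supplied; and a max-results value above DaoConfig#getCacheControlNoStoreMaxResultsUpperLimit() is rejected with an InvalidRequestException. An illustrative request (hypothetical base URL and query):

   GET http://example.com/fhir/Patient?family=FAM
   Cache-Control: no-store, max-results=5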
ResourceProviderR4CacheTest.java (new file):

@@ -0,0 +1,164 @@
package ca.uhn.fhir.jpa.provider.r4;

import ca.uhn.fhir.jpa.dao.DaoConfig;
import ca.uhn.fhir.jpa.search.SearchCoordinatorSvcImpl;
import ca.uhn.fhir.parser.StrictErrorHandler;
import ca.uhn.fhir.rest.api.CacheControlDirective;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.util.TestUtil;
import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.Patient;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Test;
import org.springframework.test.util.AopTestUtils;

import java.io.IOException;

import static org.junit.Assert.*;

public class ResourceProviderR4CacheTest extends BaseResourceProviderR4Test {

   private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(ResourceProviderR4CacheTest.class);
   private SearchCoordinatorSvcImpl mySearchCoordinatorSvcRaw;

   @Override
   @After
   public void after() throws Exception {
      super.after();
      myDaoConfig.setReuseCachedSearchResultsForMillis(new DaoConfig().getReuseCachedSearchResultsForMillis());
      myDaoConfig.setCacheControlNoStoreMaxResultsUpperLimit(new DaoConfig().getCacheControlNoStoreMaxResultsUpperLimit());
   }

   @Override
   public void before() throws Exception {
      super.before();
      myFhirCtx.setParserErrorHandler(new StrictErrorHandler());
      mySearchCoordinatorSvcRaw = AopTestUtils.getTargetObject(mySearchCoordinatorSvc);
   }

   @Test
   public void testCacheNoStore() throws IOException {

      Patient pt1 = new Patient();
      pt1.addName().setFamily("FAM");
      ourClient.create().resource(pt1).execute();

      Bundle results = ourClient
         .search()
         .forResource("Patient")
         .where(Patient.FAMILY.matches().value("FAM"))
         .returnBundle(Bundle.class)
         .cacheControl(new CacheControlDirective().setNoStore(true))
         .execute();
      assertEquals(1, results.getEntry().size());
      assertEquals(0, mySearchEntityDao.count());

      Patient pt2 = new Patient();
      pt2.addName().setFamily("FAM");
      ourClient.create().resource(pt2).execute();

      results = ourClient
         .search()
         .forResource("Patient")
         .where(Patient.FAMILY.matches().value("FAM"))
         .returnBundle(Bundle.class)
         .cacheControl(new CacheControlDirective().setNoStore(true))
         .execute();
      assertEquals(2, results.getEntry().size());
      assertEquals(0, mySearchEntityDao.count());

   }

   @Test
   public void testCacheNoStoreMaxResults() throws IOException {

      for (int i = 0; i < 10; i++) {
         Patient pt1 = new Patient();
         pt1.addName().setFamily("FAM" + i);
         ourClient.create().resource(pt1).execute();
      }

      Bundle results = ourClient
         .search()
         .forResource("Patient")
         .where(Patient.FAMILY.matches().value("FAM"))
         .returnBundle(Bundle.class)
         .cacheControl(new CacheControlDirective().setNoStore(true).setMaxResults(5))
         .execute();
      assertEquals(5, results.getEntry().size());
      assertEquals(0, mySearchEntityDao.count());

   }

   @Test
   public void testCacheNoStoreMaxResultsWithIllegalValue() throws IOException {
      myDaoConfig.setCacheControlNoStoreMaxResultsUpperLimit(123);
      try {
         ourClient
            .search()
            .forResource("Patient")
            .where(Patient.FAMILY.matches().value("FAM"))
            .returnBundle(Bundle.class)
            .cacheControl(new CacheControlDirective().setNoStore(true).setMaxResults(5000))
            .execute();
         fail();
      } catch (InvalidRequestException e) {
         assertEquals("HTTP 400 Bad Request: Cache-Control header max-results value must not exceed 123", e.getMessage());
      }
   }

   @Test
   public void testCacheSuppressed() throws IOException {

      Patient pt1 = new Patient();
      pt1.addName().setFamily("FAM");
      ourClient.create().resource(pt1).execute();

      Bundle results = ourClient.search().forResource("Patient").where(Patient.FAMILY.matches().value("FAM")).returnBundle(Bundle.class).execute();
      assertEquals(1, results.getEntry().size());
      assertEquals(1, mySearchEntityDao.count());

      Patient pt2 = new Patient();
      pt2.addName().setFamily("FAM");
      ourClient.create().resource(pt2).execute();

      results = ourClient
         .search()
         .forResource("Patient")
         .where(Patient.FAMILY.matches().value("FAM"))
         .returnBundle(Bundle.class)
         .cacheControl(new CacheControlDirective().setNoCache(true))
         .execute();
      assertEquals(2, results.getEntry().size());
      assertEquals(2, mySearchEntityDao.count());

   }

   @Test
   public void testCacheUsedNormally() throws IOException {

      Patient pt1 = new Patient();
      pt1.addName().setFamily("FAM");
      ourClient.create().resource(pt1).execute();

      Bundle results = ourClient.search().forResource("Patient").where(Patient.FAMILY.matches().value("FAM")).returnBundle(Bundle.class).execute();
      assertEquals(1, results.getEntry().size());
      assertEquals(1, mySearchEntityDao.count());

      Patient pt2 = new Patient();
      pt2.addName().setFamily("FAM");
      ourClient.create().resource(pt2).execute();

      results = ourClient.search().forResource("Patient").where(Patient.FAMILY.matches().value("FAM")).returnBundle(Bundle.class).execute();
      assertEquals(1, results.getEntry().size());
      assertEquals(1, mySearchEntityDao.count());

   }

   @AfterClass
   public static void afterClassClearContext() {
      TestUtil.clearAllStaticFieldsForUnitTest();
   }

}
SearchCoordinatorSvcImplTest.java:

@@ -1,41 +1,60 @@
package ca.uhn.fhir.jpa.search;

import static org.junit.Assert.*;
import static org.mockito.Matchers.*;
import static org.mockito.Mockito.*;

import java.util.*;

import javax.persistence.EntityManager;

import org.hl7.fhir.instance.model.api.IBaseResource;
import org.junit.*;
import org.junit.runner.RunWith;
import org.mockito.*;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.runners.MockitoJUnitRunner;
import org.mockito.stubbing.Answer;
import org.springframework.data.domain.*;
import org.springframework.transaction.PlatformTransactionManager;

import com.google.common.collect.Lists;

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.dao.*;
import ca.uhn.fhir.jpa.dao.data.*;
import ca.uhn.fhir.jpa.entity.*;
import ca.uhn.fhir.jpa.dao.DaoConfig;
import ca.uhn.fhir.jpa.dao.IDao;
import ca.uhn.fhir.jpa.dao.ISearchBuilder;
import ca.uhn.fhir.jpa.dao.SearchParameterMap;
import ca.uhn.fhir.jpa.dao.data.ISearchDao;
import ca.uhn.fhir.jpa.dao.data.ISearchIncludeDao;
import ca.uhn.fhir.jpa.dao.data.ISearchResultDao;
import ca.uhn.fhir.jpa.entity.Search;
import ca.uhn.fhir.jpa.entity.SearchResult;
import ca.uhn.fhir.jpa.entity.SearchStatusEnum;
import ca.uhn.fhir.jpa.entity.SearchTypeEnum;
import ca.uhn.fhir.jpa.util.BaseIterator;
import ca.uhn.fhir.model.dstu2.resource.Patient;
import ca.uhn.fhir.rest.api.CacheControlDirective;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.util.TestUtil;
import com.google.common.collect.Lists;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.runners.MockitoJUnitRunner;
import org.mockito.stubbing.Answer;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageImpl;
import org.springframework.data.domain.Pageable;
import org.springframework.transaction.PlatformTransactionManager;

@SuppressWarnings({ "unchecked" })
import javax.persistence.EntityManager;
import java.util.*;

import static org.junit.Assert.*;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyBoolean;
import static org.mockito.Matchers.eq;
import static org.mockito.Matchers.same;
import static org.mockito.Mockito.*;

@SuppressWarnings({"unchecked"})
@RunWith(MockitoJUnitRunner.class)
public class SearchCoordinatorSvcImplTest {

   private static FhirContext ourCtx = FhirContext.forDstu3();
   @Captor
   ArgumentCaptor<Iterable<SearchResult>> mySearchResultIterCaptor;
   @Mock
   private IDao myCallingDao;
   @Mock
@@ -49,10 +68,6 @@ public class SearchCoordinatorSvcImplTest {
   private ISearchIncludeDao mySearchIncludeDao;
   @Mock
   private ISearchResultDao mySearchResultDao;
   @Captor
   ArgumentCaptor<Iterable<SearchResult>> mySearchResultIterCaptor;

   private SearchCoordinatorSvcImpl mySvc;

   @Mock
@@ -63,9 +78,10 @@ public class SearchCoordinatorSvcImplTest {
   public void after() {
      verify(myCallingDao, atMost(myExpectedNumberOfSearchBuildersCreated)).newSearchBuilder();
   }

   @Before
   public void before() {

      mySvc = new SearchCoordinatorSvcImpl();
      mySvc.setEntityManagerForUnitTest(myEntityManager);
      mySvc.setTransactionManagerForUnitTest(myTxManager);
@@ -76,9 +92,9 @@ public class SearchCoordinatorSvcImplTest {

      myDaoConfig = new DaoConfig();
      mySvc.setDaoConfigForUnitTest(myDaoConfig);

      when(myCallingDao.newSearchBuilder()).thenReturn(mySearchBuider);

      doAnswer(new Answer<Void>() {
         @Override
         public Void answer(InvocationOnMock theInvocation) throws Throwable {
@@ -89,7 +105,8 @@ public class SearchCoordinatorSvcImplTest {
            provider.setEntityManager(myEntityManager);
            provider.setContext(ourCtx);
            return null;
         }}).when(myCallingDao).injectDependenciesIntoBundleProvider(any(PersistedJpaBundleProvider.class));
         }
      }).when(myCallingDao).injectDependenciesIntoBundleProvider(any(PersistedJpaBundleProvider.class));
   }

   private List<Long> createPidSequence(int from, int to) {
@@ -128,7 +145,7 @@ public class SearchCoordinatorSvcImplTest {

      doAnswer(loadPids()).when(mySearchBuider).loadResourcesByPid(any(List.class), any(List.class), any(Set.class), anyBoolean(), any(EntityManager.class), any(FhirContext.class), same(myCallingDao));

      IBundleProvider result = mySvc.registerSearch(myCallingDao, params, "Patient");
      IBundleProvider result = mySvc.registerSearch(myCallingDao, params, "Patient", new CacheControlDirective());
      assertNotNull(result.getUuid());
      assertEquals(null, result.size());

@@ -151,12 +168,12 @@ public class SearchCoordinatorSvcImplTest {

      doAnswer(loadPids()).when(mySearchBuider).loadResourcesByPid(any(List.class), any(List.class), any(Set.class), anyBoolean(), any(EntityManager.class), any(FhirContext.class), same(myCallingDao));

      IBundleProvider result = mySvc.registerSearch(myCallingDao, params, "Patient");
      IBundleProvider result = mySvc.registerSearch(myCallingDao, params, "Patient", new CacheControlDirective());
      assertNotNull(result.getUuid());
      assertEquals(null, result.size());

      List<IBaseResource> resources;

      resources = result.getResources(0, 100000);
      assertEquals(790, resources.size());
      assertEquals("10", resources.get(0).getIdElement().getValueAsString());
@@ -164,18 +181,18 @@ public class SearchCoordinatorSvcImplTest {

      ArgumentCaptor<Search> searchCaptor = ArgumentCaptor.forClass(Search.class);
      verify(mySearchDao, atLeastOnce()).save(searchCaptor.capture());

      verify(mySearchResultDao, atLeastOnce()).save(mySearchResultIterCaptor.capture());
      List<SearchResult> allResults= new ArrayList<SearchResult>();
      List<SearchResult> allResults = new ArrayList<SearchResult>();
      for (Iterable<SearchResult> next : mySearchResultIterCaptor.getAllValues()) {
         allResults.addAll(Lists.newArrayList(next));
      }

      assertEquals(790, allResults.size());
      assertEquals(10, allResults.get(0).getResourcePid().longValue());
      assertEquals(799, allResults.get(789).getResourcePid().longValue());
   }

   @Test
   public void testAsyncSearchLargeResultSetSameCoordinator() {
      SearchParameterMap params = new SearchParameterMap();
@@ -187,12 +204,12 @@ public class SearchCoordinatorSvcImplTest {

      doAnswer(loadPids()).when(mySearchBuider).loadResourcesByPid(any(List.class), any(List.class), any(Set.class), anyBoolean(), any(EntityManager.class), any(FhirContext.class), same(myCallingDao));

      IBundleProvider result = mySvc.registerSearch(myCallingDao, params, "Patient");
      IBundleProvider result = mySvc.registerSearch(myCallingDao, params, "Patient", new CacheControlDirective());
      assertNotNull(result.getUuid());
      assertEquals(null, result.size());

      List<IBaseResource> resources;

      resources = result.getResources(0, 30);
      assertEquals(30, resources.size());
      assertEquals("10", resources.get(0).getIdElement().getValueAsString());
@@ -202,7 +219,7 @@ public class SearchCoordinatorSvcImplTest {

   /**
    * Subsequent requests for the same search (i.e. a request for the next
    * page) within the same JVM will not use the original bundle provider
    * page) within the same JVM will not use the original bundle provider
    */
   @Test
   public void testAsyncSearchLargeResultSetSecondRequestSameCoordinator() {
@@ -215,7 +232,7 @@ public class SearchCoordinatorSvcImplTest {

      doAnswer(loadPids()).when(mySearchBuider).loadResourcesByPid(any(List.class), any(List.class), any(Set.class), anyBoolean(), any(EntityManager.class), any(FhirContext.class), same(myCallingDao));

      IBundleProvider result = mySvc.registerSearch(myCallingDao, params, "Patient");
      IBundleProvider result = mySvc.registerSearch(myCallingDao, params, "Patient", new CacheControlDirective());
      assertNotNull(result.getUuid());
      assertEquals(null, result.size());

@@ -223,10 +240,10 @@ public class SearchCoordinatorSvcImplTest {
      verify(mySearchDao, atLeast(1)).save(searchCaptor.capture());
      Search search = searchCaptor.getValue();
      assertEquals(SearchTypeEnum.SEARCH, search.getSearchType());

      List<IBaseResource> resources;
      PersistedJpaBundleProvider provider;

      resources = result.getResources(0, 10);
      assertNull(result.size());
      assertEquals(10, resources.size());
@@ -244,7 +261,7 @@ public class SearchCoordinatorSvcImplTest {
      assertEquals(10, resources.size());
      assertEquals("20", resources.get(0).getIdElement().getValueAsString());
      assertEquals("29", resources.get(9).getIdElement().getValueAsString());

      provider = new PersistedJpaBundleProvider(result.getUuid(), myCallingDao);
      resources = provider.getResources(20, 99999);
      assertEquals(770, resources.size());
@@ -265,7 +282,7 @@ public class SearchCoordinatorSvcImplTest {

      doAnswer(loadPids()).when(mySearchBuider).loadResourcesByPid(any(List.class), any(List.class), any(Set.class), anyBoolean(), any(EntityManager.class), any(FhirContext.class), same(myCallingDao));

      IBundleProvider result = mySvc.registerSearch(myCallingDao, params, "Patient");
      IBundleProvider result = mySvc.registerSearch(myCallingDao, params, "Patient", new CacheControlDirective());
      assertNotNull(result.getUuid());
      assertEquals(90, result.size().intValue());

@@ -285,18 +302,18 @@ public class SearchCoordinatorSvcImplTest {
   @Test
   public void testLoadSearchResultsFromDifferentCoordinator() {
      final String uuid = UUID.randomUUID().toString();

      final Search search = new Search();
      search.setUuid(uuid);
      search.setSearchType(SearchTypeEnum.SEARCH);
      search.setResourceType("Patient");

      when(mySearchDao.findByUuid(eq(uuid))).thenReturn(search);
      doAnswer(loadPids()).when(mySearchBuider).loadResourcesByPid(any(List.class), any(List.class), any(Set.class), anyBoolean(), any(EntityManager.class), any(FhirContext.class), same(myCallingDao));

      PersistedJpaBundleProvider provider;
      List<IBaseResource> resources;

      new Thread() {
         @Override
         public void run() {
@@ -305,20 +322,21 @@ public class SearchCoordinatorSvcImplTest {
            } catch (InterruptedException e) {
               // ignore
            }

            when(mySearchResultDao.findWithSearchUuid(any(Search.class), any(Pageable.class))).thenAnswer(new Answer<Page<SearchResult>>() {
               @Override
               public Page<SearchResult> answer(InvocationOnMock theInvocation) throws Throwable {
                  Pageable page = (Pageable) theInvocation.getArguments()[1];

                  ArrayList<SearchResult> results = new ArrayList<SearchResult>();
                  int max = (page.getPageNumber() * page.getPageSize()) + page.getPageSize();
                  for (int i = page.getOffset(); i < max; i++) {
                     results.add(new SearchResult().setResourcePid(i + 10L));
                  }

                  return new PageImpl<SearchResult>(results);
               }});
               }
            });
            search.setStatus(SearchStatusEnum.FINISHED);
         }
      }.start();
@@ -332,7 +350,7 @@ public class SearchCoordinatorSvcImplTest {
      assertEquals(10, resources.size());
      assertEquals("20", resources.get(0).getIdElement().getValueAsString());
      assertEquals("29", resources.get(9).getIdElement().getValueAsString());

      provider = new PersistedJpaBundleProvider(uuid, myCallingDao);
      resources = provider.getResources(20, 40);
      assertEquals(20, resources.size());
@@ -353,7 +371,7 @@ public class SearchCoordinatorSvcImplTest {

      doAnswer(loadPids()).when(mySearchBuider).loadResourcesByPid(eq(pids), any(List.class), any(Set.class), anyBoolean(), any(EntityManager.class), any(FhirContext.class), same(myCallingDao));

      IBundleProvider result = mySvc.registerSearch(myCallingDao, params, "Patient");
      IBundleProvider result = mySvc.registerSearch(myCallingDao, params, "Patient", new CacheControlDirective());
      assertNull(result.getUuid());
      assertEquals(790, result.size().intValue());

@@ -375,7 +393,7 @@ public class SearchCoordinatorSvcImplTest {
      pids = createPidSequence(10, 110);
      doAnswer(loadPids()).when(mySearchBuider).loadResourcesByPid(eq(pids), any(List.class), any(Set.class), anyBoolean(), any(EntityManager.class), any(FhirContext.class), same(myCallingDao));

      IBundleProvider result = mySvc.registerSearch(myCallingDao, params, "Patient");
      IBundleProvider result = mySvc.registerSearch(myCallingDao, params, "Patient", new CacheControlDirective());
      assertNull(result.getUuid());
      assertEquals(100, result.size().intValue());

@@ -394,7 +412,7 @@ public class SearchCoordinatorSvcImplTest {

      private int myCount;
      private Iterator<T> myWrap;

      public FailAfterNIterator(Iterator<T> theWrap, int theCount) {
         myWrap = theWrap;
         myCount = theCount;
@@ -416,7 +434,7 @@ public class SearchCoordinatorSvcImplTest {

   }

   public static class SlowIterator<T> extends BaseIterator<T> implements Iterator<T> {

      private int myDelay;
[One file diff suppressed because it is too large.]
Changelog (changes.xml):

@@ -32,6 +32,16 @@
            a search URL longer than 255 characters caused a mysterious failure. Thanks to
            Chris Schuler and Bryn Rhodes for all of their help in reproducing this issue.
        </action>
        <action type="add">
            JPA server now supports the use of the
            <![CDATA[<code>Cache-Control</code>]]>
            header in order to allow the client to selectively disable the
            search result cache. This directive can also be used to disable result paging
            and return results faster when only a small number of results is needed.
            See the
            <![CDATA[<a href="http://hapifhir.io/doc_jpa.html">JPA Page</a>]]>
            for more information.
        </action>
    </release>
    <release version="3.0.0" date="2017-09-27">
        <action type="add">
JPA Server documentation page (xdoc):

@@ -1,315 +1,372 @@
<?xml version="1.0" encoding="UTF-8"?>
<document xmlns="http://maven.apache.org/XDOC/2.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/XDOC/2.0 http://maven.apache.org/xsd/xdoc-2.0.xsd">

  <properties>
    <title>JPA Server</title>
    <author email="jamesagnew@users.sourceforge.net">James Agnew</author>
  </properties>

  <body>

    <section name="JPA Server">

      <p>
        The HAPI FHIR
        <a href="./doc_rest_server.html">RestfulServer</a>
        module can be used to create a FHIR server endpoint against an arbitrary
        data source, which could be a database of your own design, an existing
        clinical system, a set of files, or anything else you come up with.
      </p>
      <p>
        HAPI also provides a persistence module which can be used to
        provide a complete RESTful server implementation, backed by a database of
        your choosing. This module uses the <a href="http://en.wikipedia.org/wiki/Java_Persistence_API">JPA 2.0</a>
        API to store data in a database without depending on any specific database technology.
      </p>
      <p>
        <b>Important Note: </b>
        This implementation uses a fairly simple table design, with a
        single table being used to hold resource bodies (which are stored as
        CLOBs, optionally GZipped to save space) and a set of tables to hold search indexes, tags,
        history details, etc. This design is only one of many possible ways
        of designing a FHIR server so it is worth considering whether it
        is appropriate for the problem you are trying to solve.
      </p>

      <subsection name="Getting Started">

        <p>
          The easiest way to get started with HAPI's JPA server module is
          to begin with the example project. There is a complete sample project
          found in our GitHub repo here: <a href="https://github.com/jamesagnew/hapi-fhir/tree/master/hapi-fhir-jpaserver-example">hapi-fhir-jpaserver-example</a>
        </p>

        <p>
          This example is a fully contained FHIR server, supporting all standard operations (read/create/delete/etc).
          It bundles an embedded instance of the <a href="http://db.apache.org/derby/">Apache Derby</a> Java database
          so that the server can run without depending on any external database, but it can also be
          configured to use an installation of Oracle, Postgres, etc.
        </p>

        <p>
          To take this project for a spin, check out the sources from GitHub (or download a snapshot),
          and then build the project:
        </p>

        <source><![CDATA[$ cd hapi-fhir-jpaserver-example
$ mvn install]]></source>

        <p>
          You now have two options for starting the server:
        </p>
        <ul>
          <li>
            <b>Deploy to Tomcat/JBoss/WebSphere/etc: </b> You will now have a file
            in your <code>target</code> directory called <code>hapi-fhir-jpaserver-example.war</code>.
            This WAR file can be deployed to any Servlet container, at which point you could
            access the server by pointing your browser at a URL similar to the following
            (you may need to adjust the
            port depending on which port your container is configured to listen on):
            <a href="http://localhost:8080/hapi-fhir-jpaserver-example/">http://localhost:8080/hapi-fhir-jpaserver-example/</a>
          </li>
          <li>
            <b>Run with Maven and Embedded Jetty: </b> To start the server
            directly within Maven, you can execute the following command:<br/>
            <source>$ mvn jetty:run</source>
            You can then access the server by pointing your browser at the following URL:
            <a href="http://localhost:8080/hapi-fhir-jpaserver-example/">http://localhost:8080/hapi-fhir-jpaserver-example/</a>
          </li>
        </ul>
      </subsection>
    </section>

    <section name="Configuring The JPA Server">

      <p>
        The JPA server is configured through a series of configuration files, most
        of which are documented inline.
      </p>
      <ul>
        <li>
          <a href="https://github.com/jamesagnew/hapi-fhir/blob/master/hapi-fhir-jpaserver-example/src/main/java/ca/uhn/fhir/jpa/demo/FhirServerConfig.java"><b>FhirServerConfig.java</b></a>:
          Configures the database connection settings
        </li>
      </ul>

    </section>

    <section name="DaoConfig">

      <p>
        The Spring configuration contains a definition for a bean called <code>daoConfig</code>,
        which will look something like the following:
      </p>
      <source><![CDATA[@Bean()
public DaoConfig daoConfig() {
   DaoConfig retVal = new DaoConfig();
   retVal.setAllowMultipleDelete(true);
   retVal.setAllowInlineMatchUrlReferences(true);
   return retVal;
}]]></source>

      <p>
        You can use this method to change various configuration settings on the DaoConfig bean
        which define the way that the JPA server will behave.
        See the <a href="./apidocs-jpaserver/ca/uhn/fhir/jpa/dao/DaoConfig.html">DaoConfig JavaDoc</a>
        for information about the available settings.
      </p>

      <subsection name="External/Absolute Resource References">

        <p>
          Clients may sometimes post resources to your server that contain
          absolute resource references. For example, consider the following resource:
        </p>
        <source><![CDATA[<Patient xmlns="http://hl7.org/fhir">
   <id value="patient-infant-01"/>
   <name>
      <use value="official"/>
      <family value="Miller"/>
      <given value="Samuel"/>
   </name>
   <managingOrganization>
      <reference value="http://example.com/fhir/Organization/123"/>
   </managingOrganization>
</Patient>]]></source>

        <p>
          By default, the server will reject this reference, as only
          local references are permitted by the server. This can be changed,
          however.
        </p>
        <p>
          If you want the server to recognize that this URL is actually a local
          reference (i.e. because the server will be deployed to the base URL
          <code>http://example.com/fhir/</code>) you can
          configure the server to recognize this URL via the following DaoConfig
          setting:
        </p>
        <source><![CDATA[@Bean()
public DaoConfig daoConfig() {
   DaoConfig retVal = new DaoConfig();
   // ... other config ...
   retVal.getTreatBaseUrlsAsLocal().add("http://example.com/fhir/");
   return retVal;
}]]></source>

        <p>
          On the other hand, if you want the server to be configurable to
          allow remote references, you can set this with the configuration below.
          Using <code>setAllowExternalReferences</code> means that
          it will be possible to search for references that refer to these
          external references.
        </p>

        <source><![CDATA[@Bean()
public DaoConfig daoConfig() {
   DaoConfig retVal = new DaoConfig();
   // Allow external references
   retVal.setAllowExternalReferences(true);

   // If you are allowing external references, it is recommended to
   // also tell the server which references actually will be local
   retVal.getTreatBaseUrlsAsLocal().add("http://mydomain.com/fhir");
   return retVal;
}]]></source>
      </subsection>

      <subsection name="Logical References">

        <p>
          In some cases, you may have references which are <i>Logical References</i>,
          which means that they act as an identifier and not necessarily as a literal
          web address.
        </p>
        <p>
          A common use for logical references is in references to conformance
          resources, such as ValueSets, StructureDefinitions, etc. For example,
          you might refer to the ValueSet
          <code>http://hl7.org/fhir/ValueSet/quantity-comparator</code>
          from your own resources. In this case, you are not necessarily telling
          the server that this is a real address that it should resolve, but
          rather that this is an identifier for a ValueSet where
          <code>ValueSet.url</code> has the given URI/URL.
        </p>
        <p>
          HAPI can be configured to treat certain URI/URL patterns as
          logical by using the DaoConfig#setTreatReferencesAsLogical property
          (see <a href="./apidocs-jpaserver/ca/uhn/fhir/jpa/dao/DaoConfig.html#setTreatReferencesAsLogical-java.util.Set-">JavaDoc</a>).
          For example:
        </p>
        <code>
          // Treat specific URL as logical
          myDaoConfig.getTreatReferencesAsLogical().add("http://mysystem.com/ValueSet/cats-and-dogs");
|
||||
|
||||
// Treat all references with given prefix as logical
|
||||
myDaoConfig.getTreatReferencesAsLogical().add("http://mysystem.com/mysystem-vs-*");
|
||||
</code>
|
||||
</subsection>
|
||||
|
||||
</section>
|
||||
|
||||
<section name="Architecture">
|
||||
|
||||
<img src="images/jpa_architecture.png" alt="Architecture" align="right"/>
|
||||
|
||||
<p>
|
||||
The HAPI JPA Server has the following components:
|
||||
</p>
|
||||
|
||||
<ul>
|
||||
<li>
|
||||
<b>Resource Providers: </b>
|
||||
A RESTful server <a href="./doc_rest_server.html#resource_providers">Resource Provider</a> is
|
||||
provided for each resource type in a given release of FHIR. Each resource provider implements
|
||||
a
|
||||
<a href="./apidocs/ca/uhn/fhir/rest/annotation/Search.html">@Search</a>
|
||||
method implementing the complete set of search parameters defined in the FHIR
|
||||
specification for the given resource type.<br/><br/>
|
||||
The resource providers also extend a superclass which implements all of the
|
||||
other FHIR methods, such as Read, Create, Delete, etc.<br/><br/>
|
||||
Note that these resource providers are generated as a part of the HAPI build process,
|
||||
so they are not checked into Git. You can see their source
|
||||
in the <a href="./xref-jpaserver/">JXR Report</a>,
|
||||
for example the
|
||||
<a href="./xref-jpaserver/ca/uhn/fhir/jpa/rp/dstu2/PatientResourceProvider.html">PatientResourceProvider</a>.
|
||||
<br/><br/>
|
||||
The resource providers do not actually implement any of the logic
|
||||
in searching, updating, etc. They simply receive the incoming HTTP calls (via the RestfulServer)
|
||||
and pass along the incoming requests to the DAOs.
|
||||
<br/><br/>
|
||||
</li>
|
||||
<li>
|
||||
<b>HAPI DAOs: </b>
|
||||
The DAOs actually implement all of the database business logic relating to
|
||||
the storage, indexing, and retrieval of FHIR resources, using the underlying JPA
|
||||
API.
|
||||
<br/><br/>
|
||||
</li>
|
||||
<li>
|
||||
<b>Hibernate: </b>
|
||||
The HAPI JPA Server uses the JPA library, implemented by Hibernate. No Hibernate
|
||||
specific features are used, so the library should also work with other
|
||||
providers (e.g. Eclipselink) but it is not tested regularly with them.
|
||||
<br/><br/>
|
||||
</li>
|
||||
<li>
|
||||
<b>Database: </b>
|
||||
The RESTful server uses an embedded Derby database, but can be configured to
|
||||
talk to
|
||||
<a href="https://developer.jboss.org/wiki/SupportedDatabases2?_sscc=t">any database supported by Hibernate</a>.
|
||||
</li>
|
||||
|
||||
</ul>
|
||||
|
||||
</section>
|
||||
|
||||
<section name="Additional Information">
|
||||
|
||||
<ul>
|
||||
<li>
|
||||
<a href="https://www.openhealthhub.org/t/hapi-terminology-server-uk-snomed-ct-import/592">This page</a>
|
||||
has information on loading national editions (UK specifically) of SNOMED CT files into
|
||||
the database.
|
||||
</li>
|
||||
</ul>
|
||||
|
||||
</section>
|
||||
|
||||
<!--
|
||||
alter table hfj_res_link ALTER COLUMN "TARGET_RESOURCE_ID" NULL;
|
||||
|
||||
select sp_index_status, count(*) from hfj_resource group by sp_index_status
|
||||
delete from hfj_history_tag where res_id in (select res_id from hfj_resource where sp_index_status = 2);
|
||||
delete from hfj_res_tag where res_id in (select res_id from hfj_resource where sp_index_status = 2);
|
||||
delete from hfj_spidx_coords where res_id in (select res_id from hfj_resource where sp_index_status = 2);
|
||||
delete from hfj_spidx_number where res_id in (select res_id from hfj_resource where sp_index_status = 2);
|
||||
delete from hfj_spidx_quantity where res_id in (select res_id from hfj_resource where sp_index_status = 2);
|
||||
delete from hfj_spidx_string where res_id in (select res_id from hfj_resource where sp_index_status = 2);
|
||||
delete from hfj_spidx_token where res_id in (select res_id from hfj_resource where sp_index_status = 2);
|
||||
delete from hfj_spidx_uri where res_id in (select res_id from hfj_resource where sp_index_status = 2);
|
||||
delete from hfj_search_result where resource_pid in (select res_id from hfj_resource where sp_index_status = 2);
|
||||
delete from hfj_res_link where src_resource_id in (select res_id from hfj_resource where sp_index_status = 2);
|
||||
delete from hfj_res_link where target_resource_id in (select res_id from hfj_resource where sp_index_status = 2);
|
||||
delete from hfj_subscription where res_id in (select res_id from hfj_resource where sp_index_status = 2);
|
||||
delete from hfj_subscription_flag_res where res_id in (select res_id from hfj_resource where sp_index_status = 2);
|
||||
|
||||
|
||||
delete from trm_concept_pc_link where pid in (select pid from trm_concept where codesystem_pid in (select pid from trm_codesystem_ver where res_id in (select res_id from hfj_resource where sp_index_status = 2)));
|
||||
delete from trm_concept where codesystem_pid in (select pid from trm_codesystem_ver where res_id in (select res_id from hfj_resource where sp_index_status = 2));
|
||||
delete from trm_codesystem_ver where res_id in (select res_id from hfj_resource where sp_index_status = 2);
|
||||
delete from trm_codesystem where res_id in (select res_id from hfj_resource where sp_index_status = 2);
|
||||
|
||||
update hfj_resource set forced_id_pid = null where res_id in (select res_id from hfj_resource where sp_index_status = 2);
|
||||
update hfj_res_ver set forced_id_pid = null where res_id in (select res_id from hfj_resource where sp_index_status = 2);
|
||||
delete from hfj_forced_id where resource_pid in (select res_id from hfj_resource where sp_index_status = 2);
|
||||
delete from hfj_resource where res_id in (select res_id from hfj_resource where sp_index_status = 2);
|
||||
delete from hfj_res_ver where res_id in (select res_id from hfj_resource where sp_index_status = 2);
|
||||
|
||||
|
||||
|
||||
-->
|
||||
|
||||
</body>
|
||||
|
||||
</document>
|
||||
<?xml version="1.0" encoding="UTF-8"?>
<document xmlns="http://maven.apache.org/XDOC/2.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/XDOC/2.0 http://maven.apache.org/xsd/xdoc-2.0.xsd">

	<properties>
		<title>JPA Server</title>
		<author email="jamesagnew@users.sourceforge.net">James Agnew</author>
	</properties>

	<body>

		<section name="JPA Server">

			<p>
				The HAPI FHIR
				<a href="./doc_rest_server.html">RestfulServer</a>
				module can be used to create a FHIR server endpoint against an arbitrary
				data source, which could be a database of your own design, an existing
				clinical system, a set of files, or anything else you come up with.
			</p>
			<p>
				HAPI also provides a persistence module which can be used to
				provide a complete RESTful server implementation, backed by a database of
				your choosing. This module uses the <a href="http://en.wikipedia.org/wiki/Java_Persistence_API">JPA 2.0</a>
				API to store data in a database without depending on any specific database technology.
			</p>
			<p>
				<b>Important Note: </b>
				This implementation uses a fairly simple table design, with a
				single table being used to hold resource bodies (which are stored as
				CLOBs, optionally GZipped to save space) and a set of tables to hold search indexes, tags,
				history details, etc. This design is only one of many possible ways
				of designing a FHIR server, so it is worth considering whether it
				is appropriate for the problem you are trying to solve.
			</p>

			<subsection name="Getting Started">

				<p>
					The easiest way to get started with HAPI's JPA server module is
					to begin with the example project. There is a complete sample project
					found in our GitHub repo here: <a href="https://github.com/jamesagnew/hapi-fhir/tree/master/hapi-fhir-jpaserver-example">hapi-fhir-jpaserver-example</a>
				</p>

				<p>
					This example is a fully contained FHIR server, supporting all standard operations (read/create/delete/etc).
					It bundles an embedded instance of the <a href="http://db.apache.org/derby/">Apache Derby</a> Java database
					so that the server can run without depending on any external database, but it can also be
					configured to use an installation of Oracle, Postgres, etc.
				</p>

				<p>
					To take this project for a spin, check out the sources from GitHub (or download a snapshot),
					and then build the project:
				</p>

				<source><![CDATA[$ cd hapi-fhir-jpaserver-example
$ mvn install]]></source>

				<p>
					You now have two options for starting the server:
				</p>
				<ul>
					<li>
						<b>Deploy to Tomcat/JBoss/Websphere/etc: </b> You will now have a file
						in your <code>target</code> directory called <code>hapi-fhir-jpaserver-example.war</code>.
						This WAR file can be deployed to any Servlet container, at which point you could
						access the server by pointing your browser at a URL similar to the following
						(you may need to adjust the
						port depending on which port your container is configured to listen on):
						<a href="http://localhost:8080/hapi-fhir-jpaserver-example/">http://localhost:8080/hapi-fhir-jpaserver-example/</a>
					</li>
					<li>
						<b>Run with Maven and Embedded Jetty: </b> To start the server
						directly within Maven, you can execute the following command:<br/>
						<source>$ mvn jetty:run</source>
						You can then access the server by pointing your browser at the following URL:
						<a href="http://localhost:8080/hapi-fhir-jpaserver-example/">http://localhost:8080/hapi-fhir-jpaserver-example/</a>
					</li>
				</ul>
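				<p>
					Once the server is running (by either method), you can sanity-check it from the
					command line. Note that the base path below is an assumption: recent versions of
					the example project expose the FHIR endpoint under a path such as
					<code>/baseDstu3</code>, but this may differ depending on the version you have
					checked out. A successful request returns the server's conformance statement.
				</p>
				<source><![CDATA[$ curl "http://localhost:8080/hapi-fhir-jpaserver-example/baseDstu3/metadata"]]></source>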
			</subsection>
		</section>

		<section name="Configuring The JPA Server">

			<p>
				The JPA server is configured through a series of configuration files, most
				of which are documented inline.
			</p>
			<ul>
				<li>
					<a href="https://github.com/jamesagnew/hapi-fhir/blob/master/hapi-fhir-jpaserver-example/src/main/java/ca/uhn/fhir/jpa/demo/FhirServerConfig.java"><b>FhirServerConfig.java</b></a>:
					Configures the database connection settings
				</li>
			</ul>

		</section>

		<section name="DaoConfig">

			<p>
				The Spring configuration contains a definition for a bean called <code>daoConfig</code>,
				which will look something like the following:
			</p>
			<source><![CDATA[@Bean()
public DaoConfig daoConfig() {
   DaoConfig retVal = new DaoConfig();
   retVal.setAllowMultipleDelete(true);
   retVal.setAllowInlineMatchUrlReferences(true);
   return retVal;
}]]></source>

			<p>
				You can use this method to change various configuration settings on the DaoConfig bean
				which define the way that the JPA server will behave.
				See the <a href="./apidocs-jpaserver/ca/uhn/fhir/jpa/dao/DaoConfig.html">DaoConfig JavaDoc</a>
				for information about the available settings.
			</p>
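			<p>
				As a sketch (reusing only settings that are described elsewhere on this page),
				several such options can be combined in the same bean method:
			</p>
			<source><![CDATA[@Bean()
public DaoConfig daoConfig() {
   DaoConfig retVal = new DaoConfig();
   // Allow conditional deletes that match (and remove) multiple resources
   retVal.setAllowMultipleDelete(true);
   // Allow references expressed as match URLs within submitted resources
   retVal.setAllowInlineMatchUrlReferences(true);
   // Keep search results cached for 60 seconds
   // (see "Search Result Caching" below)
   retVal.setReuseCachedSearchResultsForMillis(60000L);
   return retVal;
}]]></source>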

			<subsection name="External/Absolute Resource References">

				<p>
					Clients may sometimes post resources to your server that contain
					absolute resource references. For example, consider the following resource:
				</p>
				<source><![CDATA[<Patient xmlns="http://hl7.org/fhir">
   <id value="patient-infant-01"/>
   <name>
      <use value="official"/>
      <family value="Miller"/>
      <given value="Samuel"/>
   </name>
   <managingOrganization>
      <reference value="http://example.com/fhir/Organization/123"/>
   </managingOrganization>
</Patient>]]></source>

				<p>
					By default, the server will reject this reference, as only
					local references are permitted. This behaviour can, however, be changed.
				</p>
				<p>
					If you want the server to recognize that this URL is actually a local
					reference (i.e. because the server will be deployed to the base URL
					<code>http://example.com/fhir/</code>), you can
					configure the server to recognize this URL via the following DaoConfig
					setting:
				</p>
				<source><![CDATA[@Bean()
public DaoConfig daoConfig() {
   DaoConfig retVal = new DaoConfig();
   // ... other config ...
   retVal.getTreatBaseUrlsAsLocal().add("http://example.com/fhir/");
   return retVal;
}]]></source>

				<p>
					On the other hand, if you want the server to allow remote references,
					you can enable this with the configuration below.
					Using <code>setAllowExternalReferences</code> means that
					it will be possible to search for references that point to these
					external addresses.
				</p>

				<source><![CDATA[@Bean()
public DaoConfig daoConfig() {
   DaoConfig retVal = new DaoConfig();
   // Allow external references
   retVal.setAllowExternalReferences(true);

   // If you are allowing external references, it is recommended to
   // also tell the server which references actually will be local
   retVal.getTreatBaseUrlsAsLocal().add("http://mydomain.com/fhir");
   return retVal;
}]]></source>
			</subsection>

			<subsection name="Logical References">

				<p>
					In some cases, you may have references which are <i>Logical References</i>,
					which means that they act as an identifier and not necessarily as a literal
					web address.
				</p>
				<p>
					A common use for logical references is in references to conformance
					resources, such as ValueSets, StructureDefinitions, etc. For example,
					you might refer to the ValueSet
					<code>http://hl7.org/fhir/ValueSet/quantity-comparator</code>
					from your own resources. In this case, you are not necessarily telling
					the server that this is a real address that it should resolve, but
					rather that this is an identifier for a ValueSet where
					<code>ValueSet.url</code> has the given URI/URL.
				</p>
				<p>
					HAPI can be configured to treat certain URI/URL patterns as
					logical by using the <code>DaoConfig#setTreatReferencesAsLogical</code> property
					(see <a href="./apidocs-jpaserver/ca/uhn/fhir/jpa/dao/DaoConfig.html#setTreatReferencesAsLogical-java.util.Set-">JavaDoc</a>).
					For example:
				</p>
				<div class="source">
					<pre>
// Treat specific URL as logical
myDaoConfig.getTreatReferencesAsLogical().add("http://mysystem.com/ValueSet/cats-and-dogs");

// Treat all references with given prefix as logical
myDaoConfig.getTreatReferencesAsLogical().add("http://mysystem.com/mysystem-vs-*");
					</pre>
				</div>
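				<p>
					As with the other settings above, these calls would typically live inside
					your <code>daoConfig()</code> bean method (a sketch, reusing only methods
					shown on this page):
				</p>
				<source><![CDATA[@Bean()
public DaoConfig daoConfig() {
   DaoConfig retVal = new DaoConfig();
   // Exact-match logical reference
   retVal.getTreatReferencesAsLogical().add("http://mysystem.com/ValueSet/cats-and-dogs");
   // Prefix match: the trailing * matches any suffix
   retVal.getTreatReferencesAsLogical().add("http://mysystem.com/mysystem-vs-*");
   return retVal;
}]]></source>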
				<a name="search_result_caching"/>
			</subsection>

			<subsection name="Search Result Caching">

				<p>
					By default, search results will be cached for one minute. This means that
					if a client performs a search for <code>Patient?name=smith</code> and gets back
					500 results, and then the same search is performed again within 60 seconds, the
					previously loaded search results will be returned. This also means that
					any new Patient resources named "Smith" created within the last minute will not be
					reflected in the results.
				</p>
				<p>
					Under many normal scenarios this is an acceptable performance tradeoff,
					but in some cases it is not. If you want to disable caching, you have two
					options:
				</p>
				<p><b>Globally Disable / Change Caching Timeout</b></p>
				<p>
					You can change the global cache timeout using the following setting
					(passing <code>null</code> disables the cache entirely):
				</p>
				<div class="source">
					<pre>
myDaoConfig.setReuseCachedSearchResultsForMillis(null);
					</pre>
				</div>
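				<p>
					Alternatively, the cache can be kept but given a different window. For
					example (the value is in milliseconds; ten seconds here is purely an
					illustration):
				</p>
				<div class="source">
					<pre>
// Reuse cached search results for up to 10 seconds instead of the default minute
myDaoConfig.setReuseCachedSearchResultsForMillis(10 * 1000L);
					</pre>
				</div>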
				<p><b>Disable Cache at the Request Level</b></p>
				<p>
					Clients can selectively disable caching for an individual request
					using the <code>Cache-Control</code> header:
				</p>
				<div class="source">
					<pre>
Cache-Control: no-cache
					</pre>
				</div>
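				<p>
					For example, from the command line this header can be added with any
					HTTP client (the URL below assumes the example server from the Getting
					Started section, with an assumed base path):
				</p>
				<source><![CDATA[$ curl -H "Cache-Control: no-cache" \
      "http://localhost:8080/hapi-fhir-jpaserver-example/baseDstu3/Patient?name=smith"]]></source>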
				<p><b>Disable Paging at the Request Level</b></p>
				<p>
					If the client knows that it will only want a small number of results
					(for example, a UI containing 20 results is being shown and the client
					knows that it will never load the next page of results) the client
					may also use the <code>no-store</code> directive along with a HAPI FHIR
					extension called <code>max-results</code> in order to specify that
					only the given number of results should be fetched. This directive
					disables paging entirely for the request and causes the request to
					return immediately when the given number of results is found. This
					can cause a noticeable performance improvement in some cases.
				</p>
				<div class="source">
					<pre>
Cache-Control: no-store, max-results=20
					</pre>
				</div>

			</subsection>
		</section>

		<section name="Architecture">

			<img src="images/jpa_architecture.png" alt="Architecture" align="right"/>

			<p>
				The HAPI JPA Server has the following components:
			</p>

			<ul>
				<li>
					<b>Resource Providers: </b>
					A RESTful server <a href="./doc_rest_server.html#resource_providers">Resource Provider</a> is
					provided for each resource type in a given release of FHIR. Each resource provider implements
					a
					<a href="./apidocs/ca/uhn/fhir/rest/annotation/Search.html">@Search</a>
					method implementing the complete set of search parameters defined in the FHIR
					specification for the given resource type.<br/><br/>
					The resource providers also extend a superclass which implements all of the
					other FHIR methods, such as Read, Create, Delete, etc.<br/><br/>
					Note that these resource providers are generated as a part of the HAPI build process,
					so they are not checked into Git. You can see their source
					in the <a href="./xref-jpaserver/">JXR Report</a>,
					for example the
					<a href="./xref-jpaserver/ca/uhn/fhir/jpa/rp/dstu2/PatientResourceProvider.html">PatientResourceProvider</a>.
					<br/><br/>
					The resource providers do not actually implement any of the logic
					in searching, updating, etc. They simply receive the incoming HTTP calls (via the RestfulServer)
					and pass along the incoming requests to the DAOs. A hand-written sketch of this
					shape is shown after this list.
					<br/><br/>
				</li>
				<li>
					<b>HAPI DAOs: </b>
					The DAOs actually implement all of the database business logic relating to
					the storage, indexing, and retrieval of FHIR resources, using the underlying JPA
					API.
					<br/><br/>
				</li>
				<li>
					<b>Hibernate: </b>
					The HAPI JPA Server uses the JPA library, implemented by Hibernate. No Hibernate-specific
					features are used, so the library should also work with other
					providers (e.g. EclipseLink), but it is not tested regularly with them.
					<br/><br/>
				</li>
				<li>
					<b>Database: </b>
					The RESTful server uses an embedded Derby database, but can be configured to
					talk to
					<a href="https://developer.jboss.org/wiki/SupportedDatabases2?_sscc=t">any database supported by Hibernate</a>.
				</li>
			</ul>
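			<p>
				To make the resource-provider role concrete, here is a minimal hand-written
				sketch of the pattern. This is <b>not</b> one of the generated JPA providers;
				the class name and the empty in-memory body are purely illustrative, and a real
				provider would delegate to the DAO layer where the placeholder comment appears:
			</p>
			<source><![CDATA[import ca.uhn.fhir.rest.annotation.RequiredParam;
import ca.uhn.fhir.rest.annotation.Search;
import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.server.IResourceProvider;
import org.hl7.fhir.dstu3.model.Patient;
import org.hl7.fhir.instance.model.api.IBaseResource;

import java.util.ArrayList;
import java.util.List;

public class SketchPatientProvider implements IResourceProvider {

   @Override
   public Class<? extends IBaseResource> getResourceType() {
      return Patient.class;
   }

   /**
    * Handles GET [base]/Patient?name=...
    */
   @Search
   public List<Patient> searchByName(@RequiredParam(name = Patient.SP_NAME) StringParam theName) {
      List<Patient> retVal = new ArrayList<Patient>();
      // Placeholder: a generated JPA provider would pass theName to the DAOs here
      return retVal;
   }
}]]></source>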

		</section>

		<section name="Additional Information">

			<ul>
				<li>
					<a href="https://www.openhealthhub.org/t/hapi-terminology-server-uk-snomed-ct-import/592">This page</a>
					has information on loading national editions (UK specifically) of SNOMED CT files into
					the database.
				</li>
			</ul>

		</section>

		<!--
		alter table hfj_res_link ALTER COLUMN "TARGET_RESOURCE_ID" NULL;

		select sp_index_status, count(*) from hfj_resource group by sp_index_status
		delete from hfj_history_tag where res_id in (select res_id from hfj_resource where sp_index_status = 2);
		delete from hfj_res_tag where res_id in (select res_id from hfj_resource where sp_index_status = 2);
		delete from hfj_spidx_coords where res_id in (select res_id from hfj_resource where sp_index_status = 2);
		delete from hfj_spidx_number where res_id in (select res_id from hfj_resource where sp_index_status = 2);
		delete from hfj_spidx_quantity where res_id in (select res_id from hfj_resource where sp_index_status = 2);
		delete from hfj_spidx_string where res_id in (select res_id from hfj_resource where sp_index_status = 2);
		delete from hfj_spidx_token where res_id in (select res_id from hfj_resource where sp_index_status = 2);
		delete from hfj_spidx_uri where res_id in (select res_id from hfj_resource where sp_index_status = 2);
		delete from hfj_search_result where resource_pid in (select res_id from hfj_resource where sp_index_status = 2);
		delete from hfj_res_link where src_resource_id in (select res_id from hfj_resource where sp_index_status = 2);
		delete from hfj_res_link where target_resource_id in (select res_id from hfj_resource where sp_index_status = 2);
		delete from hfj_subscription where res_id in (select res_id from hfj_resource where sp_index_status = 2);
		delete from hfj_subscription_flag_res where res_id in (select res_id from hfj_resource where sp_index_status = 2);

		delete from trm_concept_pc_link where pid in (select pid from trm_concept where codesystem_pid in (select pid from trm_codesystem_ver where res_id in (select res_id from hfj_resource where sp_index_status = 2)));
		delete from trm_concept where codesystem_pid in (select pid from trm_codesystem_ver where res_id in (select res_id from hfj_resource where sp_index_status = 2));
		delete from trm_codesystem_ver where res_id in (select res_id from hfj_resource where sp_index_status = 2);
		delete from trm_codesystem where res_id in (select res_id from hfj_resource where sp_index_status = 2);

		update hfj_resource set forced_id_pid = null where res_id in (select res_id from hfj_resource where sp_index_status = 2);
		update hfj_res_ver set forced_id_pid = null where res_id in (select res_id from hfj_resource where sp_index_status = 2);
		delete from hfj_forced_id where resource_pid in (select res_id from hfj_resource where sp_index_status = 2);
		delete from hfj_resource where res_id in (select res_id from hfj_resource where sp_index_status = 2);
		delete from hfj_res_ver where res_id in (select res_id from hfj_resource where sp_index_status = 2);
		-->

	</body>

</document>