diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md
new file mode 100644
index 00000000000..3829daa7291
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/bug_report.md
@@ -0,0 +1,32 @@
+---
+name: Bug report
+about: Create a report to help us improve
+
+---
+
+NOTE: Before filing a ticket, please see the following URL:
+https://github.com/jamesagnew/hapi-fhir/wiki/Getting-Help
+
+**Describe the bug**
+A clear and concise description of what the bug is.
+
+**To Reproduce**
+Steps to reproduce the behavior:
+1. Go to '...'
+2. Click on '....'
+3. Scroll down to '....'
+4. See error
+
+**Expected behavior**
+A clear and concise description of what you expected to happen.
+
+**Screenshots**
+If applicable, add screenshots to help explain your problem.
+
+**Environment (please complete the following information):**
+ - HAPI FHIR Version
+ - OS: [e.g. iOS]
+ - Browser [e.g. chrome, safari]
+
+**Additional context**
+Add any other context about the problem here.
diff --git a/README.md b/README.md
index ad66edaa9d7..d753df33492 100644
--- a/README.md
+++ b/README.md
@@ -17,3 +17,5 @@ A demonstration of this project is available here:
http://hapi.fhir.org/
This project is Open Source, licensed under the Apache Software License 2.0.
+
+Please see [this wiki page](https://github.com/jamesagnew/hapi-fhir/wiki/Getting-Help) for information on where to get help with HAPI FHIR. Please see [Smile CDR](https://smilecdr.com) for information on commercial support.
diff --git a/examples/src/main/java/example/AuthorizationInterceptors.java b/examples/src/main/java/example/AuthorizationInterceptors.java
index 71132a821f2..3ef5822b6d1 100644
--- a/examples/src/main/java/example/AuthorizationInterceptors.java
+++ b/examples/src/main/java/example/AuthorizationInterceptors.java
@@ -4,6 +4,7 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank;
import java.util.List;
+import org.hl7.fhir.dstu3.model.IdType;
import org.hl7.fhir.instance.model.api.IBaseResource;
import ca.uhn.fhir.model.dstu2.resource.Patient;
@@ -140,5 +141,21 @@ public class AuthorizationInterceptors {
}
};
//END SNIPPET: authorizeTenantAction
+
+
+ //START SNIPPET: patchAll
+ new AuthorizationInterceptor(PolicyEnum.DENY) {
+ @Override
+ public List buildRuleList(RequestDetails theRequestDetails) {
+ return new RuleBuilder()
+ // Authorize patch requests
+ .allow().patch().allRequests().andThen()
+ // Authorize actual writes that patch may perform
+ .allow().write().allResources().inCompartment("Patient", new IdType("Patient/123")).andThen()
+ .build();
+ }
+ };
+ //END SNIPPET: patchAll
+
}
}
diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/FhirContext.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/FhirContext.java
index 01ccc3ca7b8..86514b5ae53 100644
--- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/FhirContext.java
+++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/FhirContext.java
@@ -1,7 +1,34 @@
package ca.uhn.fhir.context;
+import ca.uhn.fhir.context.api.AddProfileTagEnum;
+import ca.uhn.fhir.context.support.IContextValidationSupport;
+import ca.uhn.fhir.fluentpath.IFluentPath;
+import ca.uhn.fhir.i18n.HapiLocalizer;
+import ca.uhn.fhir.model.api.IElement;
+import ca.uhn.fhir.model.api.IFhirVersion;
+import ca.uhn.fhir.model.api.IResource;
+import ca.uhn.fhir.model.view.ViewGenerator;
+import ca.uhn.fhir.narrative.INarrativeGenerator;
+import ca.uhn.fhir.parser.*;
+import ca.uhn.fhir.rest.api.IVersionSpecificBundleFactory;
+import ca.uhn.fhir.rest.client.api.IBasicClient;
+import ca.uhn.fhir.rest.client.api.IGenericClient;
+import ca.uhn.fhir.rest.client.api.IRestfulClient;
+import ca.uhn.fhir.rest.client.api.IRestfulClientFactory;
+import ca.uhn.fhir.util.FhirTerser;
+import ca.uhn.fhir.util.ReflectionUtil;
+import ca.uhn.fhir.util.VersionUtil;
+import ca.uhn.fhir.validation.FhirValidator;
+import org.apache.commons.lang3.Validate;
+import org.hl7.fhir.instance.model.api.IBase;
+import org.hl7.fhir.instance.model.api.IBaseBundle;
+import org.hl7.fhir.instance.model.api.IBaseResource;
+
import java.io.IOException;
import java.lang.reflect.Method;
+import java.lang.reflect.Modifier;
+import java.util.*;
+import java.util.Map.Entry;
/*
* #%L
@@ -23,30 +50,10 @@ import java.lang.reflect.Method;
* #L%
*/
-import java.lang.reflect.Modifier;
-import java.util.*;
-import java.util.Map.Entry;
-
-import org.apache.commons.lang3.Validate;
-import org.hl7.fhir.instance.model.api.*;
-
-import ca.uhn.fhir.context.api.AddProfileTagEnum;
-import ca.uhn.fhir.context.support.IContextValidationSupport;
-import ca.uhn.fhir.fluentpath.IFluentPath;
-import ca.uhn.fhir.i18n.HapiLocalizer;
-import ca.uhn.fhir.model.api.*;
-import ca.uhn.fhir.model.view.ViewGenerator;
-import ca.uhn.fhir.narrative.INarrativeGenerator;
-import ca.uhn.fhir.parser.*;
-import ca.uhn.fhir.rest.api.IVersionSpecificBundleFactory;
-import ca.uhn.fhir.rest.client.api.*;
-import ca.uhn.fhir.util.*;
-import ca.uhn.fhir.validation.FhirValidator;
-
/**
* The FHIR context is the central starting point for the use of the HAPI FHIR API. It should be created once, and then
* used as a factory for various other types of objects (parsers, clients, etc.).
- *
+ *
*
* Important usage notes:
*
@@ -68,6 +75,7 @@ public class FhirContext {
private static final List> EMPTY_LIST = Collections.emptyList();
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirContext.class);
+ private final IFhirVersion myVersion;
private AddProfileTagEnum myAddProfileTagWhenEncoding = AddProfileTagEnum.ONLY_FOR_CUSTOM;
private volatile Map, BaseRuntimeElementDefinition>> myClassToElementDefinition = Collections.emptyMap();
private ArrayList> myCustomTypes;
@@ -87,14 +95,11 @@ public class FhirContext {
private volatile IRestfulClientFactory myRestfulClientFactory;
private volatile RuntimeChildUndeclaredExtensionDefinition myRuntimeChildUndeclaredExtensionDefinition;
private IContextValidationSupport, ?, ?, ?, ?, ?> myValidationSupport;
-
- private final IFhirVersion myVersion;
-
private Map>> myVersionToNameToResourceType = Collections.emptyMap();
/**
* @deprecated It is recommended that you use one of the static initializer methods instead
- * of this method, e.g. {@link #forDstu2()} or {@link #forDstu3()} or {@link #forR4()}
+ * of this method, e.g. {@link #forDstu2()} or {@link #forDstu3()} or {@link #forR4()}
*/
@Deprecated
public FhirContext() {
@@ -103,7 +108,7 @@ public class FhirContext {
/**
* @deprecated It is recommended that you use one of the static initializer methods instead
- * of this method, e.g. {@link #forDstu2()} or {@link #forDstu3()} or {@link #forR4()}
+ * of this method, e.g. {@link #forDstu2()} or {@link #forDstu3()} or {@link #forR4()}
*/
@Deprecated
public FhirContext(Class extends IBaseResource> theResourceType) {
@@ -112,7 +117,7 @@ public class FhirContext {
/**
* @deprecated It is recommended that you use one of the static initializer methods instead
- * of this method, e.g. {@link #forDstu2()} or {@link #forDstu3()} or {@link #forR4()}
+ * of this method, e.g. {@link #forDstu2()} or {@link #forDstu3()} or {@link #forR4()}
*/
@Deprecated
public FhirContext(Class>... theResourceTypes) {
@@ -121,7 +126,7 @@ public class FhirContext {
/**
* @deprecated It is recommended that you use one of the static initializer methods instead
- * of this method, e.g. {@link #forDstu2()} or {@link #forDstu3()} or {@link #forR4()}
+ * of this method, e.g. {@link #forDstu2()} or {@link #forDstu3()} or {@link #forR4()}
*/
@Deprecated
public FhirContext(Collection> theResourceTypes) {
@@ -161,7 +166,7 @@ public class FhirContext {
if (theVersion == null) {
ourLog.info("Creating new FhirContext with auto-detected version [{}]. It is recommended to explicitly select a version for future compatibility by invoking FhirContext.forDstuX()",
- myVersion.getVersion().name());
+ myVersion.getVersion().name());
} else {
ourLog.info("Creating new FHIR context for FHIR version [{}]", myVersion.getVersion().name());
}
@@ -201,13 +206,37 @@ public class FhirContext {
* When encoding resources, this setting configures the parser to include
* an entry in the resource's metadata section which indicates which profile(s) the
* resource claims to conform to. The default is {@link AddProfileTagEnum#ONLY_FOR_CUSTOM}.
- *
+ *
* @see #setAddProfileTagWhenEncoding(AddProfileTagEnum) for more information
*/
public AddProfileTagEnum getAddProfileTagWhenEncoding() {
return myAddProfileTagWhenEncoding;
}
+ /**
+ * When encoding resources, this setting configures the parser to include
+ * an entry in the resource's metadata section which indicates which profile(s) the
+ * resource claims to conform to. The default is {@link AddProfileTagEnum#ONLY_FOR_CUSTOM}.
+ *
+ * This feature is intended for situations where custom resource types are being used,
+ * avoiding the need to manually add profile declarations for these custom types.
+ *
+ *
+ * See Profiling and Extensions
+ * for more information on using custom types.
+ *
+ *
+ * Note that this feature automatically adds the profile, but leaves any profile tags
+ * which have been manually added in place as well.
+ *
+ *
+ * @param theAddProfileTagWhenEncoding The add profile mode (must not be <code>null</code>)
+ */
+ public void setAddProfileTagWhenEncoding(AddProfileTagEnum theAddProfileTagWhenEncoding) {
+ Validate.notNull(theAddProfileTagWhenEncoding, "theAddProfileTagWhenEncoding must not be null");
+ myAddProfileTagWhenEncoding = theAddProfileTagWhenEncoding;
+ }
+
Collection getAllResourceDefinitions() {
validateInitialized();
return myNameToResourceDefinition.values();
@@ -215,7 +244,7 @@ public class FhirContext {
/**
* Returns the default resource type for the given profile
- *
+ *
* @see #setDefaultTypeForProfile(String, Class)
*/
public Class extends IBaseResource> getDefaultTypeForProfile(String theProfile) {
@@ -249,7 +278,9 @@ public class FhirContext {
return myNameToElementDefinition.get(theElementName.toLowerCase());
}
- /** For unit tests only */
+ /**
+ * For unit tests only
+ */
int getElementDefinitionCount() {
validateInitialized();
return myClassToElementDefinition.size();
@@ -274,20 +305,43 @@ public class FhirContext {
return myLocalizer;
}
+ /**
+ * This feature is not yet in its final state and should be considered an internal part of HAPI for now - use with
+ * caution
+ */
+ public void setLocalizer(HapiLocalizer theMessages) {
+ myLocalizer = theMessages;
+ }
+
public INarrativeGenerator getNarrativeGenerator() {
return myNarrativeGenerator;
}
+ public void setNarrativeGenerator(INarrativeGenerator theNarrativeGenerator) {
+ myNarrativeGenerator = theNarrativeGenerator;
+ }
+
/**
* Returns the parser options object which will be used to supply default
* options to newly created parsers
- *
+ *
* @return The parser options - Will not return null
*/
public ParserOptions getParserOptions() {
return myParserOptions;
}
+ /**
+ * Sets the parser options object which will be used to supply default
+ * options to newly created parsers
+ *
+ * @param theParserOptions The parser options object - Must not be null
+ */
+ public void setParserOptions(ParserOptions theParserOptions) {
+ Validate.notNull(theParserOptions, "theParserOptions must not be null");
+ myParserOptions = theParserOptions;
+ }
+
/**
* Get the configured performance options
*/
@@ -295,6 +349,32 @@ public class FhirContext {
return myPerformanceOptions;
}
+ // /**
+ // * Return an unmodifiable collection containing all known resource definitions
+ // */
+ // public Collection getResourceDefinitions() {
+ //
+ // Set> datatypes = Collections.emptySet();
+ // Map, BaseRuntimeElementDefinition>> existing = Collections.emptyMap();
+ // HashMap> types = new HashMap>();
+ // ModelScanner.scanVersionPropertyFile(datatypes, types, myVersion.getVersion(), existing);
+ // for (int next : types.)
+ //
+ // return Collections.unmodifiableCollection(myIdToResourceDefinition.values());
+ // }
+
+ /**
+ * Sets the configured performance options
+ *
+ * @see PerformanceOptionsEnum for a list of available options
+ */
+ public void setPerformanceOptions(Collection theOptions) {
+ myPerformanceOptions.clear();
+ if (theOptions != null) {
+ myPerformanceOptions.addAll(theOptions);
+ }
+ }
+
/**
* Returns the scanned runtime model for the given type. This is an advanced feature which is generally only needed
* for extending the core library.
@@ -359,8 +439,12 @@ public class FhirContext {
*
* Note that this method is case insensitive!
*
+ *
+ * @throws DataFormatException If the resource name is not known
*/
- public RuntimeResourceDefinition getResourceDefinition(String theResourceName) {
+ // Multiple spots in HAPI FHIR and Smile CDR depend on DataFormatException being
+ // thrown by this method, don't change that.
+ public RuntimeResourceDefinition getResourceDefinition(String theResourceName) throws DataFormatException {
validateInitialized();
Validate.notBlank(theResourceName, "theResourceName must not be blank");
@@ -380,20 +464,6 @@ public class FhirContext {
return retVal;
}
- // /**
- // * Return an unmodifiable collection containing all known resource definitions
- // */
- // public Collection getResourceDefinitions() {
- //
- // Set> datatypes = Collections.emptySet();
- // Map, BaseRuntimeElementDefinition>> existing = Collections.emptyMap();
- // HashMap> types = new HashMap>();
- // ModelScanner.scanVersionPropertyFile(datatypes, types, myVersion.getVersion(), existing);
- // for (int next : types.)
- //
- // return Collections.unmodifiableCollection(myIdToResourceDefinition.values());
- // }
-
/**
* Returns the scanned runtime model for the given type. This is an advanced feature which is generally only needed
* for extending the core library.
@@ -412,10 +482,40 @@ public class FhirContext {
return myIdToResourceDefinition.values();
}
+ /**
+ * Returns an unmodifiable set containing all resource names known to this
+ * context
+ */
+ public Set getResourceNames() {
+ Set resourceNames = new HashSet<>();
+
+ if (myNameToResourceDefinition.isEmpty()) {
+ Properties props = new Properties();
+ try {
+ props.load(myVersion.getFhirVersionPropertiesFile());
+ } catch (IOException theE) {
+ throw new ConfigurationException("Failed to load version properties file");
+ }
+ Enumeration> propNames = props.propertyNames();
+ while (propNames.hasMoreElements()) {
+ String next = (String) propNames.nextElement();
+ if (next.startsWith("resource.")) {
+ resourceNames.add(next.substring("resource.".length()).trim());
+ }
+ }
+ }
+
+ for (RuntimeResourceDefinition next : myNameToResourceDefinition.values()) {
+ resourceNames.add(next.getName());
+ }
+
+ return Collections.unmodifiableSet(resourceNames);
+ }
+
/**
* Get the restful client factory. If no factory has been set, this will be initialized with
* a new ApacheRestfulClientFactory.
- *
+ *
* @return the factory used to create the restful clients
*/
public IRestfulClientFactory getRestfulClientFactory() {
@@ -429,6 +529,16 @@ public class FhirContext {
return myRestfulClientFactory;
}
+ /**
+ * Set the restful client factory
+ *
+ * @param theRestfulClientFactory
+ */
+ public void setRestfulClientFactory(IRestfulClientFactory theRestfulClientFactory) {
+ Validate.notNull(theRestfulClientFactory, "theRestfulClientFactory must not be null");
+ this.myRestfulClientFactory = theRestfulClientFactory;
+ }
+
public RuntimeChildUndeclaredExtensionDefinition getRuntimeChildUndeclaredExtensionDefinition() {
validateInitialized();
return myRuntimeChildUndeclaredExtensionDefinition;
@@ -438,7 +548,7 @@ public class FhirContext {
* Returns the validation support module configured for this context, creating a default
* implementation if no module has been passed in via the {@link #setValidationSupport(IContextValidationSupport)}
* method
- *
+ *
* @see #setValidationSupport(IContextValidationSupport)
*/
public IContextValidationSupport, ?, ?, ?, ?, ?> getValidationSupport() {
@@ -448,6 +558,15 @@ public class FhirContext {
return myValidationSupport;
}
+ /**
+ * Sets the validation support module to use for this context. The validation support module
+ * is used to supply underlying infrastructure such as conformance resources (StructureDefinition, ValueSet, etc)
+ * as well as to provide terminology services to modules such as the validator and FluentPath executor
+ */
+ public void setValidationSupport(IContextValidationSupport, ?, ?, ?, ?, ?> theValidationSupport) {
+ myValidationSupport = theValidationSupport;
+ }
+
public IFhirVersion getVersion() {
return myVersion;
}
@@ -455,7 +574,7 @@ public class FhirContext {
/**
* Returns <code>true</code> if any default types for specific profiles have been defined
* within this context.
- *
+ *
* @see #setDefaultTypeForProfile(String, Class)
* @see #getDefaultTypeForProfile(String)
*/
@@ -483,7 +602,7 @@ public class FhirContext {
* on a context for a previous version of fhir will result in an
* {@link UnsupportedOperationException}
*
- *
+ *
* @since 2.2
*/
public IFluentPath newFluentPath() {
@@ -492,7 +611,7 @@ public class FhirContext {
/**
* Create and return a new JSON parser.
- *
+ *
*
* Thread safety: Parsers are not guaranteed to be thread safe. Create a new parser instance for every thread
* or every message being parsed/encoded.
@@ -513,19 +632,16 @@ public class FhirContext {
* sub-interface {@link IBasicClient}). See the RESTful Client documentation for more
* information on how to define this interface.
- *
+ *
*
* Performance Note: This method is cheap to call, and may be called once for every operation invocation
* without incurring any performance penalty
*
- *
- * @param theClientType
- * The client type, which is an interface type to be instantiated
- * @param theServerBase
- * The URL of the base for the restful FHIR server to connect to
+ *
+ * @param theClientType The client type, which is an interface type to be instantiated
+ * @param theServerBase The URL of the base for the restful FHIR server to connect to
* @return A newly created client
- * @throws ConfigurationException
- * If the interface type is not an interface
+ * @throws ConfigurationException If the interface type is not an interface
*/
public T newRestfulClient(Class theClientType, String theServerBase) {
return getRestfulClientFactory().newClient(theClientType, theServerBase);
@@ -535,14 +651,13 @@ public class FhirContext {
* Instantiates a new generic client. A generic client is able to perform any of the FHIR RESTful operations against
* a compliant server, but does not have methods defining the specific functionality required (as is the case with
* {@link #newRestfulClient(Class, String) non-generic clients}).
- *
+ *
*
* Performance Note: This method is cheap to call, and may be called once for every operation invocation
* without incurring any performance penalty
*
- *
- * @param theServerBase
- * The URL of the base for the restful FHIR server to connect to
+ *
+ * @param theServerBase The URL of the base for the restful FHIR server to connect to
*/
public IGenericClient newRestfulGenericClient(String theServerBase) {
return getRestfulClientFactory().newGenericClient(theServerBase);
@@ -569,7 +684,7 @@ public class FhirContext {
/**
* Create and return a new XML parser.
- *
+ *
*
* Thread safety: Parsers are not guaranteed to be thread safe. Create a new parser instance for every thread
* or every message being parsed/encoded.
@@ -592,9 +707,8 @@ public class FhirContext {
* THREAD SAFETY WARNING: This method is not thread safe. It should be called before any
* threads are able to call any methods on this context.
*
- *
- * @param theType
- * The custom type to add (must not be <code>null</code>)
+ *
+ * @param theType The custom type to add (must not be <code>null</code>)
*/
public void registerCustomType(Class extends IBase> theType) {
Validate.notNull(theType, "theType must not be null");
@@ -612,9 +726,8 @@ public class FhirContext {
* THREAD SAFETY WARNING: This method is not thread safe. It should be called before any
* threads are able to call any methods on this context.
*
- *
- * @param theTypes
- * The custom types to add (must not be <code>null</code> or contain null elements in the collection)
+ *
+ * @param theTypes The custom types to add (must not be <code>null</code> or contain null elements in the collection)
*/
public void registerCustomTypes(Collection> theTypes) {
Validate.notNull(theTypes, "theTypes must not be null");
@@ -698,31 +811,6 @@ public class FhirContext {
return classToElementDefinition;
}
- /**
- * When encoding resources, this setting configures the parser to include
- * an entry in the resource's metadata section which indicates which profile(s) the
- * resource claims to conform to. The default is {@link AddProfileTagEnum#ONLY_FOR_CUSTOM}.
- *
- * This feature is intended for situations where custom resource types are being used,
- * avoiding the need to manually add profile declarations for these custom types.
- *
- *
- * See Profiling and Extensions
- * for more information on using custom types.
- *
- *
- * Note that this feature automatically adds the profile, but leaves any profile tags
- * which have been manually added in place as well.
- *
- *
- * @param theAddProfileTagWhenEncoding
- * The add profile mode (must not be <code>null</code>)
- */
- public void setAddProfileTagWhenEncoding(AddProfileTagEnum theAddProfileTagWhenEncoding) {
- Validate.notNull(theAddProfileTagWhenEncoding, "theAddProfileTagWhenEncoding must not be null");
- myAddProfileTagWhenEncoding = theAddProfileTagWhenEncoding;
- }
-
/**
* Sets the default type which will be used when parsing a resource that is found to be
* of the given profile.
@@ -732,12 +820,10 @@ public class FhirContext {
* if the parser is parsing a resource and finds that it declares that it conforms to that profile,
* the <code>MyPatient</code> type will be used unless otherwise specified.
*
- *
- * @param theProfile
- *            The profile string, e.g. <code>"http://example.com/some_patient_profile"</code>. Must not be
- *            <code>null</code> or empty.
- * @param theClass
- *            The resource type, or <code>null</code> to clear any existing type
+ *
+ * @param theProfile The profile string, e.g. <code>"http://example.com/some_patient_profile"</code>. Must not be
+ *                   <code>null</code> or empty.
+ * @param theClass The resource type, or <code>null</code> to clear any existing type
*/
public void setDefaultTypeForProfile(String theProfile, Class extends IBaseResource> theClass) {
Validate.notBlank(theProfile, "theProfile must not be null or empty");
@@ -748,56 +834,19 @@ public class FhirContext {
}
}
- /**
- * This feature is not yet in its final state and should be considered an internal part of HAPI for now - use with
- * caution
- */
- public void setLocalizer(HapiLocalizer theMessages) {
- myLocalizer = theMessages;
- }
-
- public void setNarrativeGenerator(INarrativeGenerator theNarrativeGenerator) {
- myNarrativeGenerator = theNarrativeGenerator;
- }
-
/**
* Sets a parser error handler to use by default on all parsers
- *
- * @param theParserErrorHandler
- * The error handler
+ *
+ * @param theParserErrorHandler The error handler
*/
public void setParserErrorHandler(IParserErrorHandler theParserErrorHandler) {
Validate.notNull(theParserErrorHandler, "theParserErrorHandler must not be null");
myParserErrorHandler = theParserErrorHandler;
}
- /**
- * Sets the parser options object which will be used to supply default
- * options to newly created parsers
- *
- * @param theParserOptions
- * The parser options object - Must not be null
- */
- public void setParserOptions(ParserOptions theParserOptions) {
- Validate.notNull(theParserOptions, "theParserOptions must not be null");
- myParserOptions = theParserOptions;
- }
-
/**
* Sets the configured performance options
- *
- * @see PerformanceOptionsEnum for a list of available options
- */
- public void setPerformanceOptions(Collection theOptions) {
- myPerformanceOptions.clear();
- if (theOptions != null) {
- myPerformanceOptions.addAll(theOptions);
- }
- }
-
- /**
- * Sets the configured performance options
- *
+ *
* @see PerformanceOptionsEnum for a list of available options
*/
public void setPerformanceOptions(PerformanceOptionsEnum... thePerformanceOptions) {
@@ -808,26 +857,7 @@ public class FhirContext {
setPerformanceOptions(asList);
}
- /**
- * Set the restful client factory
- *
- * @param theRestfulClientFactory
- */
- public void setRestfulClientFactory(IRestfulClientFactory theRestfulClientFactory) {
- Validate.notNull(theRestfulClientFactory, "theRestfulClientFactory must not be null");
- this.myRestfulClientFactory = theRestfulClientFactory;
- }
-
- /**
- * Sets the validation support module to use for this context. The validation support module
- * is used to supply underlying infrastructure such as conformance resources (StructureDefinition, ValueSet, etc)
- * as well as to provide terminology services to modules such as the validator and FluentPath executor
- */
- public void setValidationSupport(IContextValidationSupport, ?, ?, ?, ?, ?> theValidationSupport) {
- myValidationSupport = theValidationSupport;
- }
-
- @SuppressWarnings({ "cast" })
+ @SuppressWarnings({"cast"})
private List> toElementList(Collection> theResourceTypes) {
if (theResourceTypes == null) {
return null;
@@ -858,13 +888,6 @@ public class FhirContext {
return new FhirContext(FhirVersionEnum.DSTU2);
}
- /**
- * Creates and returns a new FhirContext with version {@link FhirVersionEnum#DSTU2 DSTU2} (2016 May DSTU3 Snapshot)
- */
- public static FhirContext forDstu2_1() {
- return new FhirContext(FhirVersionEnum.DSTU2_1);
- }
-
/**
* Creates and returns a new FhirContext with version {@link FhirVersionEnum#DSTU2_HL7ORG DSTU2} (using the Reference
* Implementation Structures)
@@ -873,9 +896,16 @@ public class FhirContext {
return new FhirContext(FhirVersionEnum.DSTU2_HL7ORG);
}
+ /**
+ * Creates and returns a new FhirContext with version {@link FhirVersionEnum#DSTU2 DSTU2} (2016 May DSTU3 Snapshot)
+ */
+ public static FhirContext forDstu2_1() {
+ return new FhirContext(FhirVersionEnum.DSTU2_1);
+ }
+
/**
* Creates and returns a new FhirContext with version {@link FhirVersionEnum#DSTU3 DSTU3}
- *
+ *
* @since 1.4
*/
public static FhirContext forDstu3() {
@@ -884,14 +914,13 @@ public class FhirContext {
/**
* Creates and returns a new FhirContext with version {@link FhirVersionEnum#DSTU3 DSTU3}
- *
+ *
* @since 3.0.0
*/
public static FhirContext forR4() {
return new FhirContext(FhirVersionEnum.R4);
}
-
private static Collection> toCollection(Class extends IBaseResource> theResourceType) {
ArrayList> retVal = new ArrayList>(1);
retVal.add(theResourceType);
@@ -909,34 +938,4 @@ public class FhirContext {
}
return retVal;
}
-
- /**
- * Returns an unmodifiable set containing all resource names known to this
- * context
- */
- public Set getResourceNames() {
- Set resourceNames= new HashSet<>();
-
- if (myNameToResourceDefinition.isEmpty()) {
- Properties props = new Properties();
- try {
- props.load(myVersion.getFhirVersionPropertiesFile());
- } catch (IOException theE) {
- throw new ConfigurationException("Failed to load version properties file");
- }
- Enumeration> propNames = props.propertyNames();
- while (propNames.hasMoreElements()){
- String next = (String) propNames.nextElement();
- if (next.startsWith("resource.")) {
- resourceNames.add(next.substring("resource.".length()).trim());
- }
- }
- }
-
- for (RuntimeResourceDefinition next : myNameToResourceDefinition.values()) {
- resourceNames.add(next.getName());
- }
-
- return Collections.unmodifiableSet(resourceNames);
- }
}
diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeResourceDefinition.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeResourceDefinition.java
index 93330b674b5..7ef427cbbf2 100644
--- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeResourceDefinition.java
+++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeResourceDefinition.java
@@ -185,14 +185,29 @@ public class RuntimeResourceDefinition extends BaseRuntimeElementCompositeDefini
});
mySearchParams = Collections.unmodifiableList(searchParams);
- Map> compartmentNameToSearchParams = new HashMap>();
+ Map> compartmentNameToSearchParams = new HashMap<>();
for (RuntimeSearchParam next : searchParams) {
if (next.getProvidesMembershipInCompartments() != null) {
for (String nextCompartment : next.getProvidesMembershipInCompartments()) {
if (!compartmentNameToSearchParams.containsKey(nextCompartment)) {
- compartmentNameToSearchParams.put(nextCompartment, new ArrayList());
+ compartmentNameToSearchParams.put(nextCompartment, new ArrayList<>());
+ }
+ List searchParamsForCompartment = compartmentNameToSearchParams.get(nextCompartment);
+ searchParamsForCompartment.add(next);
+
+ /*
+ * If one search parameter marks an SP as making a resource
+ * a part of a compartment, let's also denote all other
+ * SPs with the same path the same way. This behaviour is
+ * used by AuthorizationInterceptor
+ */
+ for (RuntimeSearchParam nextAlternate : searchParams) {
+ if (nextAlternate.getPath().equals(next.getPath())) {
+ if (!nextAlternate.getName().equals(next.getName())) {
+ searchParamsForCompartment.add(nextAlternate);
+ }
+ }
}
- compartmentNameToSearchParams.get(nextCompartment).add(next);
}
}
}
diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeSearchParam.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeSearchParam.java
index e1e02f2f9f2..2c1ae3dc023 100644
--- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeSearchParam.java
+++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeSearchParam.java
@@ -5,6 +5,10 @@ import static org.apache.commons.lang3.StringUtils.trim;
import java.util.*;
+import org.apache.commons.lang3.builder.EqualsBuilder;
+import org.apache.commons.lang3.builder.HashCodeBuilder;
+import org.apache.commons.lang3.builder.ToStringBuilder;
+import org.apache.commons.lang3.builder.ToStringStyle;
import org.hl7.fhir.instance.model.api.IIdType;
import ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum;
@@ -38,6 +42,18 @@ public class RuntimeSearchParam {
private final RestSearchParameterTypeEnum myParamType;
private final String myPath;
private final Set myTargets;
+
+ @Override
+ public String toString() {
+ return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE)
+ .append("base", myBase)
+ .append("name", myName)
+ .append("path", myPath)
+ .append("id", myId)
+ .append("uri", myUri)
+ .toString();
+ }
+
private final Set myProvidesMembershipInCompartments;
private final RuntimeSearchParamStatusEnum myStatus;
private final String myUri;
@@ -55,9 +71,36 @@ public class RuntimeSearchParam {
this(theId, theUri, theName, theDescription, thePath, theParamType, theCompositeOf, theProvidesMembershipInCompartments, theTargets, theStatus, null);
}
+ @Override
+ public boolean equals(Object theO) {
+ if (this == theO) return true;
+
+ if (theO == null || getClass() != theO.getClass()) return false;
+
+ RuntimeSearchParam that = (RuntimeSearchParam) theO;
+
+ return new EqualsBuilder()
+ .append(getId(), that.getId())
+ .append(getName(), that.getName())
+ .append(getPath(), that.getPath())
+ .append(getUri(), that.getUri())
+ .isEquals();
+ }
+
+ @Override
+ public int hashCode() {
+ return new HashCodeBuilder(17, 37)
+ .append(getId())
+ .append(getName())
+ .append(getPath())
+ .append(getUri())
+ .toHashCode();
+ }
+
public RuntimeSearchParam(IIdType theId, String theUri, String theName, String theDescription, String thePath, RestSearchParameterTypeEnum theParamType, List theCompositeOf,
- Set theProvidesMembershipInCompartments, Set theTargets, RuntimeSearchParamStatusEnum theStatus, Collection theBase) {
+ Set theProvidesMembershipInCompartments, Set theTargets, RuntimeSearchParamStatusEnum theStatus, Collection theBase) {
super();
+
myId = theId;
myUri = theUri;
myName = theName;
diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/EncodingEnum.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/EncodingEnum.java
index 4a6e398198c..d4b450aeff8 100644
--- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/EncodingEnum.java
+++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/EncodingEnum.java
@@ -156,7 +156,12 @@ public enum EncodingEnum {
*
*/
public static EncodingEnum forContentType(String theContentType) {
- return ourContentTypeToEncoding.get(theContentType);
+ String contentTypeSplitted = getTypeWithoutCharset(theContentType);
+ if (contentTypeSplitted == null) {
+ return null;
+ } else {
+ return ourContentTypeToEncoding.get(contentTypeSplitted );
+ }
}
@@ -170,14 +175,33 @@ public enum EncodingEnum {
* @see #forContentType(String)
*/
public static EncodingEnum forContentTypeStrict(String theContentType) {
- return ourContentTypeToEncodingStrict.get(theContentType);
+ String contentTypeSplitted = getTypeWithoutCharset(theContentType);
+ if (contentTypeSplitted == null) {
+ return null;
+ } else {
+ return ourContentTypeToEncodingStrict.get(contentTypeSplitted);
+ }
+ }
+
+ private static String getTypeWithoutCharset(String theContentType) {
+ if (theContentType == null) {
+ return null;
+ } else {
+ String[] contentTypeSplitted = theContentType.split(";");
+ return contentTypeSplitted[0];
+ }
}
/**
* Is the given type a FHIR legacy (pre-DSTU3) content type?
*/
- public static boolean isLegacy(String theFormat) {
- return ourContentTypeToEncodingLegacy.containsKey(theFormat);
+ public static boolean isLegacy(String theContentType) {
+ String contentTypeSplitted = getTypeWithoutCharset(theContentType);
+ if (contentTypeSplitted == null) {
+ return false;
+ } else {
+ return ourContentTypeToEncodingLegacy.containsKey(contentTypeSplitted);
+ }
}
diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/UrlPathTokenizer.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/UrlPathTokenizer.java
index 0e60d47a30f..7c7a99f2915 100644
--- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/UrlPathTokenizer.java
+++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/UrlPathTokenizer.java
@@ -9,9 +9,9 @@ package ca.uhn.fhir.util;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -34,8 +34,17 @@ public class UrlPathTokenizer {
return myTok.hasMoreTokens();
}
- public String nextToken() {
- return UrlUtil.unescape(myTok.nextToken());
+ /**
+ * Returns the next portion. Any URL-encoding is undone, but we will
+ * HTML encode the &lt; and &quot; marks since they are both
+ * not useful in URL paths in FHIR and potentially represent injection
+ * attacks.
+ *
+ * @see UrlUtil#sanitizeUrlPart(String)
+ * @see UrlUtil#unescape(String)
+ */
+ public String nextTokenUnescapedAndSanitized() {
+ return UrlUtil.sanitizeUrlPart(UrlUtil.unescape(myTok.nextToken()));
}
}
diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/UrlUtil.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/UrlUtil.java
index 627b6efefa6..5061148b7c1 100644
--- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/UrlUtil.java
+++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/UrlUtil.java
@@ -25,9 +25,9 @@ import static org.apache.commons.lang3.StringUtils.isBlank;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -70,7 +70,7 @@ public class UrlUtil {
return theExtensionUrl;
}
if (theExtensionUrl == null) {
- return theExtensionUrl;
+ return null;
}
int parentLastSlashIdx = theParentExtensionUrl.lastIndexOf('/');
@@ -119,6 +119,18 @@ public class UrlUtil {
return value.startsWith("http://") || value.startsWith("https://");
}
+ public static boolean isNeedsSanitization(String theString) {
+ if (theString != null) {
+ for (int i = 0; i < theString.length(); i++) {
+ char nextChar = theString.charAt(i);
+ if (nextChar == '<' || nextChar == '"') {
+ return true;
+ }
+ }
+ }
+ return false;
+ }
+
public static boolean isValid(String theUrl) {
if (theUrl == null || theUrl.length() < 8) {
return false;
@@ -164,7 +176,7 @@ public class UrlUtil {
}
public static Map parseQueryString(String theQueryString) {
- HashMap<String, List<String>> map = new HashMap<String, List<String>>();
+ HashMap<String, List<String>> map = new HashMap<>();
parseQueryString(theQueryString, map);
return toQueryStringMap(map);
}
@@ -197,17 +209,13 @@ public class UrlUtil {
nextKey = unescape(nextKey);
nextValue = unescape(nextValue);
- List<String> list = map.get(nextKey);
- if (list == null) {
- list = new ArrayList<>();
- map.put(nextKey, list);
- }
+ List<String> list = map.computeIfAbsent(nextKey, k -> new ArrayList<>());
list.add(nextValue);
}
}
public static Map parseQueryStrings(String... theQueryString) {
- HashMap<String, List<String>> map = new HashMap<String, List<String>>();
+ HashMap<String, List<String>> map = new HashMap<>();
for (String next : theQueryString) {
parseQueryString(next, map);
}
@@ -222,7 +230,6 @@ public class UrlUtil {
* [Resource Type]/[Resource ID]/_history/[Version ID]
*
*/
- //@formatter:on
public static UrlParts parseUrl(String theUrl) {
String url = theUrl;
UrlParts retVal = new UrlParts();
@@ -243,7 +250,7 @@ public class UrlUtil {
retVal.setVersionId(id.getVersionIdPart());
return retVal;
}
- if (url.matches("\\/[a-zA-Z]+\\?.*")) {
+ if (url.matches("/[a-zA-Z]+\\?.*")) {
url = url.substring(1);
}
int nextStart = 0;
@@ -282,12 +289,47 @@ public class UrlUtil {
}
- //@formatter:off
+ /**
+ * This method specifically HTML-encodes the &quot; and
+ * &lt; characters in order to prevent injection attacks
+ */
+ public static String sanitizeUrlPart(String theString) {
+ if (theString == null) {
+ return null;
+ }
+
+ boolean needsSanitization = isNeedsSanitization(theString);
+
+ if (needsSanitization) {
+ // Ok, we're sanitizing
+ StringBuilder buffer = new StringBuilder(theString.length() + 10);
+ for (int j = 0; j < theString.length(); j++) {
+
+ char nextChar = theString.charAt(j);
+ switch (nextChar) {
+ case '"':
+ buffer.append("&quot;");
+ break;
+ case '<':
+ buffer.append("&lt;");
+ break;
+ default:
+ buffer.append(nextChar);
+ break;
+ }
+
+ } // for build escaped string
+
+ return buffer.toString();
+ }
+
+ return theString;
+ }
private static Map<String, String[]> toQueryStringMap(HashMap<String, List<String>> map) {
- HashMap<String, String[]> retVal = new HashMap<String, String[]>();
+ HashMap<String, String[]> retVal = new HashMap<>();
for (Entry<String, List<String>> nextEntry : map.entrySet()) {
- retVal.put(nextEntry.getKey(), nextEntry.getValue().toArray(new String[nextEntry.getValue().size()]));
+ retVal.put(nextEntry.getKey(), nextEntry.getValue().toArray(new String[0]));
}
return retVal;
}
diff --git a/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/UploadTerminologyCommand.java b/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/UploadTerminologyCommand.java
index 756567ed7d4..5179a03afbc 100644
--- a/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/UploadTerminologyCommand.java
+++ b/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/UploadTerminologyCommand.java
@@ -28,9 +28,6 @@ import ca.uhn.fhir.rest.client.interceptor.LoggingInterceptor;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
-import org.hl7.fhir.dstu3.model.Parameters;
-import org.hl7.fhir.dstu3.model.StringType;
-import org.hl7.fhir.dstu3.model.UriType;
import org.hl7.fhir.instance.model.api.IBaseParameters;
import static org.apache.commons.lang3.StringUtils.isBlank;
@@ -82,10 +79,17 @@ public class UploadTerminologyCommand extends BaseCommand {
IGenericClient client = super.newClient(theCommandLine);
IBaseParameters inputParameters;
if (ctx.getVersion().getVersion() == FhirVersionEnum.DSTU3) {
- Parameters p = new Parameters();
- p.addParameter().setName("url").setValue(new UriType(termUrl));
+ org.hl7.fhir.dstu3.model.Parameters p = new org.hl7.fhir.dstu3.model.Parameters();
+ p.addParameter().setName("url").setValue(new org.hl7.fhir.dstu3.model.UriType(termUrl));
for (String next : datafile) {
- p.addParameter().setName("localfile").setValue(new StringType(next));
+ p.addParameter().setName("localfile").setValue(new org.hl7.fhir.dstu3.model.StringType(next));
+ }
+ inputParameters = p;
+ } else if (ctx.getVersion().getVersion() == FhirVersionEnum.R4) {
+ org.hl7.fhir.r4.model.Parameters p = new org.hl7.fhir.r4.model.Parameters();
+ p.addParameter().setName("url").setValue(new org.hl7.fhir.r4.model.UriType(termUrl));
+ for (String next : datafile) {
+ p.addParameter().setName("localfile").setValue(new org.hl7.fhir.r4.model.StringType(next));
}
inputParameters = p;
} else {
diff --git a/hapi-fhir-cli/hapi-fhir-cli-api/src/main/resources/logback-cli-on.xml b/hapi-fhir-cli/hapi-fhir-cli-api/src/main/resources/logback-cli-on.xml
index 9a96c590af6..730679bd5bb 100644
--- a/hapi-fhir-cli/hapi-fhir-cli-api/src/main/resources/logback-cli-on.xml
+++ b/hapi-fhir-cli/hapi-fhir-cli-api/src/main/resources/logback-cli-on.xml
@@ -31,6 +31,13 @@
+
+
+
+
+
diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/impl/GenericClient.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/impl/GenericClient.java
index 5dd55c8bf31..000e0eb2a48 100644
--- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/impl/GenericClient.java
+++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/impl/GenericClient.java
@@ -1103,7 +1103,7 @@ public class GenericClient extends BaseClient implements IGenericClient {
@SuppressWarnings("unchecked")
@Override
public Object execute() {
- if (myOperationName != null && myOperationName.equals(Constants.EXTOP_PROCESS_MESSAGE)) {
+ if (myOperationName != null && myOperationName.equals(Constants.EXTOP_PROCESS_MESSAGE) && myMsgBundle != null) {
Map> urlParams = new LinkedHashMap>();
// Set Url parameter Async and Response-Url
if (myIsAsync != null) {
diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/BaseHttpClientInvocationWithContents.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/BaseHttpClientInvocationWithContents.java
index 8dc61aa7d0d..86724743f68 100644
--- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/BaseHttpClientInvocationWithContents.java
+++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/BaseHttpClientInvocationWithContents.java
@@ -57,17 +57,6 @@ abstract class BaseHttpClientInvocationWithContents extends BaseHttpClientInvoca
private IIdType myForceResourceId;
-
- public BaseHttpClientInvocationWithContents(FhirContext theContext, IBaseResource theResource, Map> theParams, String... theUrlPath) {
- super(theContext);
- myResource = theResource;
- myUrlPath = StringUtils.join(theUrlPath, '/');
- myResources = null;
- myContents = null;
- myParams = theParams;
- myBundleType = null;
- }
-
public BaseHttpClientInvocationWithContents(FhirContext theContext, IBaseResource theResource, String theUrlPath) {
super(theContext);
myResource = theResource;
@@ -105,17 +94,6 @@ abstract class BaseHttpClientInvocationWithContents extends BaseHttpClientInvoca
myBundleType = null;
}
- public BaseHttpClientInvocationWithContents(FhirContext theContext, String theContents, Map> theParams, String... theUrlPath) {
- super(theContext);
- myResource = null;
- myUrlPath = StringUtils.join(theUrlPath, '/');
- myResources = null;
- myContents = theContents;
- myParams = theParams;
- myBundleType = null;
- }
-
-
@Override
public IHttpRequest asHttpRequest(String theUrlBase, Map> theExtraParams, EncodingEnum theEncoding, Boolean thePrettyPrint) throws DataFormatException {
StringBuilder url = new StringBuilder();
diff --git a/hapi-fhir-jpaserver-base/pom.xml b/hapi-fhir-jpaserver-base/pom.xml
index a267db49303..0954f47e0de 100644
--- a/hapi-fhir-jpaserver-base/pom.xml
+++ b/hapi-fhir-jpaserver-base/pom.xml
@@ -502,6 +502,7 @@
com.github.ben-manes.caffeine
caffeine
+
com.google.guava
guava-testlib
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseDstu2Config.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseDstu2Config.java
index 80d5a91471b..9da25a9f395 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseDstu2Config.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseDstu2Config.java
@@ -8,10 +8,10 @@ import ca.uhn.fhir.jpa.util.ResourceCountCache;
import ca.uhn.fhir.model.dstu2.composite.MetaDt;
import ca.uhn.fhir.validation.IValidatorModule;
import org.apache.commons.lang3.time.DateUtils;
+import org.hl7.fhir.instance.hapi.validation.CachingValidationSupport;
import org.hl7.fhir.instance.hapi.validation.DefaultProfileValidationSupport;
import org.hl7.fhir.instance.hapi.validation.FhirInstanceValidator;
import org.hl7.fhir.instance.hapi.validation.ValidationSupportChain;
-import org.hl7.fhir.instance.utils.IResourceValidator.BestPracticeWarningLevel;
import org.hl7.fhir.r4.utils.IResourceValidator;
import org.springframework.beans.factory.annotation.Autowire;
import org.springframework.context.annotation.Bean;
@@ -81,7 +81,7 @@ public class BaseDstu2Config extends BaseConfig {
public IValidatorModule instanceValidatorDstu2() {
FhirInstanceValidator retVal = new FhirInstanceValidator();
retVal.setBestPracticeWarningLevel(IResourceValidator.BestPracticeWarningLevel.Warning);
- retVal.setValidationSupport(new ValidationSupportChain(new DefaultProfileValidationSupport(), jpaValidationSupportDstu2()));
+ retVal.setValidationSupport(new CachingValidationSupport(new ValidationSupportChain(new DefaultProfileValidationSupport(), jpaValidationSupportDstu2())));
return retVal;
}
@@ -91,6 +91,13 @@ public class BaseDstu2Config extends BaseConfig {
return retVal;
}
+ @Bean(name = "myResourceCountsCache")
+ public ResourceCountCache resourceCountsCache() {
+ ResourceCountCache retVal = new ResourceCountCache(() -> systemDaoDstu2().getResourceCounts());
+ retVal.setCacheMillis(60 * DateUtils.MILLIS_PER_SECOND);
+ return retVal;
+ }
+
@Bean(autowire = Autowire.BY_TYPE)
public IFulltextSearchSvc searchDao() {
FulltextSearchSvcImpl searchDao = new FulltextSearchSvcImpl();
@@ -121,13 +128,6 @@ public class BaseDstu2Config extends BaseConfig {
return retVal;
}
- @Bean(name = "myResourceCountsCache")
- public ResourceCountCache resourceCountsCache() {
- ResourceCountCache retVal = new ResourceCountCache(() -> systemDaoDstu2().getResourceCounts());
- retVal.setCacheMillis(60 * DateUtils.MILLIS_PER_SECOND);
- return retVal;
- }
-
@Bean(autowire = Autowire.BY_TYPE)
public IHapiTerminologySvc terminologyService() {
return new HapiTerminologySvcDstu2();
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/dstu3/BaseDstu3Config.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/dstu3/BaseDstu3Config.java
index 44314f57377..3ec6c62431e 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/dstu3/BaseDstu3Config.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/dstu3/BaseDstu3Config.java
@@ -19,6 +19,7 @@ import ca.uhn.fhir.jpa.validation.JpaValidationSupportChainDstu3;
import ca.uhn.fhir.validation.IValidatorModule;
import org.apache.commons.lang3.time.DateUtils;
import org.hl7.fhir.dstu3.hapi.ctx.IValidationSupport;
+import org.hl7.fhir.dstu3.hapi.validation.CachingValidationSupport;
import org.hl7.fhir.dstu3.hapi.validation.FhirInstanceValidator;
import org.hl7.fhir.r4.utils.IResourceValidator;
import org.springframework.beans.factory.annotation.Autowire;
@@ -78,13 +79,17 @@ public class BaseDstu3Config extends BaseConfig {
return val;
}
+ @Bean
+ public JpaValidationSupportChainDstu3 jpaValidationSupportChain() {
+ return new JpaValidationSupportChainDstu3();
+ }
+
@Bean(name = "myJpaValidationSupportDstu3", autowire = Autowire.BY_NAME)
public ca.uhn.fhir.jpa.dao.dstu3.IJpaValidationSupportDstu3 jpaValidationSupportDstu3() {
ca.uhn.fhir.jpa.dao.dstu3.JpaValidationSupportDstu3 retVal = new ca.uhn.fhir.jpa.dao.dstu3.JpaValidationSupportDstu3();
return retVal;
}
-
@Bean(name = "myResourceCountsCache")
public ResourceCountCache resourceCountsCache() {
ResourceCountCache retVal = new ResourceCountCache(() -> systemDaoDstu3().getResourceCounts());
@@ -142,7 +147,7 @@ public class BaseDstu3Config extends BaseConfig {
@Primary
@Bean(autowire = Autowire.BY_NAME, name = "myJpaValidationSupportChainDstu3")
public IValidationSupport validationSupportChainDstu3() {
- return new JpaValidationSupportChainDstu3();
+ return new CachingValidationSupport(jpaValidationSupportChain());
}
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/r4/BaseR4Config.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/r4/BaseR4Config.java
index dced4a6f8aa..df65c52dafe 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/r4/BaseR4Config.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/r4/BaseR4Config.java
@@ -21,6 +21,7 @@ import ca.uhn.fhir.validation.IValidatorModule;
import org.apache.commons.lang3.time.DateUtils;
import org.hl7.fhir.r4.hapi.ctx.IValidationSupport;
import org.hl7.fhir.r4.hapi.rest.server.GraphQLProvider;
+import org.hl7.fhir.r4.hapi.validation.CachingValidationSupport;
import org.hl7.fhir.r4.hapi.validation.FhirInstanceValidator;
import org.hl7.fhir.r4.utils.GraphQLEngine;
import org.hl7.fhir.r4.utils.IResourceValidator.BestPracticeWarningLevel;
@@ -93,6 +94,11 @@ public class BaseR4Config extends BaseConfig {
return val;
}
+ @Bean
+ public JpaValidationSupportChainR4 jpaValidationSupportChain() {
+ return new JpaValidationSupportChainR4();
+ }
+
@Bean(name = "myJpaValidationSupportR4", autowire = Autowire.BY_NAME)
public ca.uhn.fhir.jpa.dao.r4.IJpaValidationSupportR4 jpaValidationSupportR4() {
ca.uhn.fhir.jpa.dao.r4.JpaValidationSupportR4 retVal = new ca.uhn.fhir.jpa.dao.r4.JpaValidationSupportR4();
@@ -156,7 +162,7 @@ public class BaseR4Config extends BaseConfig {
@Primary
@Bean(autowire = Autowire.BY_NAME, name = "myJpaValidationSupportChainR4")
public IValidationSupport validationSupportChainR4() {
- return new JpaValidationSupportChainR4();
+ return new CachingValidationSupport(jpaValidationSupportChain());
}
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java
index 016ad9fa615..4b643375b78 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java
@@ -1,25 +1,5 @@
package ca.uhn.fhir.jpa.dao;
-/*
- * #%L
- * HAPI FHIR JPA Server
- * %%
- * Copyright (C) 2014 - 2018 University Health Network
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-
import ca.uhn.fhir.context.*;
import ca.uhn.fhir.jpa.dao.data.*;
import ca.uhn.fhir.jpa.entity.*;
@@ -58,7 +38,6 @@ import ca.uhn.fhir.util.*;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Charsets;
import com.google.common.collect.ArrayListMultimap;
-import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.google.common.hash.HashFunction;
import com.google.common.hash.Hashing;
@@ -104,6 +83,26 @@ import java.util.stream.Collectors;
import static org.apache.commons.lang3.StringUtils.*;
+/*
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2018 University Health Network
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
@SuppressWarnings("WeakerAccess")
@Repository
public abstract class BaseHapiFhirDao implements IDao, ApplicationContextAware {
@@ -186,6 +185,8 @@ public abstract class BaseHapiFhirDao implements IDao,
protected IResourceTableDao myResourceTableDao;
@Autowired
protected IResourceTagDao myResourceTagDao;
+ @Autowired
+ protected IResourceSearchViewDao myResourceViewDao;
@Autowired(required = true)
private DaoConfig myConfig;
private FhirContext myContext;
@@ -199,8 +200,8 @@ public abstract class BaseHapiFhirDao implements IDao,
private ISearchParamPresenceSvc mySearchParamPresenceSvc;
@Autowired
private ISearchParamRegistry mySearchParamRegistry;
- @Autowired
- private ISearchResultDao mySearchResultDao;
+ //@Autowired
+ //private ISearchResultDao mySearchResultDao;
@Autowired
private IResourceIndexedCompositeStringUniqueDao myResourceIndexedCompositeStringUniqueDao;
private ApplicationContext myApplicationContext;
@@ -227,6 +228,7 @@ public abstract class BaseHapiFhirDao implements IDao,
}
protected ExpungeOutcome doExpunge(String theResourceName, Long theResourceId, Long theVersion, ExpungeOptions theExpungeOptions) {
+ TransactionTemplate txTemplate = new TransactionTemplate(myPlatformTransactionManager);
if (!getConfig().isExpungeEnabled()) {
throw new MethodNotAllowedException("$expunge is not enabled on this server");
@@ -245,32 +247,39 @@ public abstract class BaseHapiFhirDao implements IDao,
/*
* Delete historical versions of deleted resources
*/
- Pageable page = new PageRequest(0, remainingCount.get());
- Slice resourceIds;
- if (theResourceId != null) {
- resourceIds = myResourceTableDao.findIdsOfDeletedResourcesOfType(page, theResourceId, theResourceName);
- } else {
- if (theResourceName != null) {
- resourceIds = myResourceTableDao.findIdsOfDeletedResourcesOfType(page, theResourceName);
+ Pageable page = PageRequest.of(0, remainingCount.get());
+ Slice resourceIds = txTemplate.execute(t -> {
+ if (theResourceId != null) {
+ return myResourceTableDao.findIdsOfDeletedResourcesOfType(page, theResourceId, theResourceName);
} else {
- resourceIds = myResourceTableDao.findIdsOfDeletedResources(page);
+ if (theResourceName != null) {
+ return myResourceTableDao.findIdsOfDeletedResourcesOfType(page, theResourceName);
+ } else {
+ return myResourceTableDao.findIdsOfDeletedResources(page);
+ }
}
- }
+ });
for (Long next : resourceIds) {
- expungeHistoricalVersionsOfId(next, remainingCount);
- if (remainingCount.get() <= 0) {
- return toExpungeOutcome(theExpungeOptions, remainingCount);
- }
+ txTemplate.execute(t -> {
+ expungeHistoricalVersionsOfId(next, remainingCount);
+ if (remainingCount.get() <= 0) {
+ return toExpungeOutcome(theExpungeOptions, remainingCount);
+ }
+ return null;
+ });
}
/*
* Delete current versions of deleted resources
*/
for (Long next : resourceIds) {
- expungeCurrentVersionOfResource(next);
- if (remainingCount.get() <= 0) {
- return toExpungeOutcome(theExpungeOptions, remainingCount);
- }
+ txTemplate.execute(t -> {
+ expungeCurrentVersionOfResource(next);
+ if (remainingCount.get() <= 0) {
+ return toExpungeOutcome(theExpungeOptions, remainingCount);
+ }
+ return null;
+ });
}
}
@@ -280,22 +289,26 @@ public abstract class BaseHapiFhirDao implements IDao,
/*
* Delete historical versions of non-deleted resources
*/
- Pageable page = new PageRequest(0, remainingCount.get());
- Slice historicalIds;
- if (theResourceId != null && theVersion != null) {
- historicalIds = toSlice(myResourceHistoryTableDao.findForIdAndVersion(theResourceId, theVersion));
- } else {
- if (theResourceName != null) {
- historicalIds = myResourceHistoryTableDao.findIdsOfPreviousVersionsOfResources(page, theResourceName);
+ Pageable page = PageRequest.of(0, remainingCount.get());
+ Slice historicalIds = txTemplate.execute(t -> {
+ if (theResourceId != null && theVersion != null) {
+ return toSlice(myResourceHistoryTableDao.findForIdAndVersion(theResourceId, theVersion));
} else {
- historicalIds = myResourceHistoryTableDao.findIdsOfPreviousVersionsOfResources(page);
+ if (theResourceName != null) {
+ return myResourceHistoryTableDao.findIdsOfPreviousVersionsOfResources(page, theResourceName);
+ } else {
+ return myResourceHistoryTableDao.findIdsOfPreviousVersionsOfResources(page);
+ }
}
- }
+ });
for (Long next : historicalIds) {
- expungeHistoricalVersion(next);
- if (remainingCount.decrementAndGet() <= 0) {
- return toExpungeOutcome(theExpungeOptions, remainingCount);
- }
+ txTemplate.execute(t -> {
+ expungeHistoricalVersion(next);
+ if (remainingCount.decrementAndGet() <= 0) {
+ return toExpungeOutcome(theExpungeOptions, remainingCount);
+ }
+ return null;
+ });
}
}
@@ -315,7 +328,6 @@ public abstract class BaseHapiFhirDao implements IDao,
});
txTemplate.execute(t -> {
doExpungeEverythingQuery("DELETE from " + SearchParamPresent.class.getSimpleName() + " d");
- doExpungeEverythingQuery("DELETE from " + SearchParam.class.getSimpleName() + " d");
doExpungeEverythingQuery("DELETE from " + ForcedId.class.getSimpleName() + " d");
doExpungeEverythingQuery("DELETE from " + ResourceIndexedSearchParamDate.class.getSimpleName() + " d");
doExpungeEverythingQuery("DELETE from " + ResourceIndexedSearchParamNumber.class.getSimpleName() + " d");
@@ -704,58 +716,6 @@ public abstract class BaseHapiFhirDao implements IDao,
return retVal;
}
-
- @SuppressWarnings("unchecked")
- public IFhirResourceDao getDao(Class theType) {
- Map, IFhirResourceDao>> resourceTypeToDao = getDaos();
- IFhirResourceDao dao = (IFhirResourceDao) resourceTypeToDao.get(theType);
- return dao;
- }
-
- protected IFhirResourceDao> getDaoOrThrowException(Class extends IBaseResource> theClass) {
- IFhirResourceDao extends IBaseResource> retVal = getDao(theClass);
- if (retVal == null) {
- List supportedResourceTypes = getDaos()
- .keySet()
- .stream()
- .map(t->myContext.getResourceDefinition(t).getName())
- .sorted()
- .collect(Collectors.toList());
- throw new InvalidRequestException("Unable to process request, this server does not know how to handle resources of type " + getContext().getResourceDefinition(theClass).getName() + " - Can handle: " + supportedResourceTypes);
- }
- return retVal;
- }
-
-
- private Map, IFhirResourceDao>> getDaos() {
- if (myResourceTypeToDao == null) {
- Map, IFhirResourceDao>> resourceTypeToDao = new HashMap<>();
-
- Map daos = myApplicationContext.getBeansOfType(IFhirResourceDao.class, false, false);
-
- String[] beanNames = myApplicationContext.getBeanNamesForType(IFhirResourceDao.class);
-
- for (IFhirResourceDao> next : daos.values()) {
- resourceTypeToDao.put(next.getResourceType(), next);
- }
-
- if (this instanceof IFhirResourceDao>) {
- IFhirResourceDao> thiz = (IFhirResourceDao>) this;
- resourceTypeToDao.put(thiz.getResourceType(), thiz);
- }
-
- myResourceTypeToDao = resourceTypeToDao;
- }
-
- return Collections.unmodifiableMap(myResourceTypeToDao);
- }
-
- @PostConstruct
- public void startClearCaches() {
- myResourceTypeToDao = null;
- }
-
-
protected Set extractSearchParamCoords(ResourceTable theEntity, IBaseResource theResource) {
return mySearchParamExtractor.extractSearchParamCoords(theEntity, theResource);
}
@@ -910,7 +870,8 @@ public abstract class BaseHapiFhirDao implements IDao,
param = new ResourceIndexedSearchParamQuantity();
break;
case STRING:
- param = new ResourceIndexedSearchParamString();
+ param = new ResourceIndexedSearchParamString()
+ .setDaoConfig(myConfig);
break;
case TOKEN:
param = new ResourceIndexedSearchParamToken();
@@ -957,18 +918,6 @@ public abstract class BaseHapiFhirDao implements IDao,
return myConfig;
}
- @Override
- public void setApplicationContext(ApplicationContext theApplicationContext) throws BeansException {
- /*
- * We do a null check here because Smile's module system tries to
- * initialize the application context twice if two modules depend on
- * the persistence module. The second time sets the dependency's appctx.
- */
- if (myApplicationContext == null) {
- myApplicationContext = theApplicationContext;
- }
- }
-
public void setConfig(DaoConfig theConfig) {
myConfig = theConfig;
}
@@ -995,6 +944,50 @@ public abstract class BaseHapiFhirDao implements IDao,
}
}
+ @SuppressWarnings("unchecked")
+ public IFhirResourceDao getDao(Class theType) {
+ Map, IFhirResourceDao>> resourceTypeToDao = getDaos();
+ IFhirResourceDao dao = (IFhirResourceDao) resourceTypeToDao.get(theType);
+ return dao;
+ }
+
+ protected IFhirResourceDao> getDaoOrThrowException(Class extends IBaseResource> theClass) {
+ IFhirResourceDao extends IBaseResource> retVal = getDao(theClass);
+ if (retVal == null) {
+ List supportedResourceTypes = getDaos()
+ .keySet()
+ .stream()
+ .map(t -> myContext.getResourceDefinition(t).getName())
+ .sorted()
+ .collect(Collectors.toList());
+ throw new InvalidRequestException("Unable to process request, this server does not know how to handle resources of type " + getContext().getResourceDefinition(theClass).getName() + " - Can handle: " + supportedResourceTypes);
+ }
+ return retVal;
+ }
+
+ private Map, IFhirResourceDao>> getDaos() {
+ if (myResourceTypeToDao == null) {
+ Map, IFhirResourceDao>> resourceTypeToDao = new HashMap<>();
+
+ Map daos = myApplicationContext.getBeansOfType(IFhirResourceDao.class, false, false);
+
+ String[] beanNames = myApplicationContext.getBeanNamesForType(IFhirResourceDao.class);
+
+ for (IFhirResourceDao> next : daos.values()) {
+ resourceTypeToDao.put(next.getResourceType(), next);
+ }
+
+ if (this instanceof IFhirResourceDao>) {
+ IFhirResourceDao> thiz = (IFhirResourceDao>) this;
+ resourceTypeToDao.put(thiz.getResourceType(), thiz);
+ }
+
+ myResourceTypeToDao = resourceTypeToDao;
+ }
+
+ return Collections.unmodifiableMap(myResourceTypeToDao);
+ }
+
public IResourceIndexedCompositeStringUniqueDao getResourceIndexedCompositeStringUniqueDao() {
return myResourceIndexedCompositeStringUniqueDao;
}
@@ -1172,9 +1165,9 @@ public abstract class BaseHapiFhirDao implements IDao,
@Override
public SearchBuilder newSearchBuilder() {
- SearchBuilder builder = new SearchBuilder(getContext(), myEntityManager, myFulltextSearchSvc, this, myResourceIndexedSearchParamUriDao,
- myForcedIdDao,
- myTerminologySvc, mySerarchParamRegistry);
+ SearchBuilder builder = new SearchBuilder(
+ getContext(), myEntityManager, myFulltextSearchSvc, this, myResourceIndexedSearchParamUriDao,
+ myForcedIdDao, myTerminologySvc, mySerarchParamRegistry, myResourceTagDao, myResourceViewDao);
return builder;
}
@@ -1223,7 +1216,7 @@ public abstract class BaseHapiFhirDao implements IDao,
}
}
- private void populateResourceIdFromEntity(BaseHasResource theEntity, final IBaseResource theResource) {
+ private void populateResourceIdFromEntity(IBaseResourceEntity theEntity, final IBaseResource theResource) {
IIdType id = theEntity.getIdDt();
if (getContext().getVersion().getVersion().isRi()) {
id = getContext().getVersion().newIdType().setValue(id.getValue());
@@ -1308,20 +1301,24 @@ public abstract class BaseHapiFhirDao implements IDao,
}
}
- // Don't keep duplicate tags
+ Set allTagsNew = getAllTagDefinitions(theEntity);
Set allDefsPresent = new HashSet<>();
- theEntity.getTags().removeIf(theResourceTag -> !allDefsPresent.add(theResourceTag.getTag()));
+ allTagsNew.forEach(tag -> {
- // Remove any tags that have been removed
- for (ResourceTag next : allTagsOld) {
- if (!allDefs.contains(next)) {
- if (shouldDroppedTagBeRemovedOnUpdate(theRequest, next)) {
- theEntity.getTags().remove(next);
+ // Don't keep duplicate tags
+ if (!allDefsPresent.add(tag.getTag())) {
+ theEntity.getTags().remove(tag);
+ }
+
+ // Drop any tags that have been removed
+ if (!allDefs.contains(tag)) {
+ if (shouldDroppedTagBeRemovedOnUpdate(theRequest, tag)) {
+ theEntity.getTags().remove(tag);
}
}
- }
- Set allTagsNew = getAllTagDefinitions(theEntity);
+ });
+
if (!allTagsOld.equals(allTagsNew)) {
changed = true;
}
@@ -1355,7 +1352,7 @@ public abstract class BaseHapiFhirDao implements IDao,
}
@SuppressWarnings("unchecked")
- private R populateResourceMetadataHapi(Class theResourceType, BaseHasResource theEntity, boolean theForHistoryOperation, IResource res) {
+ private R populateResourceMetadataHapi(Class theResourceType, IBaseResourceEntity theEntity, Collection extends BaseTag> theTagList, boolean theForHistoryOperation, IResource res) {
R retVal = (R) res;
if (theEntity.getDeleted() != null) {
res = (IResource) myContext.getResourceDefinition(theResourceType).newInstance();
@@ -1384,7 +1381,7 @@ public abstract class BaseHapiFhirDao implements IDao,
ResourceMetadataKeyEnum.UPDATED.put(res, theEntity.getUpdated());
IDao.RESOURCE_PID.put(res, theEntity.getId());
- Collection extends BaseTag> tags = theEntity.getTags();
+ Collection extends BaseTag> tags = theTagList;
if (theEntity.isHasTags()) {
TagList tagList = new TagList();
List securityLabels = new ArrayList<>();
@@ -1421,7 +1418,7 @@ public abstract class BaseHapiFhirDao implements IDao,
}
@SuppressWarnings("unchecked")
- private R populateResourceMetadataRi(Class theResourceType, BaseHasResource theEntity, boolean theForHistoryOperation, IAnyResource res) {
+ private R populateResourceMetadataRi(Class theResourceType, IBaseResourceEntity theEntity, Collection extends BaseTag> theTagList, boolean theForHistoryOperation, IAnyResource res) {
R retVal = (R) res;
if (theEntity.getDeleted() != null) {
res = (IAnyResource) myContext.getResourceDefinition(theResourceType).newInstance();
@@ -1454,7 +1451,7 @@ public abstract class BaseHapiFhirDao implements IDao,
res.getMeta().setLastUpdated(theEntity.getUpdatedDate());
IDao.RESOURCE_PID.put(res, theEntity.getId());
- Collection extends BaseTag> tags = theEntity.getTags();
+ Collection extends BaseTag> tags = theTagList;
if (theEntity.isHasTags()) {
for (BaseTag next : tags) {
@@ -1480,6 +1477,15 @@ public abstract class BaseHapiFhirDao implements IDao,
return retVal;
}
+ /**
+	 * Subclasses may override to provide behaviour. Called when a pre-existing resource has been deleted from the database
+ *
+ * @param theEntity The resource
+ */
+ protected void postDelete(ResourceTable theEntity) {
+ // nothing
+ }
+
/**
* Subclasses may override to provide behaviour. Called when a resource has been inserted into the database for the first time.
*
@@ -1536,6 +1542,18 @@ public abstract class BaseHapiFhirDao implements IDao,
return retVal;
}
+ @Override
+ public void setApplicationContext(ApplicationContext theApplicationContext) throws BeansException {
+ /*
+ * We do a null check here because Smile's module system tries to
+ * initialize the application context twice if two modules depend on
+ * the persistence module. The second time sets the dependency's appctx.
+ */
+ if (myApplicationContext == null) {
+ myApplicationContext = theApplicationContext;
+ }
+ }
+
private void setUpdatedTime(Collection extends BaseResourceIndexedSearchParam> theParams, Date theUpdateTime) {
for (BaseResourceIndexedSearchParam nextSearchParam : theParams) {
nextSearchParam.setUpdated(theUpdateTime);
@@ -1592,6 +1610,11 @@ public abstract class BaseHapiFhirDao implements IDao,
return false;
}
+ @PostConstruct
+ public void startClearCaches() {
+ myResourceTypeToDao = null;
+ }
+
private ExpungeOutcome toExpungeOutcome(ExpungeOptions theExpungeOptions, AtomicInteger theRemainingCount) {
return new ExpungeOutcome()
.setDeletedCount(theExpungeOptions.getLimit() - theRemainingCount.get());
@@ -1601,28 +1624,47 @@ public abstract class BaseHapiFhirDao implements IDao,
public IBaseResource toResource(BaseHasResource theEntity, boolean theForHistoryOperation) {
RuntimeResourceDefinition type = myContext.getResourceDefinition(theEntity.getResourceType());
Class extends IBaseResource> resourceType = type.getImplementingClass();
- return toResource(resourceType, theEntity, theForHistoryOperation);
+ return toResource(resourceType, theEntity, null, theForHistoryOperation);
}
@SuppressWarnings("unchecked")
@Override
- public R toResource(Class theResourceType, BaseHasResource theEntity,
- boolean theForHistoryOperation) {
+ public R toResource(Class theResourceType, IBaseResourceEntity theEntity, Collection theTagList, boolean theForHistoryOperation) {
+
+ // 1. get resource, it's encoding and the tags if any
+ byte[] resourceBytes = null;
+ ResourceEncodingEnum resourceEncoding = null;
+ Collection extends BaseTag> myTagList = null;
- ResourceHistoryTable history;
if (theEntity instanceof ResourceHistoryTable) {
- history = (ResourceHistoryTable) theEntity;
+ ResourceHistoryTable history = (ResourceHistoryTable) theEntity;
+ resourceBytes = history.getResource();
+ resourceEncoding = history.getEncoding();
+ myTagList = history.getTags();
+ } else if (theEntity instanceof ResourceTable) {
+ ResourceTable resource = (ResourceTable) theEntity;
+ ResourceHistoryTable history = myResourceHistoryTableDao.findForIdAndVersion(theEntity.getId(), theEntity.getVersion());
+ if (history == null) {
+ return null;
+ }
+ resourceBytes = history.getResource();
+ resourceEncoding = history.getEncoding();
+ myTagList = resource.getTags();
+ } else if (theEntity instanceof ResourceSearchView) {
+ // This is the search View
+ ResourceSearchView myView = (ResourceSearchView) theEntity;
+ resourceBytes = myView.getResource();
+ resourceEncoding = myView.getEncoding();
+ if (theTagList == null)
+ myTagList = new HashSet<>();
+ else
+ myTagList = theTagList;
} else {
- history = myResourceHistoryTableDao.findForIdAndVersion(theEntity.getId(), theEntity.getVersion());
- }
-
- if (history == null) {
+ // something wrong
return null;
}
- byte[] resourceBytes = history.getResource();
- ResourceEncodingEnum resourceEncoding = history.getEncoding();
-
+ // 2. get The text
String resourceText = null;
switch (resourceEncoding) {
case JSON:
@@ -1639,12 +1681,10 @@ public abstract class BaseHapiFhirDao implements IDao,
break;
}
- /*
- * Use the appropriate custom type if one is specified in the context
- */
+ // 3. Use the appropriate custom type if one is specified in the context
Class resourceType = theResourceType;
if (myContext.hasDefaultTypeForProfile()) {
- for (BaseTag nextTag : theEntity.getTags()) {
+ for (BaseTag nextTag : myTagList) {
if (nextTag.getTag().getTagType() == TagTypeEnum.PROFILE) {
String profile = nextTag.getTag().getCode();
if (isNotBlank(profile)) {
@@ -1659,6 +1699,7 @@ public abstract class BaseHapiFhirDao implements IDao,
}
}
+ // 4. parse the text to FHIR
R retVal;
if (resourceEncoding != ResourceEncodingEnum.DEL) {
IParser parser = resourceEncoding.newParser(getContext(theEntity.getFhirVersion()));
@@ -1689,15 +1730,15 @@ public abstract class BaseHapiFhirDao implements IDao,
}
+ // 5. fill MetaData
if (retVal instanceof IResource) {
IResource res = (IResource) retVal;
- retVal = populateResourceMetadataHapi(resourceType, theEntity, theForHistoryOperation, res);
+ retVal = populateResourceMetadataHapi(resourceType, theEntity, myTagList, theForHistoryOperation, res);
} else {
IAnyResource res = (IAnyResource) retVal;
- retVal = populateResourceMetadataRi(resourceType, theEntity, theForHistoryOperation, res);
+ retVal = populateResourceMetadataRi(resourceType, theEntity, myTagList, theForHistoryOperation, res);
}
-
return retVal;
}
@@ -1735,8 +1776,12 @@ public abstract class BaseHapiFhirDao implements IDao,
protected ResourceTable updateEntity(RequestDetails theRequest, final IBaseResource theResource, ResourceTable
theEntity, Date theDeletedTimestampOrNull, boolean thePerformIndexing,
boolean theUpdateVersion, Date theUpdateTime, boolean theForceUpdate, boolean theCreateNewHistoryEntry) {
+ Validate.notNull(theEntity);
+		Validate.isTrue(theDeletedTimestampOrNull != null || theResource != null, "Must have either a resource[%s] or a deleted timestamp[%s] for resource PID[%s]", theDeletedTimestampOrNull != null, theResource != null, theEntity.getId());
+
ourLog.debug("Starting entity update");
+
/*
* This should be the very first thing..
*/
@@ -1826,6 +1871,7 @@ public abstract class BaseHapiFhirDao implements IDao,
theEntity.setNarrativeTextParsedIntoWords(null);
theEntity.setContentTextParsedIntoWords(null);
theEntity.setHashSha256(null);
+ theEntity.setIndexStatus(INDEX_STATUS_INDEXED);
changed = populateResourceIntoEntity(theRequest, theResource, theEntity, true);
} else {
@@ -2003,6 +2049,11 @@ public abstract class BaseHapiFhirDao implements IDao,
postPersist(theEntity, (T) theResource);
+ } else if (theEntity.getDeleted() != null) {
+ theEntity = myEntityManager.merge(theEntity);
+
+ postDelete(theEntity);
+
} else {
theEntity = myEntityManager.merge(theEntity);
@@ -2014,10 +2065,6 @@ public abstract class BaseHapiFhirDao implements IDao,
*/
if (theCreateNewHistoryEntry) {
final ResourceHistoryTable historyEntry = theEntity.toHistory();
-// if (theEntity.getVersion() > 1) {
-// existing = myResourceHistoryTableDao.findForIdAndVersion(theEntity.getId(), theEntity.getVersion());
-// ourLog.warn("Reusing existing history entry entity {}", theEntity.getIdDt().getValue());
-// }
historyEntry.setEncoding(changed.getEncoding());
historyEntry.setResource(changed.getResource());
@@ -2057,6 +2104,7 @@ public abstract class BaseHapiFhirDao implements IDao,
if (thePerformIndexing) {
for (ResourceIndexedSearchParamString next : removeCommon(existingStringParams, stringParams)) {
+ next.setDaoConfig(myConfig);
myEntityManager.remove(next);
theEntity.getParamsString().remove(next);
}
@@ -2148,12 +2196,11 @@ public abstract class BaseHapiFhirDao implements IDao,
} // if thePerformIndexing
- theEntity = myEntityManager.merge(theEntity);
-
if (theResource != null) {
populateResourceIdFromEntity(theEntity, theResource);
}
+
return theEntity;
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java
index 6cff70e1f1e..b8a5df18c3f 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java
@@ -50,6 +50,7 @@ import ca.uhn.fhir.rest.server.method.SearchMethodBinding;
import ca.uhn.fhir.util.*;
import org.apache.commons.lang3.Validate;
import org.hl7.fhir.instance.model.api.*;
+import org.hl7.fhir.r4.model.InstantType;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Required;
import org.springframework.lang.NonNull;
@@ -207,7 +208,7 @@ public abstract class BaseHapiFhirResourceDao extends B
StopWatch w = new StopWatch();
- T resourceToDelete = toResource(myResourceType, entity, false);
+ T resourceToDelete = toResource(myResourceType, entity, null, false);
// Notify IServerOperationInterceptors about pre-action call
if (theReques != null) {
@@ -289,7 +290,7 @@ public abstract class BaseHapiFhirResourceDao extends B
ResourceTable entity = myEntityManager.find(ResourceTable.class, pid);
deletedResources.add(entity);
- T resourceToDelete = toResource(myResourceType, entity, false);
+ T resourceToDelete = toResource(myResourceType, entity, null, false);
// Notify IServerOperationInterceptors about pre-action call
if (theRequest != null) {
@@ -394,16 +395,6 @@ public abstract class BaseHapiFhirResourceDao extends B
"This server cannot create an entity with a user-specified numeric ID - Client should not specify an ID when creating a new resource, or should include at least one letter in the ID to force a client-defined ID");
}
createForcedIdIfNeeded(entity, theResource.getIdElement());
-
- if (entity.getForcedId() != null) {
- try {
- translateForcedIdToPid(getResourceName(), theResource.getIdElement().getIdPart());
- throw new UnprocessableEntityException(getContext().getLocalizer().getMessage(BaseHapiFhirResourceDao.class, "duplicateCreateForcedId", theResource.getIdElement().getIdPart()));
- } catch (ResourceNotFoundException e) {
- // good, this ID doesn't exist so we can create it
- }
- }
-
}
// Notify interceptors
@@ -517,6 +508,7 @@ public abstract class BaseHapiFhirResourceDao extends B
}
@Override
+ @Transactional(propagation = Propagation.NEVER)
public ExpungeOutcome expunge(IIdType theId, ExpungeOptions theExpungeOptions) {
BaseHasResource entity = readEntity(theId);
if (theId.hasVersionIdPart()) {
@@ -532,6 +524,7 @@ public abstract class BaseHapiFhirResourceDao extends B
}
@Override
+ @Transactional(propagation = Propagation.NEVER)
public ExpungeOutcome expunge(ExpungeOptions theExpungeOptions) {
ourLog.info("Beginning TYPE[{}] expunge operation", getResourceName());
@@ -854,16 +847,10 @@ public abstract class BaseHapiFhirResourceDao extends B
BaseHasResource entity = readEntity(theId);
validateResourceType(entity);
- T retVal = toResource(myResourceType, entity, false);
+ T retVal = toResource(myResourceType, entity, null, false);
- IPrimitiveType deleted;
- if (retVal instanceof IResource) {
- deleted = ResourceMetadataKeyEnum.DELETED_AT.get((IResource) retVal);
- } else {
- deleted = ResourceMetadataKeyEnum.DELETED_AT.get((IAnyResource) retVal);
- }
- if (deleted != null && !deleted.isEmpty()) {
- throw new ResourceGoneException("Resource was deleted at " + deleted.getValueAsString());
+ if (entity.getDeleted() != null) {
+ throw new ResourceGoneException("Resource was deleted at " + new InstantType(entity.getDeleted()).getValueAsString());
}
ourLog.debug("Processed read on {} in {}ms", theId.getValue(), w.getMillisAndRestart());
@@ -930,10 +917,14 @@ public abstract class BaseHapiFhirResourceDao extends B
@Override
public void reindex(T theResource, ResourceTable theEntity) {
- ourLog.debug("Indexing resource {} - PID {}", theResource.getIdElement().getValue(), theEntity.getId());
- CURRENTLY_REINDEXING.put(theResource, Boolean.TRUE);
- updateEntity(null, theResource, theEntity, null, true, false, theEntity.getUpdatedDate(), true, false);
- CURRENTLY_REINDEXING.put(theResource, null);
+ ourLog.debug("Indexing resource {} - PID {}", theEntity.getIdDt().getValue(), theEntity.getId());
+ if (theResource != null) {
+ CURRENTLY_REINDEXING.put(theResource, Boolean.TRUE);
+ }
+ updateEntity(null, theResource, theEntity, theEntity.getDeleted(), true, false, theEntity.getUpdatedDate(), true, false);
+ if (theResource != null) {
+ CURRENTLY_REINDEXING.put(theResource, null);
+ }
}
@Override
@@ -1065,6 +1056,11 @@ public abstract class BaseHapiFhirResourceDao extends B
mySecondaryPrimaryKeyParamName = theSecondaryPrimaryKeyParamName;
}
+ @PostConstruct
+ public void start() {
+ ourLog.debug("Starting resource DAO for type: {}", getResourceName());
+ }
+
protected MT toMetaDt(Class theType, Collection tagDefinitions) {
MT retVal;
try {
@@ -1205,7 +1201,9 @@ public abstract class BaseHapiFhirResourceDao extends B
}
} else {
/*
- * Note: resourcdeId will not be null or empty here, because we check it and reject requests in BaseOutcomeReturningMethodBindingWithResourceParam
+ * Note: resourceId will not be null or empty here, because we
+ * check it and reject requests in
+ * BaseOutcomeReturningMethodBindingWithResourceParam
*/
resourceId = theResource.getIdElement();
@@ -1336,9 +1334,4 @@ public abstract class BaseHapiFhirResourceDao extends B
}
}
- @PostConstruct
- public void start() {
- ourLog.info("Starting resource DAO for type: {}", getResourceName());
- }
-
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirSystemDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirSystemDao.java
index 58d9d74ad1c..25639f637ec 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirSystemDao.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirSystemDao.java
@@ -257,7 +257,7 @@ public abstract class BaseHapiFhirSystemDao extends BaseHapiFhirDao extends BaseHapiFhirDao resourceClass = getContext().getResourceDefinition(resourceTable.getResourceType()).getImplementingClass();
+ @SuppressWarnings("rawtypes") final IFhirResourceDao dao = getDaoOrThrowException(resourceClass);
dao.reindex(resource, resourceTable);
return null;
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseSearchParamExtractor.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseSearchParamExtractor.java
index 62fb3442679..258b5bf291a 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseSearchParamExtractor.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseSearchParamExtractor.java
@@ -20,45 +20,43 @@ package ca.uhn.fhir.jpa.dao;
* #L%
*/
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.List;
-import java.util.regex.Pattern;
-
+import ca.uhn.fhir.context.FhirContext;
+import ca.uhn.fhir.context.RuntimeResourceDefinition;
+import ca.uhn.fhir.context.RuntimeSearchParam;
+import ca.uhn.fhir.util.FhirTerser;
+import com.google.common.annotations.VisibleForTesting;
import org.apache.commons.lang3.ObjectUtils;
import org.hl7.fhir.instance.model.api.IBaseDatatype;
import org.hl7.fhir.instance.model.api.IBaseExtension;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.springframework.beans.factory.annotation.Autowired;
-import com.google.common.annotations.VisibleForTesting;
-
-import ca.uhn.fhir.context.FhirContext;
-import ca.uhn.fhir.context.RuntimeResourceDefinition;
-import ca.uhn.fhir.context.RuntimeSearchParam;
-import ca.uhn.fhir.util.FhirTerser;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.List;
+import java.util.regex.Pattern;
public abstract class BaseSearchParamExtractor implements ISearchParamExtractor {
-
- private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(BaseSearchParamExtractor.class);
- public static final Pattern SPLIT = Pattern.compile("\\||( or )");
+ public static final Pattern SPLIT = Pattern.compile("\\||( or )");
+ private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(BaseSearchParamExtractor.class);
@Autowired
private FhirContext myContext;
-
+ @Autowired
+ private DaoConfig myDaoConfig;
@Autowired
private ISearchParamRegistry mySearchParamRegistry;
-
public BaseSearchParamExtractor() {
super();
}
- public BaseSearchParamExtractor(FhirContext theCtx, ISearchParamRegistry theSearchParamRegistry) {
+ public BaseSearchParamExtractor(DaoConfig theDaoConfig, FhirContext theCtx, ISearchParamRegistry theSearchParamRegistry) {
myContext = theCtx;
mySearchParamRegistry = theSearchParamRegistry;
+ myDaoConfig = theDaoConfig;
}
-
+
@Override
public List extractResourceLinks(IBaseResource theResource, RuntimeSearchParam theNextSpDef) {
List refs = new ArrayList();
@@ -95,20 +93,24 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor
}
} catch (Exception e) {
RuntimeResourceDefinition def = myContext.getResourceDefinition(theResource);
- ourLog.warn("Failed to index values from path[{}] in resource type[{}]: {}", new Object[] { nextPathTrimmed, def.getName(), e.toString(), e } );
+ ourLog.warn("Failed to index values from path[{}] in resource type[{}]: {}", new Object[] {nextPathTrimmed, def.getName(), e.toString(), e});
}
}
return values;
}
-
+
protected FhirContext getContext() {
return myContext;
}
+ public DaoConfig getDaoConfig() {
+ return myDaoConfig;
+ }
+
public Collection getSearchParams(IBaseResource theResource) {
RuntimeResourceDefinition def = getContext().getResourceDefinition(theResource);
Collection retVal = mySearchParamRegistry.getActiveSearchParams(def.getName()).values();
- List defaultList= Collections.emptyList();
+ List defaultList = Collections.emptyList();
retVal = ObjectUtils.defaultIfNull(retVal, defaultList);
return retVal;
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/DaoConfig.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/DaoConfig.java
index d2d3595f6bc..9c6a156cefc 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/DaoConfig.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/DaoConfig.java
@@ -89,7 +89,7 @@ public class DaoConfig {
/**
* update setter javadoc if default changes
*/
- private boolean myAllowContainsSearches = true;
+ private boolean myAllowContainsSearches = false;
/**
* update setter javadoc if default changes
@@ -754,7 +754,15 @@ public class DaoConfig {
* If enabled, the server will support the use of :contains searches,
* which are helpful but can have adverse effects on performance.
*
- * Default is <code>true</code>
+ * Default is <code>false</code> (Note that prior to HAPI FHIR
+ * 3.5.0 the default was <code>true</code>)
+ *
+ *
+ * Note: If you change this value after data has
+ * already been stored in the database, you must force a reindexing
+ * of all data in the database or resources may not be
+ * searchable.
+ *
*/
public boolean isAllowContainsSearches() {
return myAllowContainsSearches;
@@ -764,12 +772,21 @@ public class DaoConfig {
* If enabled, the server will support the use of :contains searches,
* which are helpful but can have adverse effects on performance.
*
- * Default is <code>true</code>
+ * Default is <code>false</code> (Note that prior to HAPI FHIR
+ * 3.5.0 the default was <code>true</code>)
+ *
+ *
+ * Note: If you change this value after data has
+ * already been stored in the database, you must force a reindexing
+ * of all data in the database or resources may not be
+ * searchable.
+ *
*/
public void setAllowContainsSearches(boolean theAllowContainsSearches) {
this.myAllowContainsSearches = theAllowContainsSearches;
}
+
/**
 * If set to <code>true</code> (default is <code>false</code>) the server will allow
* resources to have references to external servers. For example if this server is
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FhirResourceDaoMessageHeaderDstu2.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FhirResourceDaoMessageHeaderDstu2.java
new file mode 100644
index 00000000000..19c1c627d0e
--- /dev/null
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FhirResourceDaoMessageHeaderDstu2.java
@@ -0,0 +1,38 @@
+package ca.uhn.fhir.jpa.dao;
+
+/*
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2018 University Health Network
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import ca.uhn.fhir.model.dstu2.resource.MessageHeader;
+import ca.uhn.fhir.rest.api.server.RequestDetails;
+import ca.uhn.fhir.rest.server.exceptions.NotImplementedOperationException;
+import org.hl7.fhir.instance.model.api.IBaseBundle;
+
+public class FhirResourceDaoMessageHeaderDstu2 extends FhirResourceDaoDstu2 implements IFhirResourceDaoMessageHeader {
+
+ @Override
+ public IBaseBundle messageHeaderProcessMessage(RequestDetails theRequestDetails, IBaseBundle theMessage) {
+ return FhirResourceDaoMessageHeaderDstu2.throwProcessMessageNotImplemented();
+ }
+
+ public static IBaseBundle throwProcessMessageNotImplemented() {
+ throw new NotImplementedOperationException("This operation is not yet implemented on this server");
+ }
+}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FhirResourceDaoValueSetDstu2.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FhirResourceDaoValueSetDstu2.java
index fb97650f2f6..b3a87e35165 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FhirResourceDaoValueSetDstu2.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FhirResourceDaoValueSetDstu2.java
@@ -28,6 +28,7 @@ import java.util.*;
import javax.annotation.PostConstruct;
import org.apache.commons.codec.binary.StringUtils;
+import org.hl7.fhir.instance.hapi.validation.CachingValidationSupport;
import org.hl7.fhir.instance.hapi.validation.DefaultProfileValidationSupport;
import org.hl7.fhir.instance.hapi.validation.ValidationSupportChain;
import org.hl7.fhir.instance.model.api.IIdType;
@@ -62,7 +63,7 @@ public class FhirResourceDaoValueSetDstu2 extends FhirResourceDaoDstu2
@Qualifier("myFhirContextDstu2Hl7Org")
private FhirContext myRiCtx;
- private ValidationSupportChain myValidationSupport;
+ private CachingValidationSupport myValidationSupport;
private void addCompose(String theFilter, ValueSet theValueSetToPopulate, ValueSet theSourceValueSet, CodeSystemConcept theConcept) {
if (isBlank(theFilter)) {
@@ -252,7 +253,7 @@ public class FhirResourceDaoValueSetDstu2 extends FhirResourceDaoDstu2
public void postConstruct() {
super.postConstruct();
myDefaultProfileValidationSupport = new DefaultProfileValidationSupport();
- myValidationSupport = new ValidationSupportChain(myDefaultProfileValidationSupport, myJpaValidationSupport);
+ myValidationSupport = new CachingValidationSupport(new ValidationSupportChain(myDefaultProfileValidationSupport, myJpaValidationSupport));
}
@Override
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/IDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/IDao.java
index 90f8e6e4a15..fd47335fb25 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/IDao.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/IDao.java
@@ -1,15 +1,18 @@
package ca.uhn.fhir.jpa.dao;
+import java.util.Collection;
+import java.util.Set;
+
+import org.hl7.fhir.instance.model.api.IBaseResource;
+
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.context.RuntimeSearchParam;
import ca.uhn.fhir.jpa.entity.BaseHasResource;
+import ca.uhn.fhir.jpa.entity.IBaseResourceEntity;
import ca.uhn.fhir.jpa.entity.ResourceTable;
+import ca.uhn.fhir.jpa.entity.ResourceTag;
import ca.uhn.fhir.jpa.search.PersistedJpaBundleProvider;
-import org.hl7.fhir.instance.model.api.IBaseResource;
-
-import java.util.Collection;
-import java.util.Set;
/*
* #%L
@@ -56,6 +59,6 @@ public interface IDao {
IBaseResource toResource(BaseHasResource theEntity, boolean theForHistoryOperation);
- R toResource(Class theResourceType, BaseHasResource theEntity, boolean theForHistoryOperation);
+ R toResource(Class theResourceType, IBaseResourceEntity theEntity, Collection theTagList, boolean theForHistoryOperation);
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/IFhirResourceDaoMessageHeader.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/IFhirResourceDaoMessageHeader.java
new file mode 100644
index 00000000000..1bbc494b96c
--- /dev/null
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/IFhirResourceDaoMessageHeader.java
@@ -0,0 +1,31 @@
+package ca.uhn.fhir.jpa.dao;
+
+import ca.uhn.fhir.rest.api.server.RequestDetails;
+import org.hl7.fhir.instance.model.api.IBaseBundle;
+import org.hl7.fhir.instance.model.api.IBaseResource;
+
+/*
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2018 University Health Network
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+public interface IFhirResourceDaoMessageHeader extends IFhirResourceDao {
+
+ IBaseBundle messageHeaderProcessMessage(RequestDetails theRequestDetails, IBaseBundle theMessage);
+
+}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/SearchBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/SearchBuilder.java
index bc33de028e1..2d76bb864d8 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/SearchBuilder.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/SearchBuilder.java
@@ -23,6 +23,8 @@ package ca.uhn.fhir.jpa.dao;
import ca.uhn.fhir.context.*;
import ca.uhn.fhir.jpa.dao.data.IForcedIdDao;
import ca.uhn.fhir.jpa.dao.data.IResourceIndexedSearchParamUriDao;
+import ca.uhn.fhir.jpa.dao.data.IResourceSearchViewDao;
+import ca.uhn.fhir.jpa.dao.data.IResourceTagDao;
import ca.uhn.fhir.jpa.entity.*;
import ca.uhn.fhir.jpa.search.JpaRuntimeSearchParam;
import ca.uhn.fhir.jpa.term.IHapiTerminologySvc;
@@ -53,7 +55,6 @@ import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import org.apache.commons.lang3.ObjectUtils;
-import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
@@ -61,6 +62,8 @@ import org.apache.commons.lang3.tuple.Pair;
import org.hibernate.ScrollMode;
import org.hibernate.ScrollableResults;
import org.hibernate.query.Query;
+import org.hibernate.query.criteria.internal.CriteriaBuilderImpl;
+import org.hibernate.query.criteria.internal.predicate.BooleanStaticAssertionPredicate;
import org.hl7.fhir.dstu3.model.BaseResource;
import org.hl7.fhir.instance.model.api.IAnyResource;
import org.hl7.fhir.instance.model.api.IBaseResource;
@@ -69,7 +72,6 @@ import org.hl7.fhir.instance.model.api.IIdType;
import javax.persistence.EntityManager;
import javax.persistence.TypedQuery;
import javax.persistence.criteria.*;
-import javax.persistence.criteria.CriteriaBuilder.In;
import java.math.BigDecimal;
import java.math.MathContext;
import java.util.*;
@@ -108,12 +110,17 @@ public class SearchBuilder implements ISearchBuilder {
private IHapiTerminologySvc myTerminologySvc;
private int myFetchSize;
+ protected IResourceTagDao myResourceTagDao;
+ protected IResourceSearchViewDao myResourceSearchViewDao;
+
/**
* Constructor
*/
- public SearchBuilder(FhirContext theFhirContext, EntityManager theEntityManager, IFulltextSearchSvc theFulltextSearchSvc,
- BaseHapiFhirDao> theDao,
- IResourceIndexedSearchParamUriDao theResourceIndexedSearchParamUriDao, IForcedIdDao theForcedIdDao, IHapiTerminologySvc theTerminologySvc, ISearchParamRegistry theSearchParamRegistry) {
+ public SearchBuilder(FhirContext theFhirContext, EntityManager theEntityManager,
+ IFulltextSearchSvc theFulltextSearchSvc, BaseHapiFhirDao> theDao,
+ IResourceIndexedSearchParamUriDao theResourceIndexedSearchParamUriDao, IForcedIdDao theForcedIdDao,
+ IHapiTerminologySvc theTerminologySvc, ISearchParamRegistry theSearchParamRegistry,
+ IResourceTagDao theResourceTagDao, IResourceSearchViewDao theResourceViewDao) {
myContext = theFhirContext;
myEntityManager = theEntityManager;
myFulltextSearchSvc = theFulltextSearchSvc;
@@ -122,6 +129,8 @@ public class SearchBuilder implements ISearchBuilder {
myForcedIdDao = theForcedIdDao;
myTerminologySvc = theTerminologySvc;
mySearchParamRegistry = theSearchParamRegistry;
+ myResourceTagDao = theResourceTagDao;
+ myResourceSearchViewDao = theResourceViewDao;
}
private void addPredicateComposite(String theResourceName, RuntimeSearchParam theParamDef, List extends IQueryParameterType> theNextAnd) {
@@ -257,7 +266,7 @@ public class SearchBuilder implements ISearchBuilder {
return;
}
- List codePredicates = new ArrayList();
+ List codePredicates = new ArrayList<>();
for (IQueryParameterType nextOr : theList) {
IQueryParameterType params = nextOr;
@@ -273,8 +282,9 @@ public class SearchBuilder implements ISearchBuilder {
ParamPrefixEnum prefix = ObjectUtils.defaultIfNull(param.getPrefix(), ParamPrefixEnum.EQUAL);
String invalidMessageName = "invalidNumberPrefix";
- Predicate num = createPredicateNumeric(theResourceName, theParamName, join, myBuilder, params, prefix, value, fromObj, invalidMessageName);
- codePredicates.add(num);
+ Predicate predicateNumeric = createPredicateNumeric(theResourceName, theParamName, join, myBuilder, params, prefix, value, fromObj, invalidMessageName);
+ Predicate predicateOuter = combineParamIndexPredicateWithParamNamePredicate(theResourceName, theParamName, join, predicateNumeric );
+ codePredicates.add(predicateOuter);
} else {
throw new IllegalArgumentException("Invalid token type: " + params.getClass());
@@ -287,11 +297,10 @@ public class SearchBuilder implements ISearchBuilder {
private void addPredicateParamMissing(String theResourceName, String theParamName, boolean theMissing) {
Join paramPresentJoin = myResourceTableRoot.join("mySearchParamPresents", JoinType.LEFT);
- Join paramJoin = paramPresentJoin.join("mySearchParam", JoinType.LEFT);
- myPredicates.add(myBuilder.equal(paramJoin.get("myResourceName"), theResourceName));
- myPredicates.add(myBuilder.equal(paramJoin.get("myParamName"), theParamName));
- myPredicates.add(myBuilder.equal(paramPresentJoin.get("myPresent"), !theMissing));
+ Expression hashPresence = paramPresentJoin.get("myHashPresence").as(Long.class);
+ Long hash = SearchParamPresent.calculateHashPresence(theResourceName, theParamName, !theMissing);
+ myPredicates.add(myBuilder.equal(hashPresence, hash));
}
private void addPredicateParamMissing(String theResourceName, String theParamName, boolean theMissing, Join theJoin) {
@@ -309,7 +318,7 @@ public class SearchBuilder implements ISearchBuilder {
return;
}
- List codePredicates = new ArrayList();
+ List codePredicates = new ArrayList<>();
for (IQueryParameterType nextOr : theList) {
Predicate singleCode = createPredicateQuantity(nextOr, theResourceName, theParamName, myBuilder, join);
@@ -332,7 +341,7 @@ public class SearchBuilder implements ISearchBuilder {
Join join = createOrReuseJoin(JoinEnum.REFERENCE, theParamName);
- List codePredicates = new ArrayList();
+ List codePredicates = new ArrayList<>();
for (IQueryParameterType nextOr : theList) {
@@ -429,7 +438,7 @@ public class SearchBuilder implements ISearchBuilder {
} else {
RuntimeResourceDefinition resDef = myContext.getResourceDefinition(ref.getResourceType());
- resourceTypes = new ArrayList>(1);
+ resourceTypes = new ArrayList<>(1);
resourceTypes.add(resDef.getImplementingClass());
resourceId = ref.getIdPart();
}
@@ -474,7 +483,7 @@ public class SearchBuilder implements ISearchBuilder {
IQueryParameterType chainValue;
if (remainingChain != null) {
if (param == null || param.getParamType() != RestSearchParameterTypeEnum.REFERENCE) {
- ourLog.debug("Type {} parameter {} is not a reference, can not chain {}", new Object[] {nextType.getSimpleName(), chain, remainingChain});
+ ourLog.debug("Type {} parameter {} is not a reference, can not chain {}", nextType.getSimpleName(), chain, remainingChain);
continue;
}
@@ -495,7 +504,7 @@ public class SearchBuilder implements ISearchBuilder {
Root subQfrom = subQ.from(ResourceTable.class);
subQ.select(subQfrom.get("myId").as(Long.class));
- List> andOrParams = new ArrayList>();
+ List> andOrParams = new ArrayList<>();
andOrParams.add(Collections.singletonList(chainValue));
/*
@@ -546,7 +555,7 @@ public class SearchBuilder implements ISearchBuilder {
private void addPredicateResourceId(List> theValues) {
for (List extends IQueryParameterType> nextValue : theValues) {
- Set orPids = new HashSet();
+ Set orPids = new HashSet<>();
for (IQueryParameterType next : nextValue) {
String value = next.getValueAsQueryToken(myContext);
if (value != null && value.startsWith("|")) {
@@ -594,10 +603,9 @@ public class SearchBuilder implements ISearchBuilder {
return;
}
- List codePredicates = new ArrayList();
+ List codePredicates = new ArrayList<>();
for (IQueryParameterType nextOr : theList) {
- IQueryParameterType theParameter = nextOr;
- Predicate singleCode = createPredicateString(theParameter, theResourceName, theParamName, myBuilder, join);
+ Predicate singleCode = createPredicateString(nextOr, theResourceName, theParamName, myBuilder, join);
codePredicates.add(singleCode);
}
@@ -742,7 +750,7 @@ public class SearchBuilder implements ISearchBuilder {
return;
}
- List codePredicates = new ArrayList();
+ List codePredicates = new ArrayList<>();
for (IQueryParameterType nextOr : theList) {
if (nextOr instanceof TokenParam) {
@@ -785,7 +793,6 @@ public class SearchBuilder implements ISearchBuilder {
continue;
}
- Predicate predicate;
if (param.getQualifier() == UriParamQualifierEnum.ABOVE) {
/*
@@ -814,14 +821,24 @@ public class SearchBuilder implements ISearchBuilder {
continue;
}
- predicate = join.get("myUri").as(String.class).in(toFind);
+ Predicate uriPredicate = join.get("myUri").as(String.class).in(toFind);
+ Predicate hashAndUriPredicate = combineParamIndexPredicateWithParamNamePredicate(theResourceName, theParamName, join, uriPredicate);
+ codePredicates.add(hashAndUriPredicate);
} else if (param.getQualifier() == UriParamQualifierEnum.BELOW) {
- predicate = myBuilder.like(join.get("myUri").as(String.class), createLeftMatchLikeExpression(value));
+
+ Predicate uriPredicate = myBuilder.like(join.get("myUri").as(String.class), createLeftMatchLikeExpression(value));
+ Predicate hashAndUriPredicate = combineParamIndexPredicateWithParamNamePredicate(theResourceName, theParamName, join, uriPredicate);
+ codePredicates.add(hashAndUriPredicate);
+
} else {
- predicate = myBuilder.equal(join.get("myUri").as(String.class), value);
+
+ long hashUri = ResourceIndexedSearchParamUri.calculateHashUri(theResourceName, theParamName, value);
+ Predicate hashPredicate = myBuilder.equal(join.get("myHashUri"), hashUri);
+ codePredicates.add(hashPredicate);
+
}
- codePredicates.add(predicate);
+
} else {
throw new IllegalArgumentException("Invalid URI type: " + nextOr.getClass());
}
@@ -839,16 +856,13 @@ public class SearchBuilder implements ISearchBuilder {
}
Predicate orPredicate = myBuilder.or(toArray(codePredicates));
-
- Predicate outerPredicate = combineParamIndexPredicateWithParamNamePredicate(theResourceName, theParamName, join, orPredicate);
- myPredicates.add(outerPredicate);
+ myPredicates.add(orPredicate);
}
private Predicate combineParamIndexPredicateWithParamNamePredicate(String theResourceName, String theParamName, From, ? extends BaseResourceIndexedSearchParam> theFrom, Predicate thePredicate) {
- Predicate resourceTypePredicate = myBuilder.equal(theFrom.get("myResourceType"), theResourceName);
- Predicate paramNamePredicate = myBuilder.equal(theFrom.get("myParamName"), theParamName);
- Predicate outerPredicate = myBuilder.and(resourceTypePredicate, paramNamePredicate, thePredicate);
- return outerPredicate;
+ long hashIdentity = BaseResourceIndexedSearchParam.calculateHashIdentity(theResourceName, theParamName);
+ Predicate hashIdentityPredicate = myBuilder.equal(theFrom.get("myHashIdentity"), hashIdentity);
+ return myBuilder.and(hashIdentityPredicate, thePredicate);
}
private Predicate createCompositeParamPart(String theResourceName, Root theRoot, RuntimeSearchParam theParam, IQueryParameterType leftValue) {
@@ -1028,7 +1042,7 @@ public class SearchBuilder implements ISearchBuilder {
if (theParamName == null) {
return num;
}
- return combineParamIndexPredicateWithParamNamePredicate(theResourceName, theParamName, theFrom, num);
+ return num;
}
private Predicate createPredicateQuantity(IQueryParameterType theParam, String theResourceName, String theParamName, CriteriaBuilder theBuilder,
@@ -1054,39 +1068,31 @@ public class SearchBuilder implements ISearchBuilder {
throw new IllegalArgumentException("Invalid quantity type: " + theParam.getClass());
}
- Predicate system = null;
- if (!isBlank(systemValue)) {
- system = theBuilder.equal(theFrom.get("mySystem"), systemValue);
- }
-
- Predicate code = null;
- if (!isBlank(unitsValue)) {
- code = theBuilder.equal(theFrom.get("myUnits"), unitsValue);
+ Predicate hashPredicate;
+ if (!isBlank(systemValue) && !isBlank(unitsValue)) {
+ long hash = ResourceIndexedSearchParamQuantity.calculateHashSystemAndUnits(theResourceName, theParamName, systemValue, unitsValue);
+ hashPredicate = myBuilder.equal(theFrom.get("myHashIdentitySystemAndUnits"), hash);
+ } else if (!isBlank(unitsValue)) {
+ long hash = ResourceIndexedSearchParamQuantity.calculateHashUnits(theResourceName, theParamName, unitsValue);
+ hashPredicate = myBuilder.equal(theFrom.get("myHashIdentityAndUnits"), hash);
+ } else {
+ long hash = BaseResourceIndexedSearchParam.calculateHashIdentity(theResourceName, theParamName);
+ hashPredicate = myBuilder.equal(theFrom.get("myHashIdentity"), hash);
}
cmpValue = ObjectUtils.defaultIfNull(cmpValue, ParamPrefixEnum.EQUAL);
final Expression path = theFrom.get("myValue");
String invalidMessageName = "invalidQuantityPrefix";
- Predicate num = createPredicateNumeric(theResourceName, null, theFrom, theBuilder, theParam, cmpValue, valueValue, path, invalidMessageName);
+ Predicate numericPredicate = createPredicateNumeric(theResourceName, null, theFrom, theBuilder, theParam, cmpValue, valueValue, path, invalidMessageName);
- Predicate singleCode;
- if (system == null && code == null) {
- singleCode = num;
- } else if (system == null) {
- singleCode = theBuilder.and(code, num);
- } else if (code == null) {
- singleCode = theBuilder.and(system, num);
- } else {
- singleCode = theBuilder.and(system, code, num);
- }
-
- return combineParamIndexPredicateWithParamNamePredicate(theResourceName, theParamName, theFrom, singleCode);
+ return theBuilder.and(hashPredicate, numericPredicate);
}
private Predicate createPredicateString(IQueryParameterType theParameter, String theResourceName, String theParamName, CriteriaBuilder theBuilder,
From, ResourceIndexedSearchParamString> theFrom) {
String rawSearchTerm;
+ DaoConfig daoConfig = myCallingDao.getConfig();
if (theParameter instanceof TokenParam) {
TokenParam id = (TokenParam) theParameter;
if (!id.isText()) {
@@ -1097,7 +1103,7 @@ public class SearchBuilder implements ISearchBuilder {
StringParam id = (StringParam) theParameter;
rawSearchTerm = id.getValue();
if (id.isContains()) {
- if (!myCallingDao.getConfig().isAllowContainsSearches()) {
+ if (!daoConfig.isAllowContainsSearches()) {
throw new MethodNotAllowedException(":contains modifier is disabled on this server");
}
}
@@ -1113,22 +1119,34 @@ public class SearchBuilder implements ISearchBuilder {
+ ResourceIndexedSearchParamString.MAX_LENGTH + "): " + rawSearchTerm);
}
- String likeExpression = BaseHapiFhirDao.normalizeString(rawSearchTerm);
- if (theParameter instanceof StringParam &&
- ((StringParam) theParameter).isContains() &&
- myCallingDao.getConfig().isAllowContainsSearches()) {
- likeExpression = createLeftAndRightMatchLikeExpression(likeExpression);
+ boolean exactMatch = theParameter instanceof StringParam && ((StringParam) theParameter).isExact();
+ if (exactMatch) {
+
+ // Exact match
+
+ Long hash = ResourceIndexedSearchParamString.calculateHashExact(theResourceName, theParamName, rawSearchTerm);
+ return theBuilder.equal(theFrom.get("myHashExact").as(Long.class), hash);
+
} else {
- likeExpression = createLeftMatchLikeExpression(likeExpression);
- }
- Predicate singleCode = theBuilder.like(theFrom.get("myValueNormalized").as(String.class), likeExpression);
- if (theParameter instanceof StringParam && ((StringParam) theParameter).isExact()) {
- Predicate exactCode = theBuilder.equal(theFrom.get("myValueExact"), rawSearchTerm);
- singleCode = theBuilder.and(singleCode, exactCode);
- }
+ // Normalized Match
- return combineParamIndexPredicateWithParamNamePredicate(theResourceName, theParamName, theFrom, singleCode);
+ String normalizedString = BaseHapiFhirDao.normalizeString(rawSearchTerm);
+ String likeExpression;
+ if (theParameter instanceof StringParam &&
+ ((StringParam) theParameter).isContains() &&
+ daoConfig.isAllowContainsSearches()) {
+ likeExpression = createLeftAndRightMatchLikeExpression(normalizedString);
+ } else {
+ likeExpression = createLeftMatchLikeExpression(normalizedString);
+ }
+
+ Long hash = ResourceIndexedSearchParamString.calculateHashNormalized(daoConfig, theResourceName, theParamName, normalizedString);
+ Predicate hashCode = theBuilder.equal(theFrom.get("myHashNormalizedPrefix").as(Long.class), hash);
+ Predicate singleCode = theBuilder.like(theFrom.get("myValueNormalized").as(String.class), likeExpression);
+ return theBuilder.and(hashCode, singleCode);
+
+ }
}
private List createPredicateTagList(Path theDefJoin, CriteriaBuilder theBuilder, TagTypeEnum theTagType, List> theTokens) {
@@ -1183,7 +1201,7 @@ public class SearchBuilder implements ISearchBuilder {
* Process token modifiers (:in, :below, :above)
*/
- List codes = null;
+ List codes;
if (modifier == TokenParamModifier.IN) {
codes = myTerminologySvc.expandValueSet(code);
} else if (modifier == TokenParamModifier.ABOVE) {
@@ -1192,81 +1210,53 @@ public class SearchBuilder implements ISearchBuilder {
} else if (modifier == TokenParamModifier.BELOW) {
system = determineSystemIfMissing(theParamName, code, system);
codes = myTerminologySvc.findCodesBelow(system, code);
- }
-
- ArrayList singleCodePredicates = new ArrayList<>();
- if (codes != null) {
-
- if (codes.isEmpty()) {
-
- // This will never match anything
- Predicate codePredicate = theBuilder.isNull(theFrom.get("myMissing"));
- singleCodePredicates.add(codePredicate);
-
- } else {
- List orPredicates = new ArrayList();
- Map> map = new HashMap>();
- for (VersionIndependentConcept nextCode : codes) {
- List systemCodes = map.get(nextCode.getSystem());
- if (null == systemCodes) {
- systemCodes = new ArrayList<>();
- map.put(nextCode.getSystem(), systemCodes);
- }
- systemCodes.add(nextCode);
- }
- // Use "in" in case of large numbers of codes due to param modifiers
- final Path systemExpression = theFrom.get("mySystem");
- final Path valueExpression = theFrom.get("myValue");
- for (Map.Entry> entry : map.entrySet()) {
- Predicate systemPredicate = theBuilder.equal(systemExpression, entry.getKey());
- In codePredicate = theBuilder.in(valueExpression);
- for (VersionIndependentConcept nextCode : entry.getValue()) {
- codePredicate.value(nextCode.getCode());
- }
- orPredicates.add(theBuilder.and(systemPredicate, codePredicate));
- }
-
- singleCodePredicates.add(theBuilder.or(orPredicates.toArray(new Predicate[orPredicates.size()])));
- }
-
} else {
+ codes = Collections.singletonList(new VersionIndependentConcept(system, code));
+ }
- /*
- * Ok, this is a normal query
- */
+ if (codes.isEmpty()) {
+ // This will never match anything
+ return new BooleanStaticAssertionPredicate((CriteriaBuilderImpl) theBuilder, false);
+ }
- if (StringUtils.isNotBlank(system)) {
- if (modifier != null && modifier == TokenParamModifier.NOT) {
- singleCodePredicates.add(theBuilder.notEqual(theFrom.get("mySystem"), system));
- } else {
- singleCodePredicates.add(theBuilder.equal(theFrom.get("mySystem"), system));
- }
- } else if (system == null) {
- // don't check the system
+ /*
+ * Note: A null system value means "match any system", but
+ * an empty-string system value means "match values that
+ * explicitly have no system".
+ */
+ boolean haveSystem = codes.get(0).getSystem() != null;
+ boolean haveCode = isNotBlank(codes.get(0).getCode());
+ Expression hashField;
+ if (!haveSystem && !haveCode) {
+ // If we have neither, this isn't actually an expression so
+ // just return 1=1
+ return new BooleanStaticAssertionPredicate((CriteriaBuilderImpl) theBuilder, true);
+ } else if (haveSystem && haveCode) {
+ hashField = theFrom.get("myHashSystemAndValue").as(Long.class);
+ } else if (haveSystem) {
+ hashField = theFrom.get("myHashSystem").as(Long.class);
+ } else {
+ hashField = theFrom.get("myHashValue").as(Long.class);
+ }
+
+ List values = new ArrayList<>(codes.size());
+ for (VersionIndependentConcept next : codes) {
+ if (haveSystem && haveCode) {
+ values.add(ResourceIndexedSearchParamToken.calculateHashSystemAndValue(theResourceName, theParamName, next.getSystem(), next.getCode()));
+ } else if (haveSystem) {
+ values.add(ResourceIndexedSearchParamToken.calculateHashSystem(theResourceName, theParamName, next.getSystem()));
} else {
- // If the system is "", we only match on null systems
- singleCodePredicates.add(theBuilder.isNull(theFrom.get("mySystem")));
- }
-
- if (StringUtils.isNotBlank(code)) {
- if (modifier != null && modifier == TokenParamModifier.NOT) {
- singleCodePredicates.add(theBuilder.notEqual(theFrom.get("myValue"), code));
- } else {
- singleCodePredicates.add(theBuilder.equal(theFrom.get("myValue"), code));
- }
- } else {
- /*
- * As of HAPI FHIR 1.5, if the client searched for a token with a system but no specified value this means to
- * match all tokens with the given value.
- *
- * I'm not sure I agree with this, but hey.. FHIR-I voted and this was the result :)
- */
- // singleCodePredicates.add(theBuilder.isNull(theFrom.get("myValue")));
+ values.add(ResourceIndexedSearchParamToken.calculateHashValue(theResourceName, theParamName, next.getCode()));
}
}
- Predicate singleCode = theBuilder.and(toArray(singleCodePredicates));
- return combineParamIndexPredicateWithParamNamePredicate(theResourceName, theParamName, theFrom, singleCode);
+ Predicate predicate = hashField.in(values);
+ if (modifier == TokenParamModifier.NOT) {
+ Predicate identityPredicate = theBuilder.equal(theFrom.get("myHashIdentity").as(Long.class), BaseResourceIndexedSearchParam.calculateHashIdentity(theResourceName, theParamName));
+ Predicate disjunctionPredicate = theBuilder.not(predicate);
+ predicate = theBuilder.and(identityPredicate, disjunctionPredicate);
+ }
+ return predicate;
}
@Override
@@ -1371,8 +1361,8 @@ public class SearchBuilder implements ISearchBuilder {
if (myParams.getEverythingMode() != null) {
Join join = myResourceTableRoot.join("myResourceLinks", JoinType.LEFT);
- if (myParams.get(BaseResource.SP_RES_ID) != null) {
- StringParam idParm = (StringParam) myParams.get(BaseResource.SP_RES_ID).get(0).get(0);
+ if (myParams.get(IAnyResource.SP_RES_ID) != null) {
+ StringParam idParm = (StringParam) myParams.get(IAnyResource.SP_RES_ID).get(0).get(0);
Long pid = BaseHapiFhirDao.translateForcedIdToPid(myResourceName, idParm.getValue(), myForcedIdDao);
if (myAlsoIncludePids == null) {
myAlsoIncludePids = new ArrayList<>(1);
@@ -1462,7 +1452,7 @@ public class SearchBuilder implements ISearchBuilder {
return false;
}
- if (BaseResource.SP_RES_ID.equals(theSort.getParamName())) {
+ if (IAnyResource.SP_RES_ID.equals(theSort.getParamName())) {
From, ?> forcedIdJoin = theFrom.join("myForcedId", JoinType.LEFT);
if (theSort.getOrder() == null || theSort.getOrder() == SortOrderEnum.ASC) {
theOrders.add(theBuilder.asc(forcedIdJoin.get("myForcedId")));
@@ -1602,35 +1592,39 @@ public class SearchBuilder implements ISearchBuilder {
private void doLoadPids(List theResourceListToPopulate, Set theRevIncludedPids, boolean theForHistoryOperation, EntityManager entityManager, FhirContext context, IDao theDao,
Map position, Collection pids) {
- CriteriaBuilder builder = entityManager.getCriteriaBuilder();
- CriteriaQuery cq = builder.createQuery(ResourceTable.class);
- Root from = cq.from(ResourceTable.class);
- cq.where(from.get("myId").in(pids));
- TypedQuery q = entityManager.createQuery(cq);
- List resultList = q.getResultList();
-
- for (ResourceTable next : resultList) {
+ // -- get the resource from the searchView
+ Collection resourceSearchViewList = myResourceSearchViewDao.findByResourceIds(pids);
+
+ //-- preload all tags with tag definition if any
+ Map> tagMap = getResourceTagMap(resourceSearchViewList);
+
+ Long resourceId = null;
+ for (ResourceSearchView next : resourceSearchViewList) {
+
Class extends IBaseResource> resourceType = context.getResourceDefinition(next.getResourceType()).getImplementingClass();
- IBaseResource resource = theDao.toResource(resourceType, next, theForHistoryOperation);
+
+ resourceId = next.getId();
+
+ IBaseResource resource = theDao.toResource(resourceType, next, tagMap.get(resourceId), theForHistoryOperation);
if (resource == null) {
ourLog.warn("Unable to find resource {}/{}/_history/{} in database", next.getResourceType(), next.getIdDt().getIdPart(), next.getVersion());
continue;
}
- Integer index = position.get(next.getId());
+ Integer index = position.get(resourceId);
if (index == null) {
- ourLog.warn("Got back unexpected resource PID {}", next.getId());
+ ourLog.warn("Got back unexpected resource PID {}", resourceId);
continue;
}
if (resource instanceof IResource) {
- if (theRevIncludedPids.contains(next.getId())) {
+ if (theRevIncludedPids.contains(resourceId)) {
ResourceMetadataKeyEnum.ENTRY_SEARCH_MODE.put((IResource) resource, BundleEntrySearchModeEnum.INCLUDE);
} else {
ResourceMetadataKeyEnum.ENTRY_SEARCH_MODE.put((IResource) resource, BundleEntrySearchModeEnum.MATCH);
}
} else {
- if (theRevIncludedPids.contains(next.getId())) {
+ if (theRevIncludedPids.contains(resourceId)) {
ResourceMetadataKeyEnum.ENTRY_SEARCH_MODE.put((IAnyResource) resource, BundleEntrySearchModeEnum.INCLUDE.getCode());
} else {
ResourceMetadataKeyEnum.ENTRY_SEARCH_MODE.put((IAnyResource) resource, BundleEntrySearchModeEnum.MATCH.getCode());
@@ -1641,6 +1635,44 @@ public class SearchBuilder implements ISearchBuilder {
}
}
+ private Map> getResourceTagMap(Collection theResourceSearchViewList) {
+
+ List idList = new ArrayList(theResourceSearchViewList.size());
+
+ //-- find all resources that have tags
+ for (ResourceSearchView resource: theResourceSearchViewList) {
+ if (resource.isHasTags())
+ idList.add(resource.getId());
+ }
+
+ Map> tagMap = new HashMap<>();
+
+ //-- no tags
+ if (idList.size() == 0)
+ return tagMap;
+
+ //-- get all tags for the idList
+ Collection tagList = myResourceTagDao.findByResourceIds(idList);
+
+ //-- build the map, key = resourceId, value = list of ResourceTag
+ Long resourceId;
+ Collection tagCol;
+ for (ResourceTag tag : tagList) {
+
+ resourceId = tag.getResourceId();
+ tagCol = tagMap.get(resourceId);
+ if (tagCol == null) {
+ tagCol = new ArrayList<>();
+ tagCol.add(tag);
+ tagMap.put(resourceId, tagCol);
+ } else {
+ tagCol.add(tag);
+ }
+ }
+
+ return tagMap;
+ }
+
@Override
public void loadResourcesByPid(Collection theIncludePids, List theResourceListToPopulate, Set theRevIncludedPids, boolean theForHistoryOperation,
EntityManager entityManager, FhirContext context, IDao theDao) {
@@ -1677,18 +1709,16 @@ public class SearchBuilder implements ISearchBuilder {
}
/**
- * THIS SHOULD RETURN HASHSET and not jsut Set because we add to it later (so it can't be Collections.emptySet())
- *
- * @param theLastUpdated
+ * THIS SHOULD RETURN HASHSET and not just Set because we add to it later (so it can't be Collections.emptySet())
*/
@Override
public HashSet loadReverseIncludes(IDao theCallingDao, FhirContext theContext, EntityManager theEntityManager, Collection theMatches, Set theRevIncludes,
boolean theReverseMode, DateRangeParam theLastUpdated) {
if (theMatches.size() == 0) {
- return new HashSet();
+ return new HashSet<>();
}
if (theRevIncludes == null || theRevIncludes.isEmpty()) {
- return new HashSet();
+ return new HashSet<>();
}
String searchFieldName = theReverseMode ? "myTargetResourcePid" : "mySourceResourcePid";
@@ -1729,7 +1759,7 @@ public class SearchBuilder implements ISearchBuilder {
} else {
List paths;
- RuntimeSearchParam param = null;
+ RuntimeSearchParam param;
String resType = nextInclude.getParamType();
if (isBlank(resType)) {
continue;
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/SearchParamExtractorDstu2.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/SearchParamExtractorDstu2.java
index 5fe006ecee1..0352a99f074 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/SearchParamExtractorDstu2.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/SearchParamExtractorDstu2.java
@@ -59,7 +59,7 @@ public class SearchParamExtractorDstu2 extends BaseSearchParamExtractor implemen
searchTerm = searchTerm.substring(0, ResourceIndexedSearchParamString.MAX_LENGTH);
}
- ResourceIndexedSearchParamString nextEntity = new ResourceIndexedSearchParamString(resourceName, BaseHapiFhirDao.normalizeString(searchTerm), searchTerm);
+ ResourceIndexedSearchParamString nextEntity = new ResourceIndexedSearchParamString(getDaoConfig(), resourceName, BaseHapiFhirDao.normalizeString(searchTerm), searchTerm);
nextEntity.setResource(theEntity);
retVal.add(nextEntity);
}
@@ -68,7 +68,7 @@ public class SearchParamExtractorDstu2 extends BaseSearchParamExtractor implemen
if (value.length() > ResourceIndexedSearchParamString.MAX_LENGTH) {
value = value.substring(0, ResourceIndexedSearchParamString.MAX_LENGTH);
}
- ResourceIndexedSearchParamString nextEntity = new ResourceIndexedSearchParamString(nextSpDef.getName(), BaseHapiFhirDao.normalizeString(value), value);
+ ResourceIndexedSearchParamString nextEntity = new ResourceIndexedSearchParamString(getDaoConfig(), nextSpDef.getName(), BaseHapiFhirDao.normalizeString(value), value);
nextEntity.setResource(theEntity);
retVal.add(nextEntity);
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IForcedIdDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IForcedIdDao.java
index 2d24d6c7b00..9e2dc7c63e0 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IForcedIdDao.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IForcedIdDao.java
@@ -1,5 +1,6 @@
package ca.uhn.fhir.jpa.dao.data;
+import java.util.Collection;
import java.util.List;
/*
@@ -38,5 +39,7 @@ public interface IForcedIdDao extends JpaRepository {
@Query("SELECT f FROM ForcedId f WHERE f.myResourcePid = :resource_pid")
public ForcedId findByResourcePid(@Param("resource_pid") Long theResourcePid);
-
+
+ @Query("SELECT f FROM ForcedId f WHERE f.myResourcePid in (:pids)")
+ Collection findByResourcePids(@Param("pids") Collection pids);
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceHistoryTableDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceHistoryTableDao.java
index d6250ce64f6..03fa39d7957 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceHistoryTableDao.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceHistoryTableDao.java
@@ -1,6 +1,10 @@
package ca.uhn.fhir.jpa.dao.data;
-import ca.uhn.fhir.jpa.entity.ResourceHistoryTable;
+import java.util.Collection;
+import java.util.Date;
+
+import javax.persistence.TemporalType;
+
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Slice;
import org.springframework.data.jpa.repository.JpaRepository;
@@ -8,8 +12,7 @@ import org.springframework.data.jpa.repository.Query;
import org.springframework.data.jpa.repository.Temporal;
import org.springframework.data.repository.query.Param;
-import javax.persistence.TemporalType;
-import java.util.Date;
+import ca.uhn.fhir.jpa.entity.ResourceHistoryTable;
/*
* #%L
@@ -82,4 +85,10 @@ public interface IResourceHistoryTableDao extends JpaRepository findIdsOfPreviousVersionsOfResources(Pageable thePage);
+
+ @Query("" +
+ "SELECT h FROM ResourceHistoryTable h " +
+ "INNER JOIN ResourceTable r ON (r.myId = h.myResourceId and r.myVersion = h.myResourceVersion) " +
+ "WHERE r.myId in (:pids)")
+ Collection findByResourceIds(@Param("pids") Collection pids);
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceIndexedSearchParamStringDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceIndexedSearchParamStringDao.java
index 6bd5724bc8a..7b1ae15b3e7 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceIndexedSearchParamStringDao.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceIndexedSearchParamStringDao.java
@@ -23,7 +23,12 @@ package ca.uhn.fhir.jpa.dao.data;
import org.springframework.data.jpa.repository.JpaRepository;
import ca.uhn.fhir.jpa.entity.ResourceIndexedSearchParamString;
+import org.springframework.data.jpa.repository.Query;
+import org.springframework.data.repository.query.Param;
public interface IResourceIndexedSearchParamStringDao extends JpaRepository {
- // nothing yet
+
+ @Query("select count(*) from ResourceIndexedSearchParamString t WHERE t.myResourcePid = :resid")
+ int countForResourceId(@Param("resid") Long theResourcePid);
+
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceIndexedSearchParamTokenDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceIndexedSearchParamTokenDao.java
index 09679d390b0..9e30fd3b026 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceIndexedSearchParamTokenDao.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceIndexedSearchParamTokenDao.java
@@ -20,10 +20,14 @@ package ca.uhn.fhir.jpa.dao.data;
* #L%
*/
-import org.springframework.data.jpa.repository.JpaRepository;
-
import ca.uhn.fhir.jpa.entity.ResourceIndexedSearchParamToken;
+import org.springframework.data.jpa.repository.JpaRepository;
+import org.springframework.data.jpa.repository.Query;
+import org.springframework.data.repository.query.Param;
public interface IResourceIndexedSearchParamTokenDao extends JpaRepository {
- // nothing yet
+
+ @Query("select count(*) from ResourceIndexedSearchParamToken t WHERE t.myResourcePid = :resid")
+ int countForResourceId(@Param("resid") Long theResourcePid);
+
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ISearchParamDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceSearchViewDao.java
similarity index 68%
rename from hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ISearchParamDao.java
rename to hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceSearchViewDao.java
index e2184820d55..cf13d923fc6 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ISearchParamDao.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceSearchViewDao.java
@@ -1,5 +1,7 @@
package ca.uhn.fhir.jpa.dao.data;
+import java.util.Collection;
+
/*
* #%L
* HAPI FHIR JPA Server
@@ -10,7 +12,7 @@ package ca.uhn.fhir.jpa.dao.data;
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
- * http://www.apache.org/licenses/LICENSE-2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
@@ -24,11 +26,10 @@ import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
-import ca.uhn.fhir.jpa.entity.SearchParam;
+import ca.uhn.fhir.jpa.entity.ResourceSearchView;
-public interface ISearchParamDao extends JpaRepository<SearchParam, Long> {
-
- @Query("SELECT s FROM SearchParam s WHERE s.myResourceName = :resname AND s.myParamName = :parmname")
- public SearchParam findForResource(@Param("resname") String theResourceType, @Param("parmname") String theParamName);
+public interface IResourceSearchViewDao extends JpaRepository<ResourceSearchView, Long> {
+ @Query("SELECT v FROM ResourceSearchView v WHERE v.myResourceId in (:pids)")
+ Collection<ResourceSearchView> findByResourceIds(@Param("pids") Collection<Long> pids);
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceTagDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceTagDao.java
index 3acba7469a6..1ba407c1f4f 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceTagDao.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceTagDao.java
@@ -1,5 +1,7 @@
package ca.uhn.fhir.jpa.dao.data;
+import java.util.Collection;
+
/*
* #%L
* HAPI FHIR JPA Server
@@ -21,9 +23,15 @@ package ca.uhn.fhir.jpa.dao.data;
*/
import org.springframework.data.jpa.repository.JpaRepository;
+import org.springframework.data.jpa.repository.Query;
+import org.springframework.data.repository.query.Param;
import ca.uhn.fhir.jpa.entity.ResourceTag;
public interface IResourceTagDao extends JpaRepository<ResourceTag, Long> {
- // nothing
+ @Query("" +
+  "SELECT t FROM ResourceTag t " +
+  "INNER JOIN TagDefinition td ON (td.myId = t.myTagId) " +
+  "WHERE t.myResourceId in (:pids)")
+ Collection<ResourceTag> findByResourceIds(@Param("pids") Collection<Long> pids);
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ISearchDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ISearchDao.java
index f89d260f512..5414d0fcee4 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ISearchDao.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ISearchDao.java
@@ -36,19 +36,19 @@ import ca.uhn.fhir.jpa.entity.Search;
public interface ISearchDao extends JpaRepository {
@Query("SELECT s FROM Search s WHERE s.myUuid = :uuid")
- public Search findByUuid(@Param("uuid") String theUuid);
+ Search findByUuid(@Param("uuid") String theUuid);
@Query("SELECT s.myId FROM Search s WHERE s.mySearchLastReturned < :cutoff")
- public Slice findWhereLastReturnedBefore(@Param("cutoff") Date theCutoff, Pageable thePage);
+ Slice findWhereLastReturnedBefore(@Param("cutoff") Date theCutoff, Pageable thePage);
// @Query("SELECT s FROM Search s WHERE s.myCreated < :cutoff")
// public Collection findWhereCreatedBefore(@Param("cutoff") Date theCutoff);
@Query("SELECT s FROM Search s WHERE s.myResourceType = :type AND mySearchQueryStringHash = :hash AND s.myCreated > :cutoff")
- public Collection find(@Param("type") String theResourceType, @Param("hash") int theHashCode, @Param("cutoff") Date theCreatedCutoff);
+ Collection