Merge branch 'master' into master

This commit is contained in:
anamariaradu10 2018-07-27 16:59:52 +03:00 committed by GitHub
commit 3a5556006b
192 changed files with 13429 additions and 9729 deletions

.github/ISSUE_TEMPLATE/bug_report.md (new file)

@ -0,0 +1,32 @@
---
name: Bug report
about: Create a report to help us improve
---
NOTE: Before filing a ticket, please see the following URL:
https://github.com/jamesagnew/hapi-fhir/wiki/Getting-Help
**Describe the bug**
A clear and concise description of what the bug is.
**To Reproduce**
Steps to reproduce the behavior:
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
4. See error
**Expected behavior**
A clear and concise description of what you expected to happen.
**Screenshots**
If applicable, add screenshots to help explain your problem.
**Environment (please complete the following information):**
- HAPI FHIR Version
- OS: [e.g. iOS]
- Browser [e.g. chrome, safari]
**Additional context**
Add any other context about the problem here.


@ -17,3 +17,5 @@ A demonstration of this project is available here:
http://hapi.fhir.org/
This project is Open Source, licensed under the Apache Software License 2.0.
Please see [this wiki page](https://github.com/jamesagnew/hapi-fhir/wiki/Getting-Help) for information on where to get help with HAPI FHIR. Please see [Smile CDR](https://smilecdr.com) for information on commercial support.


@ -4,6 +4,7 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank;
import java.util.List;
import org.hl7.fhir.dstu3.model.IdType;
import org.hl7.fhir.instance.model.api.IBaseResource;
import ca.uhn.fhir.model.dstu2.resource.Patient;
@ -140,5 +141,21 @@ public class AuthorizationInterceptors {
}
};
//END SNIPPET: authorizeTenantAction
//START SNIPPET: patchAll
new AuthorizationInterceptor(PolicyEnum.DENY) {
@Override
public List<IAuthRule> buildRuleList(RequestDetails theRequestDetails) {
return new RuleBuilder()
// Authorize patch requests
.allow().patch().allRequests().andThen()
// Authorize actual writes that patch may perform
.allow().write().allResources().inCompartment("Patient", new IdType("Patient/123")).andThen()
.build();
}
};
//END SNIPPET: patchAll
}
}
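
For context, a minimal sketch of how an interceptor like the patchAll snippet above might be registered against a server. The class name MyRestfulServer is hypothetical, and the package layout assumes HAPI FHIR 3.x:

import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.RestfulServer;
import ca.uhn.fhir.rest.server.interceptor.auth.AuthorizationInterceptor;
import ca.uhn.fhir.rest.server.interceptor.auth.IAuthRule;
import ca.uhn.fhir.rest.server.interceptor.auth.PolicyEnum;
import ca.uhn.fhir.rest.server.interceptor.auth.RuleBuilder;
import java.util.List;

// Hypothetical servlet: deny everything except what the rule list allows
public class MyRestfulServer extends RestfulServer {
   @Override
   protected void initialize() {
      registerInterceptor(new AuthorizationInterceptor(PolicyEnum.DENY) {
         @Override
         public List<IAuthRule> buildRuleList(RequestDetails theRequestDetails) {
            // Same shape as the patchAll snippet above
            return new RuleBuilder()
               .allow().patch().allRequests().andThen()
               .build();
         }
      });
   }
}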


@ -1,7 +1,34 @@
package ca.uhn.fhir.context;
import ca.uhn.fhir.context.api.AddProfileTagEnum;
import ca.uhn.fhir.context.support.IContextValidationSupport;
import ca.uhn.fhir.fluentpath.IFluentPath;
import ca.uhn.fhir.i18n.HapiLocalizer;
import ca.uhn.fhir.model.api.IElement;
import ca.uhn.fhir.model.api.IFhirVersion;
import ca.uhn.fhir.model.api.IResource;
import ca.uhn.fhir.model.view.ViewGenerator;
import ca.uhn.fhir.narrative.INarrativeGenerator;
import ca.uhn.fhir.parser.*;
import ca.uhn.fhir.rest.api.IVersionSpecificBundleFactory;
import ca.uhn.fhir.rest.client.api.IBasicClient;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import ca.uhn.fhir.rest.client.api.IRestfulClient;
import ca.uhn.fhir.rest.client.api.IRestfulClientFactory;
import ca.uhn.fhir.util.FhirTerser;
import ca.uhn.fhir.util.ReflectionUtil;
import ca.uhn.fhir.util.VersionUtil;
import ca.uhn.fhir.validation.FhirValidator;
import org.apache.commons.lang3.Validate;
import org.hl7.fhir.instance.model.api.IBase;
import org.hl7.fhir.instance.model.api.IBaseBundle;
import org.hl7.fhir.instance.model.api.IBaseResource;
import java.io.IOException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.*;
import java.util.Map.Entry;
/*
* #%L
@ -23,30 +50,10 @@ import java.lang.reflect.Method;
* #L%
*/
import java.lang.reflect.Modifier;
import java.util.*;
import java.util.Map.Entry;
import org.apache.commons.lang3.Validate;
import org.hl7.fhir.instance.model.api.*;
import ca.uhn.fhir.context.api.AddProfileTagEnum;
import ca.uhn.fhir.context.support.IContextValidationSupport;
import ca.uhn.fhir.fluentpath.IFluentPath;
import ca.uhn.fhir.i18n.HapiLocalizer;
import ca.uhn.fhir.model.api.*;
import ca.uhn.fhir.model.view.ViewGenerator;
import ca.uhn.fhir.narrative.INarrativeGenerator;
import ca.uhn.fhir.parser.*;
import ca.uhn.fhir.rest.api.IVersionSpecificBundleFactory;
import ca.uhn.fhir.rest.client.api.*;
import ca.uhn.fhir.util.*;
import ca.uhn.fhir.validation.FhirValidator;
/**
* The FHIR context is the central starting point for the use of the HAPI FHIR API. It should be created once, and then
* used as a factory for various other types of objects (parsers, clients, etc.).
*
*
* <p>
* Important usage notes:
* </p>
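
A minimal sketch of the create-once pattern this javadoc describes; the DSTU3 version and the Patient resource are illustrative:

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.parser.IParser;
import org.hl7.fhir.dstu3.model.Patient;

public class FhirContextUsage {
   // Expensive to create: build once and reuse across the application
   private static final FhirContext CTX = FhirContext.forDstu3();

   public static void main(String[] args) {
      // Parsers are cheap but not guaranteed thread safe: one per thread/message
      IParser parser = CTX.newJsonParser().setPrettyPrint(true);
      System.out.println(parser.encodeResourceToString(new Patient()));
   }
}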
@ -68,6 +75,7 @@ public class FhirContext {
private static final List<Class<? extends IBaseResource>> EMPTY_LIST = Collections.emptyList();
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirContext.class);
private final IFhirVersion myVersion;
private AddProfileTagEnum myAddProfileTagWhenEncoding = AddProfileTagEnum.ONLY_FOR_CUSTOM;
private volatile Map<Class<? extends IBase>, BaseRuntimeElementDefinition<?>> myClassToElementDefinition = Collections.emptyMap();
private ArrayList<Class<? extends IBase>> myCustomTypes;
@ -87,14 +95,11 @@ public class FhirContext {
private volatile IRestfulClientFactory myRestfulClientFactory;
private volatile RuntimeChildUndeclaredExtensionDefinition myRuntimeChildUndeclaredExtensionDefinition;
private IContextValidationSupport<?, ?, ?, ?, ?, ?> myValidationSupport;
private final IFhirVersion myVersion;
private Map<FhirVersionEnum, Map<String, Class<? extends IBaseResource>>> myVersionToNameToResourceType = Collections.emptyMap();
/**
* @deprecated It is recommended that you use one of the static initializer methods instead
* of this method, e.g. {@link #forDstu2()} or {@link #forDstu3()} or {@link #forR4()}
* of this method, e.g. {@link #forDstu2()} or {@link #forDstu3()} or {@link #forR4()}
*/
@Deprecated
public FhirContext() {
@ -103,7 +108,7 @@ public class FhirContext {
/**
* @deprecated It is recommended that you use one of the static initializer methods instead
* of this method, e.g. {@link #forDstu2()} or {@link #forDstu3()} or {@link #forR4()}
* of this method, e.g. {@link #forDstu2()} or {@link #forDstu3()} or {@link #forR4()}
*/
@Deprecated
public FhirContext(Class<? extends IBaseResource> theResourceType) {
@ -112,7 +117,7 @@ public class FhirContext {
/**
* @deprecated It is recommended that you use one of the static initializer methods instead
* of this method, e.g. {@link #forDstu2()} or {@link #forDstu3()} or {@link #forR4()}
* of this method, e.g. {@link #forDstu2()} or {@link #forDstu3()} or {@link #forR4()}
*/
@Deprecated
public FhirContext(Class<?>... theResourceTypes) {
@ -121,7 +126,7 @@ public class FhirContext {
/**
* @deprecated It is recommended that you use one of the static initializer methods instead
* of this method, e.g. {@link #forDstu2()} or {@link #forDstu3()} or {@link #forR4()}
* of this method, e.g. {@link #forDstu2()} or {@link #forDstu3()} or {@link #forR4()}
*/
@Deprecated
public FhirContext(Collection<Class<? extends IBaseResource>> theResourceTypes) {
@ -161,7 +166,7 @@ public class FhirContext {
if (theVersion == null) {
ourLog.info("Creating new FhirContext with auto-detected version [{}]. It is recommended to explicitly select a version for future compatibility by invoking FhirContext.forDstuX()",
myVersion.getVersion().name());
myVersion.getVersion().name());
} else {
ourLog.info("Creating new FHIR context for FHIR version [{}]", myVersion.getVersion().name());
}
@ -201,13 +206,37 @@ public class FhirContext {
* When encoding resources, this setting configures the parser to include
* an entry in the resource's metadata section which indicates which profile(s) the
* resource claims to conform to. The default is {@link AddProfileTagEnum#ONLY_FOR_CUSTOM}.
*
*
* @see #setAddProfileTagWhenEncoding(AddProfileTagEnum) for more information
*/
public AddProfileTagEnum getAddProfileTagWhenEncoding() {
return myAddProfileTagWhenEncoding;
}
/**
* When encoding resources, this setting configures the parser to include
* an entry in the resource's metadata section which indicates which profile(s) the
* resource claims to conform to. The default is {@link AddProfileTagEnum#ONLY_FOR_CUSTOM}.
* <p>
* This feature is intended for situations where custom resource types are being used,
* avoiding the need to manually add profile declarations for these custom types.
* </p>
* <p>
* See <a href="http://jamesagnew.gihhub.io/hapi-fhir/doc_extensions.html">Profiling and Extensions</a>
* for more information on using custom types.
* </p>
* <p>
* Note that this feature automatically adds the profile, but leaves any profile tags
* which have been manually added in place as well.
* </p>
*
* @param theAddProfileTagWhenEncoding The add profile mode (must not be <code>null</code>)
*/
public void setAddProfileTagWhenEncoding(AddProfileTagEnum theAddProfileTagWhenEncoding) {
Validate.notNull(theAddProfileTagWhenEncoding, "theAddProfileTagWhenEncoding must not be null");
myAddProfileTagWhenEncoding = theAddProfileTagWhenEncoding;
}
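
A one-line usage sketch, using the AddProfileTagEnum constants (NEVER, ALWAYS) that accompany the ONLY_FOR_CUSTOM default:

// Assuming: FhirContext ctx = FhirContext.forDstu3();
// Never add profile declarations automatically when encoding
ctx.setAddProfileTagWhenEncoding(AddProfileTagEnum.NEVER);
// Passing null would fail the Validate.notNull check above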
Collection<RuntimeResourceDefinition> getAllResourceDefinitions() {
validateInitialized();
return myNameToResourceDefinition.values();
@ -215,7 +244,7 @@ public class FhirContext {
/**
* Returns the default resource type for the given profile
*
*
* @see #setDefaultTypeForProfile(String, Class)
*/
public Class<? extends IBaseResource> getDefaultTypeForProfile(String theProfile) {
@ -249,7 +278,9 @@ public class FhirContext {
return myNameToElementDefinition.get(theElementName.toLowerCase());
}
/** For unit tests only */
/**
* For unit tests only
*/
int getElementDefinitionCount() {
validateInitialized();
return myClassToElementDefinition.size();
@ -274,20 +305,43 @@ public class FhirContext {
return myLocalizer;
}
/**
* This feature is not yet in its final state and should be considered an internal part of HAPI for now - use with
* caution
*/
public void setLocalizer(HapiLocalizer theMessages) {
myLocalizer = theMessages;
}
public INarrativeGenerator getNarrativeGenerator() {
return myNarrativeGenerator;
}
public void setNarrativeGenerator(INarrativeGenerator theNarrativeGenerator) {
myNarrativeGenerator = theNarrativeGenerator;
}
/**
* Returns the parser options object which will be used to supply default
* options to newly created parsers
*
*
* @return The parser options - Will not return <code>null</code>
*/
public ParserOptions getParserOptions() {
return myParserOptions;
}
/**
* Sets the parser options object which will be used to supply default
* options to newly created parsers
*
* @param theParserOptions The parser options object - Must not be <code>null</code>
*/
public void setParserOptions(ParserOptions theParserOptions) {
Validate.notNull(theParserOptions, "theParserOptions must not be null");
myParserOptions = theParserOptions;
}
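
A short sketch of wiring default parser options through the context; setStripVersionsFromReferences is one ParserOptions setting, shown here as an illustration:

// Assuming: FhirContext ctx = FhirContext.forDstu3();
ParserOptions options = new ParserOptions();
options.setStripVersionsFromReferences(false); // keep version IDs on references
ctx.setParserOptions(options);
// Parsers created from here on inherit these defaults
IParser parser = ctx.newJsonParser();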
/**
* Get the configured performance options
*/
@ -295,6 +349,32 @@ public class FhirContext {
return myPerformanceOptions;
}
// /**
// * Return an unmodifiable collection containing all known resource definitions
// */
// public Collection<RuntimeResourceDefinition> getResourceDefinitions() {
//
// Set<Class<? extends IBase>> datatypes = Collections.emptySet();
// Map<Class<? extends IBase>, BaseRuntimeElementDefinition<?>> existing = Collections.emptyMap();
// HashMap<String, Class<? extends IBaseResource>> types = new HashMap<String, Class<? extends IBaseResource>>();
// ModelScanner.scanVersionPropertyFile(datatypes, types, myVersion.getVersion(), existing);
// for (int next : types.)
//
// return Collections.unmodifiableCollection(myIdToResourceDefinition.values());
// }
/**
* Sets the configured performance options
*
* @see PerformanceOptionsEnum for a list of available options
*/
public void setPerformanceOptions(Collection<PerformanceOptionsEnum> theOptions) {
myPerformanceOptions.clear();
if (theOptions != null) {
myPerformanceOptions.addAll(theOptions);
}
}
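
For illustration, enabling one of the available options (DEFERRED_MODEL_SCANNING is an existing PerformanceOptionsEnum constant):

// Assuming: FhirContext ctx = FhirContext.forDstu3();
// Defer the expensive model scan until each resource type is first used
ctx.setPerformanceOptions(PerformanceOptionsEnum.DEFERRED_MODEL_SCANNING);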
/**
* Returns the scanned runtime model for the given type. This is an advanced feature which is generally only needed
* for extending the core library.
@ -359,8 +439,12 @@ public class FhirContext {
* <p>
* Note that this method is case insensitive!
* </p>
*
* @throws DataFormatException If the resource name is not known
*/
public RuntimeResourceDefinition getResourceDefinition(String theResourceName) {
// Multiple spots in HAPI FHIR and Smile CDR depend on DataFormatException being
// thrown by this method, don't change that.
public RuntimeResourceDefinition getResourceDefinition(String theResourceName) throws DataFormatException {
validateInitialized();
Validate.notBlank(theResourceName, "theResourceName must not be blank");
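
Since callers are documented to depend on the DataFormatException contract, a hedged caller-side sketch:

// Assuming: FhirContext ctx = FhirContext.forDstu3();
try {
   RuntimeResourceDefinition def = ctx.getResourceDefinition("Patient");
   System.out.println(def.getName());
} catch (ca.uhn.fhir.parser.DataFormatException e) {
   // Reached when the resource name is unknown to this context
}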
@ -380,20 +464,6 @@ public class FhirContext {
return retVal;
}
// /**
// * Return an unmodifiable collection containing all known resource definitions
// */
// public Collection<RuntimeResourceDefinition> getResourceDefinitions() {
//
// Set<Class<? extends IBase>> datatypes = Collections.emptySet();
// Map<Class<? extends IBase>, BaseRuntimeElementDefinition<?>> existing = Collections.emptyMap();
// HashMap<String, Class<? extends IBaseResource>> types = new HashMap<String, Class<? extends IBaseResource>>();
// ModelScanner.scanVersionPropertyFile(datatypes, types, myVersion.getVersion(), existing);
// for (int next : types.)
//
// return Collections.unmodifiableCollection(myIdToResourceDefinition.values());
// }
/**
* Returns the scanned runtime model for the given type. This is an advanced feature which is generally only needed
* for extending the core library.
@ -412,10 +482,40 @@ public class FhirContext {
return myIdToResourceDefinition.values();
}
/**
* Returns an unmodifiable set containing all resource names known to this
* context
*/
public Set<String> getResourceNames() {
Set<String> resourceNames = new HashSet<>();
if (myNameToResourceDefinition.isEmpty()) {
Properties props = new Properties();
try {
props.load(myVersion.getFhirVersionPropertiesFile());
} catch (IOException theE) {
throw new ConfigurationException("Failed to load version properties file");
}
Enumeration<?> propNames = props.propertyNames();
while (propNames.hasMoreElements()) {
String next = (String) propNames.nextElement();
if (next.startsWith("resource.")) {
resourceNames.add(next.substring("resource.".length()).trim());
}
}
}
for (RuntimeResourceDefinition next : myNameToResourceDefinition.values()) {
resourceNames.add(next.getName());
}
return Collections.unmodifiableSet(resourceNames);
}
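
A small sketch of consuming the returned set:

// Assuming: FhirContext ctx = FhirContext.forDstu3();
// Unmodifiable: populated from the version properties file when the
// context has not been initialized yet, plus any scanned definitions
for (String resourceName : ctx.getResourceNames()) {
   System.out.println(resourceName);
}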
/**
* Get the restful client factory. If no factory has been set, this will be initialized with
* a new ApacheRestfulClientFactory.
*
*
* @return the factory used to create the restful clients
*/
public IRestfulClientFactory getRestfulClientFactory() {
@ -429,6 +529,16 @@ public class FhirContext {
return myRestfulClientFactory;
}
/**
* Set the restful client factory
*
* @param theRestfulClientFactory
*/
public void setRestfulClientFactory(IRestfulClientFactory theRestfulClientFactory) {
Validate.notNull(theRestfulClientFactory, "theRestfulClientFactory must not be null");
this.myRestfulClientFactory = theRestfulClientFactory;
}
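
A sketch of configuring the factory before creating clients; the timeout setters are standard IRestfulClientFactory methods:

// Assuming: FhirContext ctx = FhirContext.forDstu3();
IRestfulClientFactory factory = ctx.getRestfulClientFactory();
factory.setSocketTimeout(30 * 1000);  // milliseconds
factory.setConnectTimeout(5 * 1000);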
public RuntimeChildUndeclaredExtensionDefinition getRuntimeChildUndeclaredExtensionDefinition() {
validateInitialized();
return myRuntimeChildUndeclaredExtensionDefinition;
@ -438,7 +548,7 @@ public class FhirContext {
* Returns the validation support module configured for this context, creating a default
* implementation if no module has been passed in via the {@link #setValidationSupport(IContextValidationSupport)}
* method
*
*
* @see #setValidationSupport(IContextValidationSupport)
*/
public IContextValidationSupport<?, ?, ?, ?, ?, ?> getValidationSupport() {
@ -448,6 +558,15 @@ public class FhirContext {
return myValidationSupport;
}
/**
* Sets the validation support module to use for this context. The validation support module
* is used to supply underlying infrastructure such as conformance resources (StructureDefinition, ValueSet, etc)
* as well as to provide terminology services to modules such as the validator and FluentPath executor
*/
public void setValidationSupport(IContextValidationSupport<?, ?, ?, ?, ?, ?> theValidationSupport) {
myValidationSupport = theValidationSupport;
}
public IFhirVersion getVersion() {
return myVersion;
}
@ -455,7 +574,7 @@ public class FhirContext {
/**
* Returns <code>true</code> if any default types for specific profiles have been defined
* within this context.
*
*
* @see #setDefaultTypeForProfile(String, Class)
* @see #getDefaultTypeForProfile(String)
*/
@ -483,7 +602,7 @@ public class FhirContext {
* on a context for a previous version of fhir will result in an
* {@link UnsupportedOperationException}
* </p>
*
*
* @since 2.2
*/
public IFluentPath newFluentPath() {
@ -492,7 +611,7 @@ public class FhirContext {
/**
* Create and return a new JSON parser.
*
*
* <p>
* Thread safety: <b>Parsers are not guaranteed to be thread safe</b>. Create a new parser instance for every thread
* or every message being parsed/encoded.
@ -513,19 +632,16 @@ public class FhirContext {
* sub-interface {@link IBasicClient}). See the <a
* href="http://jamesagnew.github.io/hapi-fhir/doc_rest_client.html">RESTful Client</a> documentation for more
* information on how to define this interface.
*
*
* <p>
* Performance Note: <b>This method is cheap</b> to call, and may be called once for every operation invocation
* without incurring any performance penalty
* </p>
*
* @param theClientType
* The client type, which is an interface type to be instantiated
* @param theServerBase
* The URL of the base for the restful FHIR server to connect to
*
* @param theClientType The client type, which is an interface type to be instantiated
* @param theServerBase The URL of the base for the restful FHIR server to connect to
* @return A newly created client
* @throws ConfigurationException
* If the interface type is not an interface
* @throws ConfigurationException If the interface type is not an interface
*/
public <T extends IRestfulClient> T newRestfulClient(Class<T> theClientType, String theServerBase) {
return getRestfulClientFactory().newClient(theClientType, theServerBase);
@ -535,14 +651,13 @@ public class FhirContext {
* Instantiates a new generic client. A generic client is able to perform any of the FHIR RESTful operations against
* a compliant server, but does not have methods defining the specific functionality required (as is the case with
* {@link #newRestfulClient(Class, String) non-generic clients}).
*
*
* <p>
* Performance Note: <b>This method is cheap</b> to call, and may be called once for every operation invocation
* without incurring any performance penalty
* </p>
*
* @param theServerBase
* The URL of the base for the restful FHIR server to connect to
*
* @param theServerBase The URL of the base for the restful FHIR server to connect to
*/
public IGenericClient newRestfulGenericClient(String theServerBase) {
return getRestfulClientFactory().newGenericClient(theServerBase);
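
A minimal generic-client sketch; the server base URL and resource ID are illustrative:

// Assuming: FhirContext ctx = FhirContext.forDstu3();
IGenericClient client = ctx.newRestfulGenericClient("http://hapi.fhir.org/baseDstu3");
org.hl7.fhir.dstu3.model.Patient patient = client
   .read()
   .resource(org.hl7.fhir.dstu3.model.Patient.class)
   .withId("example")
   .execute();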
@ -569,7 +684,7 @@ public class FhirContext {
/**
* Create and return a new XML parser.
*
*
* <p>
* Thread safety: <b>Parsers are not guaranteed to be thread safe</b>. Create a new parser instance for every thread
* or every message being parsed/encoded.
@ -592,9 +707,8 @@ public class FhirContext {
* <b>THREAD SAFETY WARNING:</b> This method is not thread safe. It should be called before any
* threads are able to call any methods on this context.
* </p>
*
* @param theType
* The custom type to add (must not be <code>null</code>)
*
* @param theType The custom type to add (must not be <code>null</code>)
*/
public void registerCustomType(Class<? extends IBase> theType) {
Validate.notNull(theType, "theType must not be null");
@ -612,9 +726,8 @@ public class FhirContext {
* <b>THREAD SAFETY WARNING:</b> This method is not thread safe. It should be called before any
* threads are able to call any methods on this context.
* </p>
*
* @param theTypes
* The custom types to add (must not be <code>null</code> or contain null elements in the collection)
*
* @param theTypes The custom types to add (must not be <code>null</code> or contain null elements in the collection)
*/
public void registerCustomTypes(Collection<Class<? extends IBase>> theTypes) {
Validate.notNull(theTypes, "theTypes must not be null");
@ -698,31 +811,6 @@ public class FhirContext {
return classToElementDefinition;
}
/**
* When encoding resources, this setting configures the parser to include
* an entry in the resource's metadata section which indicates which profile(s) the
* resource claims to conform to. The default is {@link AddProfileTagEnum#ONLY_FOR_CUSTOM}.
* <p>
* This feature is intended for situations where custom resource types are being used,
* avoiding the need to manually add profile declarations for these custom types.
* </p>
* <p>
* See <a href="http://jamesagnew.gihhub.io/hapi-fhir/doc_extensions.html">Profiling and Extensions</a>
* for more information on using custom types.
* </p>
* <p>
* Note that this feature automatically adds the profile, but leaves any profile tags
* which have been manually added in place as well.
* </p>
*
* @param theAddProfileTagWhenEncoding
* The add profile mode (must not be <code>null</code>)
*/
public void setAddProfileTagWhenEncoding(AddProfileTagEnum theAddProfileTagWhenEncoding) {
Validate.notNull(theAddProfileTagWhenEncoding, "theAddProfileTagWhenEncoding must not be null");
myAddProfileTagWhenEncoding = theAddProfileTagWhenEncoding;
}
/**
* Sets the default type which will be used when parsing a resource that is found to be
* of the given profile.
@ -732,12 +820,10 @@ public class FhirContext {
* if the parser is parsing a resource and finds that it declares that it conforms to that profile,
* the <code>MyPatient</code> type will be used unless otherwise specified.
* </p>
*
* @param theProfile
* The profile string, e.g. <code>"http://example.com/some_patient_profile"</code>. Must not be
* <code>null</code> or empty.
* @param theClass
* The resource type, or <code>null</code> to clear any existing type
*
* @param theProfile The profile string, e.g. <code>"http://example.com/some_patient_profile"</code>. Must not be
* <code>null</code> or empty.
* @param theClass The resource type, or <code>null</code> to clear any existing type
*/
public void setDefaultTypeForProfile(String theProfile, Class<? extends IBaseResource> theClass) {
Validate.notBlank(theProfile, "theProfile must not be null or empty");
@ -748,56 +834,19 @@ public class FhirContext {
}
}
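
A hedged sketch tying this to the custom-type registration described earlier; MyPatient is a hypothetical custom class:

// Assuming: FhirContext ctx = FhirContext.forDstu3();
// MyPatient is hypothetical: a Patient subclass annotated with
// @ResourceDef(name = "Patient", profile = "http://example.com/some_patient_profile")
ctx.registerCustomType(MyPatient.class);
ctx.setDefaultTypeForProfile("http://example.com/some_patient_profile", MyPatient.class);
// Parsed resources declaring that profile now come back as MyPatient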
/**
* This feature is not yet in its final state and should be considered an internal part of HAPI for now - use with
* caution
*/
public void setLocalizer(HapiLocalizer theMessages) {
myLocalizer = theMessages;
}
public void setNarrativeGenerator(INarrativeGenerator theNarrativeGenerator) {
myNarrativeGenerator = theNarrativeGenerator;
}
/**
* Sets a parser error handler to use by default on all parsers
*
* @param theParserErrorHandler
* The error handler
*
* @param theParserErrorHandler The error handler
*/
public void setParserErrorHandler(IParserErrorHandler theParserErrorHandler) {
Validate.notNull(theParserErrorHandler, "theParserErrorHandler must not be null");
myParserErrorHandler = theParserErrorHandler;
}
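
For example, installing the strict handler shipped with the parser package:

// Assuming: FhirContext ctx = FhirContext.forDstu3();
// All parsers created by this context will now reject invalid input
ctx.setParserErrorHandler(new ca.uhn.fhir.parser.StrictErrorHandler());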
/**
* Sets the parser options object which will be used to supply default
* options to newly created parsers
*
* @param theParserOptions
* The parser options object - Must not be <code>null</code>
*/
public void setParserOptions(ParserOptions theParserOptions) {
Validate.notNull(theParserOptions, "theParserOptions must not be null");
myParserOptions = theParserOptions;
}
/**
* Sets the configured performance options
*
* @see PerformanceOptionsEnum for a list of available options
*/
public void setPerformanceOptions(Collection<PerformanceOptionsEnum> theOptions) {
myPerformanceOptions.clear();
if (theOptions != null) {
myPerformanceOptions.addAll(theOptions);
}
}
/**
* Sets the configured performance options
*
*
* @see PerformanceOptionsEnum for a list of available options
*/
public void setPerformanceOptions(PerformanceOptionsEnum... thePerformanceOptions) {
@ -808,26 +857,7 @@ public class FhirContext {
setPerformanceOptions(asList);
}
/**
* Set the restful client factory
*
* @param theRestfulClientFactory
*/
public void setRestfulClientFactory(IRestfulClientFactory theRestfulClientFactory) {
Validate.notNull(theRestfulClientFactory, "theRestfulClientFactory must not be null");
this.myRestfulClientFactory = theRestfulClientFactory;
}
/**
* Sets the validation support module to use for this context. The validation support module
* is used to supply underlying infrastructure such as conformance resources (StructureDefinition, ValueSet, etc)
* as well as to provide terminology services to modules such as the validator and FluentPath executor
*/
public void setValidationSupport(IContextValidationSupport<?, ?, ?, ?, ?, ?> theValidationSupport) {
myValidationSupport = theValidationSupport;
}
@SuppressWarnings({ "cast" })
@SuppressWarnings({"cast"})
private List<Class<? extends IElement>> toElementList(Collection<Class<? extends IBaseResource>> theResourceTypes) {
if (theResourceTypes == null) {
return null;
@ -858,13 +888,6 @@ public class FhirContext {
return new FhirContext(FhirVersionEnum.DSTU2);
}
/**
* Creates and returns a new FhirContext with version {@link FhirVersionEnum#DSTU2 DSTU2} (2016 May DSTU3 Snapshot)
*/
public static FhirContext forDstu2_1() {
return new FhirContext(FhirVersionEnum.DSTU2_1);
}
/**
* Creates and returns a new FhirContext with version {@link FhirVersionEnum#DSTU2_HL7ORG DSTU2} (using the Reference
* Implementation Structures)
@ -873,9 +896,16 @@ public class FhirContext {
return new FhirContext(FhirVersionEnum.DSTU2_HL7ORG);
}
/**
* Creates and returns a new FhirContext with version {@link FhirVersionEnum#DSTU2 DSTU2} (2016 May DSTU3 Snapshot)
*/
public static FhirContext forDstu2_1() {
return new FhirContext(FhirVersionEnum.DSTU2_1);
}
/**
* Creates and returns a new FhirContext with version {@link FhirVersionEnum#DSTU3 DSTU3}
*
*
* @since 1.4
*/
public static FhirContext forDstu3() {
@ -884,14 +914,13 @@ public class FhirContext {
/**
* Creates and returns a new FhirContext with version {@link FhirVersionEnum#DSTU3 DSTU3}
*
*
* @since 3.0.0
*/
public static FhirContext forR4() {
return new FhirContext(FhirVersionEnum.R4);
}
private static Collection<Class<? extends IBaseResource>> toCollection(Class<? extends IBaseResource> theResourceType) {
ArrayList<Class<? extends IBaseResource>> retVal = new ArrayList<Class<? extends IBaseResource>>(1);
retVal.add(theResourceType);
@ -909,34 +938,4 @@ public class FhirContext {
}
return retVal;
}
/**
* Returns an unmodifiable set containing all resource names known to this
* context
*/
public Set<String> getResourceNames() {
Set<String> resourceNames= new HashSet<>();
if (myNameToResourceDefinition.isEmpty()) {
Properties props = new Properties();
try {
props.load(myVersion.getFhirVersionPropertiesFile());
} catch (IOException theE) {
throw new ConfigurationException("Failed to load version properties file");
}
Enumeration<?> propNames = props.propertyNames();
while (propNames.hasMoreElements()){
String next = (String) propNames.nextElement();
if (next.startsWith("resource.")) {
resourceNames.add(next.substring("resource.".length()).trim());
}
}
}
for (RuntimeResourceDefinition next : myNameToResourceDefinition.values()) {
resourceNames.add(next.getName());
}
return Collections.unmodifiableSet(resourceNames);
}
}


@ -185,14 +185,29 @@ public class RuntimeResourceDefinition extends BaseRuntimeElementCompositeDefini
});
mySearchParams = Collections.unmodifiableList(searchParams);
Map<String, List<RuntimeSearchParam>> compartmentNameToSearchParams = new HashMap<String, List<RuntimeSearchParam>>();
Map<String, List<RuntimeSearchParam>> compartmentNameToSearchParams = new HashMap<>();
for (RuntimeSearchParam next : searchParams) {
if (next.getProvidesMembershipInCompartments() != null) {
for (String nextCompartment : next.getProvidesMembershipInCompartments()) {
if (!compartmentNameToSearchParams.containsKey(nextCompartment)) {
compartmentNameToSearchParams.put(nextCompartment, new ArrayList<RuntimeSearchParam>());
compartmentNameToSearchParams.put(nextCompartment, new ArrayList<>());
}
List<RuntimeSearchParam> searchParamsForCompartment = compartmentNameToSearchParams.get(nextCompartment);
searchParamsForCompartment.add(next);
/*
* If one search parameter marks an SP as making a resource
* a part of a compartment, let's also denote all other
* SPs with the same path the same way. This behaviour is
* used by AuthorizationInterceptor
*/
for (RuntimeSearchParam nextAlternate : searchParams) {
if (nextAlternate.getPath().equals(next.getPath())) {
if (!nextAlternate.getName().equals(next.getName())) {
searchParamsForCompartment.add(nextAlternate);
}
}
}
compartmentNameToSearchParams.get(nextCompartment).add(next);
}
}
}


@ -5,6 +5,10 @@ import static org.apache.commons.lang3.StringUtils.trim;
import java.util.*;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import org.hl7.fhir.instance.model.api.IIdType;
import ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum;
@ -38,6 +42,18 @@ public class RuntimeSearchParam {
private final RestSearchParameterTypeEnum myParamType;
private final String myPath;
private final Set<String> myTargets;
@Override
public String toString() {
return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE)
.append("base", myBase)
.append("name", myName)
.append("path", myPath)
.append("id", myId)
.append("uri", myUri)
.toString();
}
private final Set<String> myProvidesMembershipInCompartments;
private final RuntimeSearchParamStatusEnum myStatus;
private final String myUri;
@ -55,9 +71,36 @@ public class RuntimeSearchParam {
this(theId, theUri, theName, theDescription, thePath, theParamType, theCompositeOf, theProvidesMembershipInCompartments, theTargets, theStatus, null);
}
@Override
public boolean equals(Object theO) {
if (this == theO) return true;
if (theO == null || getClass() != theO.getClass()) return false;
RuntimeSearchParam that = (RuntimeSearchParam) theO;
return new EqualsBuilder()
.append(getId(), that.getId())
.append(getName(), that.getName())
.append(getPath(), that.getPath())
.append(getUri(), that.getUri())
.isEquals();
}
@Override
public int hashCode() {
return new HashCodeBuilder(17, 37)
.append(getId())
.append(getName())
.append(getPath())
.append(getUri())
.toHashCode();
}
public RuntimeSearchParam(IIdType theId, String theUri, String theName, String theDescription, String thePath, RestSearchParameterTypeEnum theParamType, List<RuntimeSearchParam> theCompositeOf,
Set<String> theProvidesMembershipInCompartments, Set<String> theTargets, RuntimeSearchParamStatusEnum theStatus, Collection<String> theBase) {
Set<String> theProvidesMembershipInCompartments, Set<String> theTargets, RuntimeSearchParamStatusEnum theStatus, Collection<String> theBase) {
super();
myId = theId;
myUri = theUri;
myName = theName;


@ -156,7 +156,12 @@ public enum EncodingEnum {
* </p>
*/
public static EncodingEnum forContentType(String theContentType) {
return ourContentTypeToEncoding.get(theContentType);
String contentTypeSplitted = getTypeWithoutCharset(theContentType);
if (contentTypeSplitted == null) {
return null;
} else {
return ourContentTypeToEncoding.get(contentTypeSplitted);
}
}
@ -170,14 +175,33 @@ public enum EncodingEnum {
* @see #forContentType(String)
*/
public static EncodingEnum forContentTypeStrict(String theContentType) {
return ourContentTypeToEncodingStrict.get(theContentType);
String contentTypeSplitted = getTypeWithoutCharset(theContentType);
if (contentTypeSplitted == null) {
return null;
} else {
return ourContentTypeToEncodingStrict.get(contentTypeSplitted);
}
}
private static String getTypeWithoutCharset(String theContentType) {
if (theContentType == null) {
return null;
} else {
String[] contentTypeSplitted = theContentType.split(";");
return contentTypeSplitted[0];
}
}
/**
* Is the given type a FHIR legacy (pre-DSTU3) content type?
*/
public static boolean isLegacy(String theFormat) {
return ourContentTypeToEncodingLegacy.containsKey(theFormat);
public static boolean isLegacy(String theContentType) {
String contentTypeSplitted = getTypeWithoutCharset(theContentType);
if (contentTypeSplitted == null) {
return false;
} else {
return ourContentTypeToEncodingLegacy.containsKey(contentTypeSplitted);
}
}
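
A hedged illustration of the charset-stripping change, assuming application/fhir+json is among the registered content types:

// Previously a charset parameter defeated the map lookup; now only the
// portion before the first ';' is used as the key
EncodingEnum e = EncodingEnum.forContentType("application/fhir+json; charset=UTF-8");
// e == EncodingEnum.JSON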


@ -9,9 +9,9 @@ package ca.uhn.fhir.util;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*
* http://www.apache.org/licenses/LICENSE-2.0
*
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@ -34,8 +34,17 @@ public class UrlPathTokenizer {
return myTok.hasMoreTokens();
}
public String nextToken() {
return UrlUtil.unescape(myTok.nextToken());
/**
* Returns the next portion. Any URL-encoding is undone, but we will
* HTML encode the &lt; and &quot; marks since they are both
not useful in URL paths in FHIR and potentially represent injection
* attacks.
*
* @see UrlUtil#sanitizeUrlPart(String)
* @see UrlUtil#unescape(String)
*/
public String nextTokenUnescapedAndSanitized() {
return UrlUtil.sanitizeUrlPart(UrlUtil.unescape(myTok.nextToken()));
}
}
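
A usage sketch of the renamed method, assuming the tokenizer splits the path on '/'; the request path is illustrative:

// Each segment is URL-decoded first, then '<' and '"' are HTML-encoded
UrlPathTokenizer tok = new UrlPathTokenizer("Patient/123%3Cscript");
while (tok.hasMoreTokens()) {
   System.out.println(tok.nextTokenUnescapedAndSanitized());
}
// Prints: Patient
// Prints: 123&lt;script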


@ -25,9 +25,9 @@ import static org.apache.commons.lang3.StringUtils.isBlank;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*
* http://www.apache.org/licenses/LICENSE-2.0
*
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@ -70,7 +70,7 @@ public class UrlUtil {
return theExtensionUrl;
}
if (theExtensionUrl == null) {
return theExtensionUrl;
return null;
}
int parentLastSlashIdx = theParentExtensionUrl.lastIndexOf('/');
@ -119,6 +119,18 @@ public class UrlUtil {
return value.startsWith("http://") || value.startsWith("https://");
}
public static boolean isNeedsSanitization(String theString) {
if (theString != null) {
for (int i = 0; i < theString.length(); i++) {
char nextChar = theString.charAt(i);
if (nextChar == '<' || nextChar == '"') {
return true;
}
}
}
return false;
}
public static boolean isValid(String theUrl) {
if (theUrl == null || theUrl.length() < 8) {
return false;
@ -164,7 +176,7 @@ public class UrlUtil {
}
public static Map<String, String[]> parseQueryString(String theQueryString) {
HashMap<String, List<String>> map = new HashMap<String, List<String>>();
HashMap<String, List<String>> map = new HashMap<>();
parseQueryString(theQueryString, map);
return toQueryStringMap(map);
}
@ -197,17 +209,13 @@ public class UrlUtil {
nextKey = unescape(nextKey);
nextValue = unescape(nextValue);
List<String> list = map.get(nextKey);
if (list == null) {
list = new ArrayList<>();
map.put(nextKey, list);
}
List<String> list = map.computeIfAbsent(nextKey, k -> new ArrayList<>());
list.add(nextValue);
}
}
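
A small sketch of the multimap behaviour this method implements:

// Repeated keys accumulate; values are returned as String arrays
Map<String, String[]> params = UrlUtil.parseQueryString("name=smith&name=jones&_count=10");
// params.get("name")   -> {"smith", "jones"}
// params.get("_count") -> {"10"}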
public static Map<String, String[]> parseQueryStrings(String... theQueryString) {
HashMap<String, List<String>> map = new HashMap<String, List<String>>();
HashMap<String, List<String>> map = new HashMap<>();
for (String next : theQueryString) {
parseQueryString(next, map);
}
@ -222,7 +230,6 @@ public class UrlUtil {
* <li>[Resource Type]/[Resource ID]/_history/[Version ID]
* </ul>
*/
//@formatter:on
public static UrlParts parseUrl(String theUrl) {
String url = theUrl;
UrlParts retVal = new UrlParts();
@ -243,7 +250,7 @@ public class UrlUtil {
retVal.setVersionId(id.getVersionIdPart());
return retVal;
}
if (url.matches("\\/[a-zA-Z]+\\?.*")) {
if (url.matches("/[a-zA-Z]+\\?.*")) {
url = url.substring(1);
}
int nextStart = 0;
@ -282,12 +289,47 @@ public class UrlUtil {
}
//@formatter:off
/**
* This method specifically HTML-encodes the &quot; and
* &lt; characters in order to prevent injection attacks
*/
public static String sanitizeUrlPart(String theString) {
if (theString == null) {
return null;
}
boolean needsSanitization = isNeedsSanitization(theString);
if (needsSanitization) {
// Ok, we're sanitizing
StringBuilder buffer = new StringBuilder(theString.length() + 10);
for (int j = 0; j < theString.length(); j++) {
char nextChar = theString.charAt(j);
switch (nextChar) {
case '"':
buffer.append("&quot;");
break;
case '<':
buffer.append("&lt;");
break;
default:
buffer.append(nextChar);
break;
}
} // for build escaped string
return buffer.toString();
}
return theString;
}
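
A worked example of the two escapes this method performs:

// Only '"' and '<' are rewritten; other characters pass through
String safe = UrlUtil.sanitizeUrlPart("name=\"<script>\"");
// safe is: name=&quot;&lt;script>&quot;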
private static Map<String, String[]> toQueryStringMap(HashMap<String, List<String>> map) {
HashMap<String, String[]> retVal = new HashMap<String, String[]>();
HashMap<String, String[]> retVal = new HashMap<>();
for (Entry<String, List<String>> nextEntry : map.entrySet()) {
retVal.put(nextEntry.getKey(), nextEntry.getValue().toArray(new String[nextEntry.getValue().size()]));
retVal.put(nextEntry.getKey(), nextEntry.getValue().toArray(new String[0]));
}
return retVal;
}


@ -28,9 +28,6 @@ import ca.uhn.fhir.rest.client.interceptor.LoggingInterceptor;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.hl7.fhir.dstu3.model.Parameters;
import org.hl7.fhir.dstu3.model.StringType;
import org.hl7.fhir.dstu3.model.UriType;
import org.hl7.fhir.instance.model.api.IBaseParameters;
import static org.apache.commons.lang3.StringUtils.isBlank;
@ -82,10 +79,17 @@ public class UploadTerminologyCommand extends BaseCommand {
IGenericClient client = super.newClient(theCommandLine);
IBaseParameters inputParameters;
if (ctx.getVersion().getVersion() == FhirVersionEnum.DSTU3) {
Parameters p = new Parameters();
p.addParameter().setName("url").setValue(new UriType(termUrl));
org.hl7.fhir.dstu3.model.Parameters p = new org.hl7.fhir.dstu3.model.Parameters();
p.addParameter().setName("url").setValue(new org.hl7.fhir.dstu3.model.UriType(termUrl));
for (String next : datafile) {
p.addParameter().setName("localfile").setValue(new StringType(next));
p.addParameter().setName("localfile").setValue(new org.hl7.fhir.dstu3.model.StringType(next));
}
inputParameters = p;
} else if (ctx.getVersion().getVersion() == FhirVersionEnum.R4) {
org.hl7.fhir.r4.model.Parameters p = new org.hl7.fhir.r4.model.Parameters();
p.addParameter().setName("url").setValue(new org.hl7.fhir.r4.model.UriType(termUrl));
for (String next : datafile) {
p.addParameter().setName("localfile").setValue(new org.hl7.fhir.r4.model.StringType(next));
}
inputParameters = p;
} else {


@ -31,6 +31,13 @@
<appender-ref ref="STDOUT" />
</logger>
<!--
It's useful to have this log when uploading big terminologies
-->
<logger name="ca.uhn.fhir.jpa.term.BaseHapiTerminologySvcImpl" additivity="false" level="info">
<appender-ref ref="STDOUT" />
</logger>
<root level="warn">
<appender-ref ref="STDOUT" />


@ -1103,7 +1103,7 @@ public class GenericClient extends BaseClient implements IGenericClient {
@SuppressWarnings("unchecked")
@Override
public Object execute() {
if (myOperationName != null && myOperationName.equals(Constants.EXTOP_PROCESS_MESSAGE)) {
if (myOperationName != null && myOperationName.equals(Constants.EXTOP_PROCESS_MESSAGE) && myMsgBundle != null) {
Map<String, List<String>> urlParams = new LinkedHashMap<String, List<String>>();
// Set Url parameter Async and Response-Url
if (myIsAsync != null) {


@ -57,17 +57,6 @@ abstract class BaseHttpClientInvocationWithContents extends BaseHttpClientInvoca
private IIdType myForceResourceId;
public BaseHttpClientInvocationWithContents(FhirContext theContext, IBaseResource theResource, Map<String, List<String>> theParams, String... theUrlPath) {
super(theContext);
myResource = theResource;
myUrlPath = StringUtils.join(theUrlPath, '/');
myResources = null;
myContents = null;
myParams = theParams;
myBundleType = null;
}
public BaseHttpClientInvocationWithContents(FhirContext theContext, IBaseResource theResource, String theUrlPath) {
super(theContext);
myResource = theResource;
@ -105,17 +94,6 @@ abstract class BaseHttpClientInvocationWithContents extends BaseHttpClientInvoca
myBundleType = null;
}
public BaseHttpClientInvocationWithContents(FhirContext theContext, String theContents, Map<String, List<String>> theParams, String... theUrlPath) {
super(theContext);
myResource = null;
myUrlPath = StringUtils.join(theUrlPath, '/');
myResources = null;
myContents = theContents;
myParams = theParams;
myBundleType = null;
}
@Override
public IHttpRequest asHttpRequest(String theUrlBase, Map<String, List<String>> theExtraParams, EncodingEnum theEncoding, Boolean thePrettyPrint) throws DataFormatException {
StringBuilder url = new StringBuilder();


@ -502,6 +502,7 @@
<groupId>com.github.ben-manes.caffeine</groupId>
<artifactId>caffeine</artifactId>
</dependency>
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava-testlib</artifactId>


@ -8,10 +8,10 @@ import ca.uhn.fhir.jpa.util.ResourceCountCache;
import ca.uhn.fhir.model.dstu2.composite.MetaDt;
import ca.uhn.fhir.validation.IValidatorModule;
import org.apache.commons.lang3.time.DateUtils;
import org.hl7.fhir.instance.hapi.validation.CachingValidationSupport;
import org.hl7.fhir.instance.hapi.validation.DefaultProfileValidationSupport;
import org.hl7.fhir.instance.hapi.validation.FhirInstanceValidator;
import org.hl7.fhir.instance.hapi.validation.ValidationSupportChain;
import org.hl7.fhir.instance.utils.IResourceValidator.BestPracticeWarningLevel;
import org.hl7.fhir.r4.utils.IResourceValidator;
import org.springframework.beans.factory.annotation.Autowire;
import org.springframework.context.annotation.Bean;
@ -81,7 +81,7 @@ public class BaseDstu2Config extends BaseConfig {
public IValidatorModule instanceValidatorDstu2() {
FhirInstanceValidator retVal = new FhirInstanceValidator();
retVal.setBestPracticeWarningLevel(IResourceValidator.BestPracticeWarningLevel.Warning);
retVal.setValidationSupport(new ValidationSupportChain(new DefaultProfileValidationSupport(), jpaValidationSupportDstu2()));
retVal.setValidationSupport(new CachingValidationSupport(new ValidationSupportChain(new DefaultProfileValidationSupport(), jpaValidationSupportDstu2())));
return retVal;
}
@ -91,6 +91,13 @@ public class BaseDstu2Config extends BaseConfig {
return retVal;
}
@Bean(name = "myResourceCountsCache")
public ResourceCountCache resourceCountsCache() {
ResourceCountCache retVal = new ResourceCountCache(() -> systemDaoDstu2().getResourceCounts());
retVal.setCacheMillis(60 * DateUtils.MILLIS_PER_SECOND);
return retVal;
}
@Bean(autowire = Autowire.BY_TYPE)
public IFulltextSearchSvc searchDao() {
FulltextSearchSvcImpl searchDao = new FulltextSearchSvcImpl();
@ -121,13 +128,6 @@ public class BaseDstu2Config extends BaseConfig {
return retVal;
}
@Bean(name = "myResourceCountsCache")
public ResourceCountCache resourceCountsCache() {
ResourceCountCache retVal = new ResourceCountCache(() -> systemDaoDstu2().getResourceCounts());
retVal.setCacheMillis(60 * DateUtils.MILLIS_PER_SECOND);
return retVal;
}
@Bean(autowire = Autowire.BY_TYPE)
public IHapiTerminologySvc terminologyService() {
return new HapiTerminologySvcDstu2();


@ -19,6 +19,7 @@ import ca.uhn.fhir.jpa.validation.JpaValidationSupportChainDstu3;
import ca.uhn.fhir.validation.IValidatorModule;
import org.apache.commons.lang3.time.DateUtils;
import org.hl7.fhir.dstu3.hapi.ctx.IValidationSupport;
import org.hl7.fhir.dstu3.hapi.validation.CachingValidationSupport;
import org.hl7.fhir.dstu3.hapi.validation.FhirInstanceValidator;
import org.hl7.fhir.r4.utils.IResourceValidator;
import org.springframework.beans.factory.annotation.Autowire;
@ -78,13 +79,17 @@ public class BaseDstu3Config extends BaseConfig {
return val;
}
@Bean
public JpaValidationSupportChainDstu3 jpaValidationSupportChain() {
return new JpaValidationSupportChainDstu3();
}
@Bean(name = "myJpaValidationSupportDstu3", autowire = Autowire.BY_NAME)
public ca.uhn.fhir.jpa.dao.dstu3.IJpaValidationSupportDstu3 jpaValidationSupportDstu3() {
ca.uhn.fhir.jpa.dao.dstu3.JpaValidationSupportDstu3 retVal = new ca.uhn.fhir.jpa.dao.dstu3.JpaValidationSupportDstu3();
return retVal;
}
@Bean(name = "myResourceCountsCache")
public ResourceCountCache resourceCountsCache() {
ResourceCountCache retVal = new ResourceCountCache(() -> systemDaoDstu3().getResourceCounts());
@ -142,7 +147,7 @@ public class BaseDstu3Config extends BaseConfig {
@Primary
@Bean(autowire = Autowire.BY_NAME, name = "myJpaValidationSupportChainDstu3")
public IValidationSupport validationSupportChainDstu3() {
return new JpaValidationSupportChainDstu3();
return new CachingValidationSupport(jpaValidationSupportChain());
}
}


@ -21,6 +21,7 @@ import ca.uhn.fhir.validation.IValidatorModule;
import org.apache.commons.lang3.time.DateUtils;
import org.hl7.fhir.r4.hapi.ctx.IValidationSupport;
import org.hl7.fhir.r4.hapi.rest.server.GraphQLProvider;
import org.hl7.fhir.r4.hapi.validation.CachingValidationSupport;
import org.hl7.fhir.r4.hapi.validation.FhirInstanceValidator;
import org.hl7.fhir.r4.utils.GraphQLEngine;
import org.hl7.fhir.r4.utils.IResourceValidator.BestPracticeWarningLevel;
@ -93,6 +94,11 @@ public class BaseR4Config extends BaseConfig {
return val;
}
@Bean
public JpaValidationSupportChainR4 jpaValidationSupportChain() {
return new JpaValidationSupportChainR4();
}
@Bean(name = "myJpaValidationSupportR4", autowire = Autowire.BY_NAME)
public ca.uhn.fhir.jpa.dao.r4.IJpaValidationSupportR4 jpaValidationSupportR4() {
ca.uhn.fhir.jpa.dao.r4.JpaValidationSupportR4 retVal = new ca.uhn.fhir.jpa.dao.r4.JpaValidationSupportR4();
@ -156,7 +162,7 @@ public class BaseR4Config extends BaseConfig {
@Primary
@Bean(autowire = Autowire.BY_NAME, name = "myJpaValidationSupportChainR4")
public IValidationSupport validationSupportChainR4() {
return new JpaValidationSupportChainR4();
return new CachingValidationSupport(jpaValidationSupportChain());
}
}


@ -1,25 +1,5 @@
package ca.uhn.fhir.jpa.dao;
/*
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2018 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.context.*;
import ca.uhn.fhir.jpa.dao.data.*;
import ca.uhn.fhir.jpa.entity.*;
@ -58,7 +38,6 @@ import ca.uhn.fhir.util.*;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Charsets;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.google.common.hash.HashFunction;
import com.google.common.hash.Hashing;
@ -104,6 +83,26 @@ import java.util.stream.Collectors;
import static org.apache.commons.lang3.StringUtils.*;
/*
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2018 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
@SuppressWarnings("WeakerAccess")
@Repository
public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao, ApplicationContextAware {
@ -186,6 +185,8 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
protected IResourceTableDao myResourceTableDao;
@Autowired
protected IResourceTagDao myResourceTagDao;
@Autowired
protected IResourceSearchViewDao myResourceViewDao;
@Autowired(required = true)
private DaoConfig myConfig;
private FhirContext myContext;
@ -199,8 +200,8 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
private ISearchParamPresenceSvc mySearchParamPresenceSvc;
@Autowired
private ISearchParamRegistry mySearchParamRegistry;
@Autowired
private ISearchResultDao mySearchResultDao;
//@Autowired
//private ISearchResultDao mySearchResultDao;
@Autowired
private IResourceIndexedCompositeStringUniqueDao myResourceIndexedCompositeStringUniqueDao;
private ApplicationContext myApplicationContext;
@ -227,6 +228,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
}
protected ExpungeOutcome doExpunge(String theResourceName, Long theResourceId, Long theVersion, ExpungeOptions theExpungeOptions) {
TransactionTemplate txTemplate = new TransactionTemplate(myPlatformTransactionManager);
if (!getConfig().isExpungeEnabled()) {
throw new MethodNotAllowedException("$expunge is not enabled on this server");
@ -245,32 +247,39 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
/*
* Delete historical versions of deleted resources
*/
Pageable page = new PageRequest(0, remainingCount.get());
Slice<Long> resourceIds;
if (theResourceId != null) {
resourceIds = myResourceTableDao.findIdsOfDeletedResourcesOfType(page, theResourceId, theResourceName);
} else {
if (theResourceName != null) {
resourceIds = myResourceTableDao.findIdsOfDeletedResourcesOfType(page, theResourceName);
Pageable page = PageRequest.of(0, remainingCount.get());
Slice<Long> resourceIds = txTemplate.execute(t -> {
if (theResourceId != null) {
return myResourceTableDao.findIdsOfDeletedResourcesOfType(page, theResourceId, theResourceName);
} else {
resourceIds = myResourceTableDao.findIdsOfDeletedResources(page);
if (theResourceName != null) {
return myResourceTableDao.findIdsOfDeletedResourcesOfType(page, theResourceName);
} else {
return myResourceTableDao.findIdsOfDeletedResources(page);
}
}
}
});
for (Long next : resourceIds) {
expungeHistoricalVersionsOfId(next, remainingCount);
if (remainingCount.get() <= 0) {
return toExpungeOutcome(theExpungeOptions, remainingCount);
}
txTemplate.execute(t -> {
expungeHistoricalVersionsOfId(next, remainingCount);
if (remainingCount.get() <= 0) {
return toExpungeOutcome(theExpungeOptions, remainingCount);
}
return null;
});
}
/*
* Delete current versions of deleted resources
*/
for (Long next : resourceIds) {
expungeCurrentVersionOfResource(next);
if (remainingCount.get() <= 0) {
return toExpungeOutcome(theExpungeOptions, remainingCount);
}
txTemplate.execute(t -> {
expungeCurrentVersionOfResource(next);
if (remainingCount.get() <= 0) {
return toExpungeOutcome(theExpungeOptions, remainingCount);
}
return null;
});
}
}
@ -280,22 +289,26 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
/*
* Delete historical versions of non-deleted resources
*/
Pageable page = new PageRequest(0, remainingCount.get());
Slice<Long> historicalIds;
if (theResourceId != null && theVersion != null) {
historicalIds = toSlice(myResourceHistoryTableDao.findForIdAndVersion(theResourceId, theVersion));
} else {
if (theResourceName != null) {
historicalIds = myResourceHistoryTableDao.findIdsOfPreviousVersionsOfResources(page, theResourceName);
Pageable page = PageRequest.of(0, remainingCount.get());
Slice<Long> historicalIds = txTemplate.execute(t -> {
if (theResourceId != null && theVersion != null) {
return toSlice(myResourceHistoryTableDao.findForIdAndVersion(theResourceId, theVersion));
} else {
historicalIds = myResourceHistoryTableDao.findIdsOfPreviousVersionsOfResources(page);
if (theResourceName != null) {
return myResourceHistoryTableDao.findIdsOfPreviousVersionsOfResources(page, theResourceName);
} else {
return myResourceHistoryTableDao.findIdsOfPreviousVersionsOfResources(page);
}
}
}
});
for (Long next : historicalIds) {
expungeHistoricalVersion(next);
if (remainingCount.decrementAndGet() <= 0) {
return toExpungeOutcome(theExpungeOptions, remainingCount);
}
txTemplate.execute(t -> {
expungeHistoricalVersion(next);
if (remainingCount.decrementAndGet() <= 0) {
return toExpungeOutcome(theExpungeOptions, remainingCount);
}
return null;
});
}
}
@ -315,7 +328,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
});
txTemplate.execute(t -> {
doExpungeEverythingQuery("DELETE from " + SearchParamPresent.class.getSimpleName() + " d");
doExpungeEverythingQuery("DELETE from " + SearchParam.class.getSimpleName() + " d");
doExpungeEverythingQuery("DELETE from " + ForcedId.class.getSimpleName() + " d");
doExpungeEverythingQuery("DELETE from " + ResourceIndexedSearchParamDate.class.getSimpleName() + " d");
doExpungeEverythingQuery("DELETE from " + ResourceIndexedSearchParamNumber.class.getSimpleName() + " d");
@ -704,58 +716,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
return retVal;
}
@SuppressWarnings("unchecked")
public <R extends IBaseResource> IFhirResourceDao<R> getDao(Class<R> theType) {
Map<Class<? extends IBaseResource>, IFhirResourceDao<?>> resourceTypeToDao = getDaos();
IFhirResourceDao<R> dao = (IFhirResourceDao<R>) resourceTypeToDao.get(theType);
return dao;
}
protected IFhirResourceDao<?> getDaoOrThrowException(Class<? extends IBaseResource> theClass) {
IFhirResourceDao<? extends IBaseResource> retVal = getDao(theClass);
if (retVal == null) {
List<String> supportedResourceTypes = getDaos()
.keySet()
.stream()
.map(t->myContext.getResourceDefinition(t).getName())
.sorted()
.collect(Collectors.toList());
throw new InvalidRequestException("Unable to process request, this server does not know how to handle resources of type " + getContext().getResourceDefinition(theClass).getName() + " - Can handle: " + supportedResourceTypes);
}
return retVal;
}
private Map<Class<? extends IBaseResource>, IFhirResourceDao<?>> getDaos() {
if (myResourceTypeToDao == null) {
Map<Class<? extends IBaseResource>, IFhirResourceDao<?>> resourceTypeToDao = new HashMap<>();
Map<String, IFhirResourceDao> daos = myApplicationContext.getBeansOfType(IFhirResourceDao.class, false, false);
String[] beanNames = myApplicationContext.getBeanNamesForType(IFhirResourceDao.class);
for (IFhirResourceDao<?> next : daos.values()) {
resourceTypeToDao.put(next.getResourceType(), next);
}
if (this instanceof IFhirResourceDao<?>) {
IFhirResourceDao<?> thiz = (IFhirResourceDao<?>) this;
resourceTypeToDao.put(thiz.getResourceType(), thiz);
}
myResourceTypeToDao = resourceTypeToDao;
}
return Collections.unmodifiableMap(myResourceTypeToDao);
}
@PostConstruct
public void startClearCaches() {
myResourceTypeToDao = null;
}
protected Set<ResourceIndexedSearchParamCoords> extractSearchParamCoords(ResourceTable theEntity, IBaseResource theResource) {
return mySearchParamExtractor.extractSearchParamCoords(theEntity, theResource);
}
@ -910,7 +870,8 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
param = new ResourceIndexedSearchParamQuantity();
break;
case STRING:
param = new ResourceIndexedSearchParamString();
param = new ResourceIndexedSearchParamString()
.setDaoConfig(myConfig);
break;
case TOKEN:
param = new ResourceIndexedSearchParamToken();
@ -957,18 +918,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
return myConfig;
}
@Override
public void setApplicationContext(ApplicationContext theApplicationContext) throws BeansException {
/*
* We do a null check here because Smile's module system tries to
* initialize the application context twice if two modules depend on
* the persistence module. The second time sets the dependency's appctx.
*/
if (myApplicationContext == null) {
myApplicationContext = theApplicationContext;
}
}
public void setConfig(DaoConfig theConfig) {
myConfig = theConfig;
}
@ -995,6 +944,50 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
}
}
@SuppressWarnings("unchecked")
public <R extends IBaseResource> IFhirResourceDao<R> getDao(Class<R> theType) {
Map<Class<? extends IBaseResource>, IFhirResourceDao<?>> resourceTypeToDao = getDaos();
IFhirResourceDao<R> dao = (IFhirResourceDao<R>) resourceTypeToDao.get(theType);
return dao;
}
protected IFhirResourceDao<?> getDaoOrThrowException(Class<? extends IBaseResource> theClass) {
IFhirResourceDao<? extends IBaseResource> retVal = getDao(theClass);
if (retVal == null) {
List<String> supportedResourceTypes = getDaos()
.keySet()
.stream()
.map(t -> myContext.getResourceDefinition(t).getName())
.sorted()
.collect(Collectors.toList());
throw new InvalidRequestException("Unable to process request, this server does not know how to handle resources of type " + getContext().getResourceDefinition(theClass).getName() + " - Can handle: " + supportedResourceTypes);
}
return retVal;
}
private Map<Class<? extends IBaseResource>, IFhirResourceDao<?>> getDaos() {
if (myResourceTypeToDao == null) {
Map<Class<? extends IBaseResource>, IFhirResourceDao<?>> resourceTypeToDao = new HashMap<>();
Map<String, IFhirResourceDao> daos = myApplicationContext.getBeansOfType(IFhirResourceDao.class, false, false);
String[] beanNames = myApplicationContext.getBeanNamesForType(IFhirResourceDao.class);
for (IFhirResourceDao<?> next : daos.values()) {
resourceTypeToDao.put(next.getResourceType(), next);
}
if (this instanceof IFhirResourceDao<?>) {
IFhirResourceDao<?> thiz = (IFhirResourceDao<?>) this;
resourceTypeToDao.put(thiz.getResourceType(), thiz);
}
myResourceTypeToDao = resourceTypeToDao;
}
return Collections.unmodifiableMap(myResourceTypeToDao);
}
public IResourceIndexedCompositeStringUniqueDao getResourceIndexedCompositeStringUniqueDao() {
return myResourceIndexedCompositeStringUniqueDao;
}
@ -1172,9 +1165,9 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
@Override
public SearchBuilder newSearchBuilder() {
SearchBuilder builder = new SearchBuilder(getContext(), myEntityManager, myFulltextSearchSvc, this, myResourceIndexedSearchParamUriDao,
myForcedIdDao,
myTerminologySvc, mySerarchParamRegistry);
SearchBuilder builder = new SearchBuilder(
getContext(), myEntityManager, myFulltextSearchSvc, this, myResourceIndexedSearchParamUriDao,
myForcedIdDao, myTerminologySvc, mySerarchParamRegistry, myResourceTagDao, myResourceViewDao);
return builder;
}
@ -1223,7 +1216,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
}
}
private void populateResourceIdFromEntity(BaseHasResource theEntity, final IBaseResource theResource) {
private void populateResourceIdFromEntity(IBaseResourceEntity theEntity, final IBaseResource theResource) {
IIdType id = theEntity.getIdDt();
if (getContext().getVersion().getVersion().isRi()) {
id = getContext().getVersion().newIdType().setValue(id.getValue());
@ -1308,20 +1301,24 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
}
}
// Don't keep duplicate tags
Set<ResourceTag> allTagsNew = getAllTagDefinitions(theEntity);
Set<TagDefinition> allDefsPresent = new HashSet<>();
theEntity.getTags().removeIf(theResourceTag -> !allDefsPresent.add(theResourceTag.getTag()));
allTagsNew.forEach(tag -> {
// Remove any tags that have been removed
for (ResourceTag next : allTagsOld) {
if (!allDefs.contains(next)) {
if (shouldDroppedTagBeRemovedOnUpdate(theRequest, next)) {
theEntity.getTags().remove(next);
// Don't keep duplicate tags
if (!allDefsPresent.add(tag.getTag())) {
theEntity.getTags().remove(tag);
}
// Drop any tags that have been removed
if (!allDefs.contains(tag)) {
if (shouldDroppedTagBeRemovedOnUpdate(theRequest, tag)) {
theEntity.getTags().remove(tag);
}
}
}
Set<ResourceTag> allTagsNew = getAllTagDefinitions(theEntity);
});
if (!allTagsOld.equals(allTagsNew)) {
changed = true;
}
@ -1355,7 +1352,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
}
@SuppressWarnings("unchecked")
private <R extends IBaseResource> R populateResourceMetadataHapi(Class<R> theResourceType, BaseHasResource theEntity, boolean theForHistoryOperation, IResource res) {
private <R extends IBaseResource> R populateResourceMetadataHapi(Class<R> theResourceType, IBaseResourceEntity theEntity, Collection<? extends BaseTag> theTagList, boolean theForHistoryOperation, IResource res) {
R retVal = (R) res;
if (theEntity.getDeleted() != null) {
res = (IResource) myContext.getResourceDefinition(theResourceType).newInstance();
@ -1384,7 +1381,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
ResourceMetadataKeyEnum.UPDATED.put(res, theEntity.getUpdated());
IDao.RESOURCE_PID.put(res, theEntity.getId());
Collection<? extends BaseTag> tags = theEntity.getTags();
Collection<? extends BaseTag> tags = theTagList;
if (theEntity.isHasTags()) {
TagList tagList = new TagList();
List<IBaseCoding> securityLabels = new ArrayList<>();
@ -1421,7 +1418,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
}
@SuppressWarnings("unchecked")
private <R extends IBaseResource> R populateResourceMetadataRi(Class<R> theResourceType, BaseHasResource theEntity, boolean theForHistoryOperation, IAnyResource res) {
private <R extends IBaseResource> R populateResourceMetadataRi(Class<R> theResourceType, IBaseResourceEntity theEntity, Collection<? extends BaseTag> theTagList, boolean theForHistoryOperation, IAnyResource res) {
R retVal = (R) res;
if (theEntity.getDeleted() != null) {
res = (IAnyResource) myContext.getResourceDefinition(theResourceType).newInstance();
@ -1454,7 +1451,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
res.getMeta().setLastUpdated(theEntity.getUpdatedDate());
IDao.RESOURCE_PID.put(res, theEntity.getId());
Collection<? extends BaseTag> tags = theEntity.getTags();
Collection<? extends BaseTag> tags = theTagList;
if (theEntity.isHasTags()) {
for (BaseTag next : tags) {
@ -1480,6 +1477,15 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
return retVal;
}
/**
* Subclasses may override to provide behaviour. Called when a pre-existing resource has been deleted from the database
*
* @param theEntity The resource
*/
protected void postDelete(ResourceTable theEntity) {
// nothing
}
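For illustration only (this sketch is not part of the commit; the subclass name and log message are hypothetical), a persistence subclass could hook deletions like so:
public class AuditingPatientDao extends FhirResourceDaoDstu2<Patient> {
	@Override
	protected void postDelete(ResourceTable theEntity) {
		// hypothetical audit hook, invoked after the deleted entity has been merged
		ourLog.info("Deleted resource {}", theEntity.getIdDt().getValue());
	}
}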
/**
* Subclasses may override to provide behaviour. Called when a resource has been inserted into the database for the first time.
*
@ -1536,6 +1542,18 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
return retVal;
}
@Override
public void setApplicationContext(ApplicationContext theApplicationContext) throws BeansException {
/*
* We do a null check here because Smile's module system tries to
* initialize the application context twice if two modules depend on
* the persistence module. The second time sets the dependency's appctx.
*/
if (myApplicationContext == null) {
myApplicationContext = theApplicationContext;
}
}
private void setUpdatedTime(Collection<? extends BaseResourceIndexedSearchParam> theParams, Date theUpdateTime) {
for (BaseResourceIndexedSearchParam nextSearchParam : theParams) {
nextSearchParam.setUpdated(theUpdateTime);
@ -1592,6 +1610,11 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
return false;
}
@PostConstruct
public void startClearCaches() {
myResourceTypeToDao = null;
}
private ExpungeOutcome toExpungeOutcome(ExpungeOptions theExpungeOptions, AtomicInteger theRemainingCount) {
return new ExpungeOutcome()
.setDeletedCount(theExpungeOptions.getLimit() - theRemainingCount.get());
@ -1601,28 +1624,47 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
public IBaseResource toResource(BaseHasResource theEntity, boolean theForHistoryOperation) {
RuntimeResourceDefinition type = myContext.getResourceDefinition(theEntity.getResourceType());
Class<? extends IBaseResource> resourceType = type.getImplementingClass();
return toResource(resourceType, theEntity, theForHistoryOperation);
return toResource(resourceType, theEntity, null, theForHistoryOperation);
}
@SuppressWarnings("unchecked")
@Override
public <R extends IBaseResource> R toResource(Class<R> theResourceType, BaseHasResource theEntity,
boolean theForHistoryOperation) {
public <R extends IBaseResource> R toResource(Class<R> theResourceType, IBaseResourceEntity theEntity, Collection<ResourceTag> theTagList, boolean theForHistoryOperation) {
// 1. get the resource, its encoding, and the tags if any
byte[] resourceBytes = null;
ResourceEncodingEnum resourceEncoding = null;
Collection<? extends BaseTag> myTagList = null;
ResourceHistoryTable history;
if (theEntity instanceof ResourceHistoryTable) {
history = (ResourceHistoryTable) theEntity;
ResourceHistoryTable history = (ResourceHistoryTable) theEntity;
resourceBytes = history.getResource();
resourceEncoding = history.getEncoding();
myTagList = history.getTags();
} else if (theEntity instanceof ResourceTable) {
ResourceTable resource = (ResourceTable) theEntity;
ResourceHistoryTable history = myResourceHistoryTableDao.findForIdAndVersion(theEntity.getId(), theEntity.getVersion());
if (history == null) {
return null;
}
resourceBytes = history.getResource();
resourceEncoding = history.getEncoding();
myTagList = resource.getTags();
} else if (theEntity instanceof ResourceSearchView) {
// This is the search View
ResourceSearchView myView = (ResourceSearchView) theEntity;
resourceBytes = myView.getResource();
resourceEncoding = myView.getEncoding();
if (theTagList == null)
myTagList = new HashSet<>();
else
myTagList = theTagList;
} else {
history = myResourceHistoryTableDao.findForIdAndVersion(theEntity.getId(), theEntity.getVersion());
}
if (history == null) {
// something went wrong
return null;
}
byte[] resourceBytes = history.getResource();
ResourceEncodingEnum resourceEncoding = history.getEncoding();
// 2. get The text
String resourceText = null;
switch (resourceEncoding) {
case JSON:
@ -1639,12 +1681,10 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
break;
}
/*
* Use the appropriate custom type if one is specified in the context
*/
// 3. Use the appropriate custom type if one is specified in the context
Class<R> resourceType = theResourceType;
if (myContext.hasDefaultTypeForProfile()) {
for (BaseTag nextTag : theEntity.getTags()) {
for (BaseTag nextTag : myTagList) {
if (nextTag.getTag().getTagType() == TagTypeEnum.PROFILE) {
String profile = nextTag.getTag().getCode();
if (isNotBlank(profile)) {
@ -1659,6 +1699,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
}
}
// 4. parse the text to FHIR
R retVal;
if (resourceEncoding != ResourceEncodingEnum.DEL) {
IParser parser = resourceEncoding.newParser(getContext(theEntity.getFhirVersion()));
@ -1689,15 +1730,15 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
}
// 5. fill MetaData
if (retVal instanceof IResource) {
IResource res = (IResource) retVal;
retVal = populateResourceMetadataHapi(resourceType, theEntity, theForHistoryOperation, res);
retVal = populateResourceMetadataHapi(resourceType, theEntity, myTagList, theForHistoryOperation, res);
} else {
IAnyResource res = (IAnyResource) retVal;
retVal = populateResourceMetadataRi(resourceType, theEntity, theForHistoryOperation, res);
retVal = populateResourceMetadataRi(resourceType, theEntity, myTagList, theForHistoryOperation, res);
}
return retVal;
}
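The net effect of the new overload: callers that already hold the tags (for example from the batched tag query in SearchBuilder below) can pass them in rather than triggering a lazy load per resource. A minimal usage sketch, where entity and tagMap are assumed to come from the surrounding search code:
Collection<ResourceTag> tags = tagMap.get(entity.getId());
IBaseResource resource = toResource(resourceType, entity, tags, theForHistoryOperation);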
@ -1735,8 +1776,12 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
protected ResourceTable updateEntity(RequestDetails theRequest, final IBaseResource theResource, ResourceTable
theEntity, Date theDeletedTimestampOrNull, boolean thePerformIndexing,
boolean theUpdateVersion, Date theUpdateTime, boolean theForceUpdate, boolean theCreateNewHistoryEntry) {
Validate.notNull(theEntity);
Validate.isTrue(theDeletedTimestampOrNull != null || theResource != null, "Must have either a resource[%s] or a deleted timestamp[%s] for resource PID[%s]", theDeletedTimestampOrNull != null, theResource != null, theEntity.getId());
ourLog.debug("Starting entity update");
/*
* This should be the very first thing..
*/
@ -1826,6 +1871,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
theEntity.setNarrativeTextParsedIntoWords(null);
theEntity.setContentTextParsedIntoWords(null);
theEntity.setHashSha256(null);
theEntity.setIndexStatus(INDEX_STATUS_INDEXED);
changed = populateResourceIntoEntity(theRequest, theResource, theEntity, true);
} else {
@ -2003,6 +2049,11 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
postPersist(theEntity, (T) theResource);
} else if (theEntity.getDeleted() != null) {
theEntity = myEntityManager.merge(theEntity);
postDelete(theEntity);
} else {
theEntity = myEntityManager.merge(theEntity);
@ -2014,10 +2065,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
*/
if (theCreateNewHistoryEntry) {
final ResourceHistoryTable historyEntry = theEntity.toHistory();
// if (theEntity.getVersion() > 1) {
// existing = myResourceHistoryTableDao.findForIdAndVersion(theEntity.getId(), theEntity.getVersion());
// ourLog.warn("Reusing existing history entry entity {}", theEntity.getIdDt().getValue());
// }
historyEntry.setEncoding(changed.getEncoding());
historyEntry.setResource(changed.getResource());
@ -2057,6 +2104,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
if (thePerformIndexing) {
for (ResourceIndexedSearchParamString next : removeCommon(existingStringParams, stringParams)) {
next.setDaoConfig(myConfig);
myEntityManager.remove(next);
theEntity.getParamsString().remove(next);
}
@ -2148,12 +2196,11 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
} // if thePerformIndexing
theEntity = myEntityManager.merge(theEntity);
if (theResource != null) {
populateResourceIdFromEntity(theEntity, theResource);
}
return theEntity;
}

View File

@ -50,6 +50,7 @@ import ca.uhn.fhir.rest.server.method.SearchMethodBinding;
import ca.uhn.fhir.util.*;
import org.apache.commons.lang3.Validate;
import org.hl7.fhir.instance.model.api.*;
import org.hl7.fhir.r4.model.InstantType;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Required;
import org.springframework.lang.NonNull;
@ -207,7 +208,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
StopWatch w = new StopWatch();
T resourceToDelete = toResource(myResourceType, entity, false);
T resourceToDelete = toResource(myResourceType, entity, null, false);
// Notify IServerOperationInterceptors about pre-action call
if (theRequest != null) {
@ -289,7 +290,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
ResourceTable entity = myEntityManager.find(ResourceTable.class, pid);
deletedResources.add(entity);
T resourceToDelete = toResource(myResourceType, entity, false);
T resourceToDelete = toResource(myResourceType, entity, null, false);
// Notify IServerOperationInterceptors about pre-action call
if (theRequest != null) {
@ -394,16 +395,6 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
"This server cannot create an entity with a user-specified numeric ID - Client should not specify an ID when creating a new resource, or should include at least one letter in the ID to force a client-defined ID");
}
createForcedIdIfNeeded(entity, theResource.getIdElement());
if (entity.getForcedId() != null) {
try {
translateForcedIdToPid(getResourceName(), theResource.getIdElement().getIdPart());
throw new UnprocessableEntityException(getContext().getLocalizer().getMessage(BaseHapiFhirResourceDao.class, "duplicateCreateForcedId", theResource.getIdElement().getIdPart()));
} catch (ResourceNotFoundException e) {
// good, this ID doesn't exist so we can create it
}
}
}
// Notify interceptors
@ -517,6 +508,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
}
@Override
@Transactional(propagation = Propagation.NEVER)
public ExpungeOutcome expunge(IIdType theId, ExpungeOptions theExpungeOptions) {
BaseHasResource entity = readEntity(theId);
if (theId.hasVersionIdPart()) {
@ -532,6 +524,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
}
@Override
@Transactional(propagation = Propagation.NEVER)
public ExpungeOutcome expunge(ExpungeOptions theExpungeOptions) {
ourLog.info("Beginning TYPE[{}] expunge operation", getResourceName());
@ -854,16 +847,10 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
BaseHasResource entity = readEntity(theId);
validateResourceType(entity);
T retVal = toResource(myResourceType, entity, false);
T retVal = toResource(myResourceType, entity, null, false);
IPrimitiveType<Date> deleted;
if (retVal instanceof IResource) {
deleted = ResourceMetadataKeyEnum.DELETED_AT.get((IResource) retVal);
} else {
deleted = ResourceMetadataKeyEnum.DELETED_AT.get((IAnyResource) retVal);
}
if (deleted != null && !deleted.isEmpty()) {
throw new ResourceGoneException("Resource was deleted at " + deleted.getValueAsString());
if (entity.getDeleted() != null) {
throw new ResourceGoneException("Resource was deleted at " + new InstantType(entity.getDeleted()).getValueAsString());
}
ourLog.debug("Processed read on {} in {}ms", theId.getValue(), w.getMillisAndRestart());
@ -930,10 +917,14 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
@Override
public void reindex(T theResource, ResourceTable theEntity) {
ourLog.debug("Indexing resource {} - PID {}", theResource.getIdElement().getValue(), theEntity.getId());
CURRENTLY_REINDEXING.put(theResource, Boolean.TRUE);
updateEntity(null, theResource, theEntity, null, true, false, theEntity.getUpdatedDate(), true, false);
CURRENTLY_REINDEXING.put(theResource, null);
ourLog.debug("Indexing resource {} - PID {}", theEntity.getIdDt().getValue(), theEntity.getId());
if (theResource != null) {
CURRENTLY_REINDEXING.put(theResource, Boolean.TRUE);
}
updateEntity(null, theResource, theEntity, theEntity.getDeleted(), true, false, theEntity.getUpdatedDate(), true, false);
if (theResource != null) {
CURRENTLY_REINDEXING.put(theResource, null);
}
}
@Override
@ -1065,6 +1056,11 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
mySecondaryPrimaryKeyParamName = theSecondaryPrimaryKeyParamName;
}
@PostConstruct
public void start() {
ourLog.debug("Starting resource DAO for type: {}", getResourceName());
}
protected <MT extends IBaseMetaType> MT toMetaDt(Class<MT> theType, Collection<TagDefinition> tagDefinitions) {
MT retVal;
try {
@ -1205,7 +1201,9 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
}
} else {
/*
* Note: resourcdeId will not be null or empty here, because we check it and reject requests in BaseOutcomeReturningMethodBindingWithResourceParam
* Note: resourceId will not be null or empty here, because we
* check it and reject requests in
* BaseOutcomeReturningMethodBindingWithResourceParam
*/
resourceId = theResource.getIdElement();
@ -1336,9 +1334,4 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
}
}
@PostConstruct
public void start() {
ourLog.info("Starting resource DAO for type: {}", getResourceName());
}
}

View File

@ -257,7 +257,7 @@ public abstract class BaseHapiFhirSystemDao<T, MT> extends BaseHapiFhirDao<IBase
@Transactional(propagation = Propagation.NEVER)
public Integer performReindexingPass(final Integer theCount) {
if (!myReindexLock.tryLock()) {
return null;
return -1;
}
try {
return doPerformReindexingPass(theCount);
@ -305,7 +305,8 @@ public abstract class BaseHapiFhirSystemDao<T, MT> extends BaseHapiFhirDao<IBase
final IBaseResource resource = toResource(resourceTable, false);
@SuppressWarnings("rawtypes") final IFhirResourceDao dao = getDaoOrThrowException(resource.getClass());
Class<? extends IBaseResource> resourceClass = getContext().getResourceDefinition(resourceTable.getResourceType()).getImplementingClass();
@SuppressWarnings("rawtypes") final IFhirResourceDao dao = getDaoOrThrowException(resourceClass);
dao.reindex(resource, resourceTable);
return null;

View File

@ -20,45 +20,43 @@ package ca.uhn.fhir.jpa.dao;
* #L%
*/
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.regex.Pattern;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.context.RuntimeSearchParam;
import ca.uhn.fhir.util.FhirTerser;
import com.google.common.annotations.VisibleForTesting;
import org.apache.commons.lang3.ObjectUtils;
import org.hl7.fhir.instance.model.api.IBaseDatatype;
import org.hl7.fhir.instance.model.api.IBaseExtension;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.springframework.beans.factory.annotation.Autowired;
import com.google.common.annotations.VisibleForTesting;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.context.RuntimeSearchParam;
import ca.uhn.fhir.util.FhirTerser;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.regex.Pattern;
public abstract class BaseSearchParamExtractor implements ISearchParamExtractor {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(BaseSearchParamExtractor.class);
public static final Pattern SPLIT = Pattern.compile("\\||( or )");
public static final Pattern SPLIT = Pattern.compile("\\||( or )");
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(BaseSearchParamExtractor.class);
@Autowired
private FhirContext myContext;
@Autowired
private DaoConfig myDaoConfig;
@Autowired
private ISearchParamRegistry mySearchParamRegistry;
public BaseSearchParamExtractor() {
super();
}
public BaseSearchParamExtractor(FhirContext theCtx, ISearchParamRegistry theSearchParamRegistry) {
public BaseSearchParamExtractor(DaoConfig theDaoConfig, FhirContext theCtx, ISearchParamRegistry theSearchParamRegistry) {
myContext = theCtx;
mySearchParamRegistry = theSearchParamRegistry;
myDaoConfig = theDaoConfig;
}
@Override
public List<PathAndRef> extractResourceLinks(IBaseResource theResource, RuntimeSearchParam theNextSpDef) {
List<PathAndRef> refs = new ArrayList<PathAndRef>();
@ -95,20 +93,24 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor
}
} catch (Exception e) {
RuntimeResourceDefinition def = myContext.getResourceDefinition(theResource);
ourLog.warn("Failed to index values from path[{}] in resource type[{}]: {}", new Object[] { nextPathTrimmed, def.getName(), e.toString(), e } );
ourLog.warn("Failed to index values from path[{}] in resource type[{}]: {}", new Object[] {nextPathTrimmed, def.getName(), e.toString(), e});
}
}
return values;
}
protected FhirContext getContext() {
return myContext;
}
public DaoConfig getDaoConfig() {
return myDaoConfig;
}
public Collection<RuntimeSearchParam> getSearchParams(IBaseResource theResource) {
RuntimeResourceDefinition def = getContext().getResourceDefinition(theResource);
Collection<RuntimeSearchParam> retVal = mySearchParamRegistry.getActiveSearchParams(def.getName()).values();
List<RuntimeSearchParam> defaultList= Collections.emptyList();
List<RuntimeSearchParam> defaultList = Collections.emptyList();
retVal = ObjectUtils.defaultIfNull(retVal, defaultList);
return retVal;
}

View File

@ -89,7 +89,7 @@ public class DaoConfig {
/**
* update setter javadoc if default changes
*/
private boolean myAllowContainsSearches = true;
private boolean myAllowContainsSearches = false;
/**
* update setter javadoc if default changes
@ -754,7 +754,15 @@ public class DaoConfig {
* If enabled, the server will support the use of :contains searches,
* which are helpful but can have adverse effects on performance.
* <p>
* Default is <code>true</code>
* Default is <code>false</code> (Note that prior to HAPI FHIR
* 3.5.0 the default was <code>true</code>)
* </p>
* <p>
* Note: If you change this value after data has already
* been stored in the database, you must force a reindexing
* of all data in the database or resources may not be
* searchable.
* </p>
*/
public boolean isAllowContainsSearches() {
return myAllowContainsSearches;
@ -764,12 +772,21 @@ public class DaoConfig {
* If enabled, the server will support the use of :contains searches,
* which are helpful but can have adverse effects on performance.
* <p>
* Default is <code>true</code>
* Default is <code>false</code> (Note that prior to HAPI FHIR
* 3.5.0 the default was <code>true</code>)
* </p>
* <p>
* Note: If you change this value after data has already
* been stored in the database, you must force a reindexing
* of all data in the database or resources may not be
* searchable.
* </p>
*/
public void setAllowContainsSearches(boolean theAllowContainsSearches) {
this.myAllowContainsSearches = theAllowContainsSearches;
}
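A minimal sketch of opting back in to the pre-3.5.0 behaviour (configuration wiring omitted):
DaoConfig daoConfig = new DaoConfig();
daoConfig.setAllowContainsSearches(true); // default is false as of 3.5.0
// Any data stored while this was false must be reindexed before
// :contains searches will match it.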
/**
* If set to <code>true</code> (default is <code>false</code>) the server will allow
* resources to have references to external servers. For example if this server is

View File

@ -0,0 +1,38 @@
package ca.uhn.fhir.jpa.dao;
/*
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2018 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.model.dstu2.resource.MessageHeader;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.exceptions.NotImplementedOperationException;
import org.hl7.fhir.instance.model.api.IBaseBundle;
public class FhirResourceDaoMessageHeaderDstu2 extends FhirResourceDaoDstu2<MessageHeader> implements IFhirResourceDaoMessageHeader<MessageHeader> {
@Override
public IBaseBundle messageHeaderProcessMessage(RequestDetails theRequestDetails, IBaseBundle theMessage) {
return FhirResourceDaoMessageHeaderDstu2.throwProcessMessageNotImplemented();
}
public static IBaseBundle throwProcessMessageNotImplemented() {
throw new NotImplementedOperationException("This operation is not yet implemented on this server");
}
}

View File

@ -28,6 +28,7 @@ import java.util.*;
import javax.annotation.PostConstruct;
import org.apache.commons.codec.binary.StringUtils;
import org.hl7.fhir.instance.hapi.validation.CachingValidationSupport;
import org.hl7.fhir.instance.hapi.validation.DefaultProfileValidationSupport;
import org.hl7.fhir.instance.hapi.validation.ValidationSupportChain;
import org.hl7.fhir.instance.model.api.IIdType;
@ -62,7 +63,7 @@ public class FhirResourceDaoValueSetDstu2 extends FhirResourceDaoDstu2<ValueSet>
@Qualifier("myFhirContextDstu2Hl7Org")
private FhirContext myRiCtx;
private ValidationSupportChain myValidationSupport;
private CachingValidationSupport myValidationSupport;
private void addCompose(String theFilter, ValueSet theValueSetToPopulate, ValueSet theSourceValueSet, CodeSystemConcept theConcept) {
if (isBlank(theFilter)) {
@ -252,7 +253,7 @@ public class FhirResourceDaoValueSetDstu2 extends FhirResourceDaoDstu2<ValueSet>
public void postConstruct() {
super.postConstruct();
myDefaultProfileValidationSupport = new DefaultProfileValidationSupport();
myValidationSupport = new ValidationSupportChain(myDefaultProfileValidationSupport, myJpaValidationSupport);
myValidationSupport = new CachingValidationSupport(new ValidationSupportChain(myDefaultProfileValidationSupport, myJpaValidationSupport));
}
@Override

View File

@ -1,15 +1,18 @@
package ca.uhn.fhir.jpa.dao;
import java.util.Collection;
import java.util.Set;
import org.hl7.fhir.instance.model.api.IBaseResource;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.context.RuntimeSearchParam;
import ca.uhn.fhir.jpa.entity.BaseHasResource;
import ca.uhn.fhir.jpa.entity.IBaseResourceEntity;
import ca.uhn.fhir.jpa.entity.ResourceTable;
import ca.uhn.fhir.jpa.entity.ResourceTag;
import ca.uhn.fhir.jpa.search.PersistedJpaBundleProvider;
import org.hl7.fhir.instance.model.api.IBaseResource;
import java.util.Collection;
import java.util.Set;
/*
* #%L
@ -56,6 +59,6 @@ public interface IDao {
IBaseResource toResource(BaseHasResource theEntity, boolean theForHistoryOperation);
<R extends IBaseResource> R toResource(Class<R> theResourceType, BaseHasResource theEntity, boolean theForHistoryOperation);
<R extends IBaseResource> R toResource(Class<R> theResourceType, IBaseResourceEntity theEntity, Collection<ResourceTag> theTagList, boolean theForHistoryOperation);
}

View File

@ -0,0 +1,31 @@
package ca.uhn.fhir.jpa.dao;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import org.hl7.fhir.instance.model.api.IBaseBundle;
import org.hl7.fhir.instance.model.api.IBaseResource;
/*
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2018 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
public interface IFhirResourceDaoMessageHeader<T extends IBaseResource> extends IFhirResourceDao<T> {
IBaseBundle messageHeaderProcessMessage(RequestDetails theRequestDetails, IBaseBundle theMessage);
}

View File

@ -23,6 +23,8 @@ package ca.uhn.fhir.jpa.dao;
import ca.uhn.fhir.context.*;
import ca.uhn.fhir.jpa.dao.data.IForcedIdDao;
import ca.uhn.fhir.jpa.dao.data.IResourceIndexedSearchParamUriDao;
import ca.uhn.fhir.jpa.dao.data.IResourceSearchViewDao;
import ca.uhn.fhir.jpa.dao.data.IResourceTagDao;
import ca.uhn.fhir.jpa.entity.*;
import ca.uhn.fhir.jpa.search.JpaRuntimeSearchParam;
import ca.uhn.fhir.jpa.term.IHapiTerminologySvc;
@ -53,7 +55,6 @@ import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import org.apache.commons.lang3.ObjectUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
@ -61,6 +62,8 @@ import org.apache.commons.lang3.tuple.Pair;
import org.hibernate.ScrollMode;
import org.hibernate.ScrollableResults;
import org.hibernate.query.Query;
import org.hibernate.query.criteria.internal.CriteriaBuilderImpl;
import org.hibernate.query.criteria.internal.predicate.BooleanStaticAssertionPredicate;
import org.hl7.fhir.dstu3.model.BaseResource;
import org.hl7.fhir.instance.model.api.IAnyResource;
import org.hl7.fhir.instance.model.api.IBaseResource;
@ -69,7 +72,6 @@ import org.hl7.fhir.instance.model.api.IIdType;
import javax.persistence.EntityManager;
import javax.persistence.TypedQuery;
import javax.persistence.criteria.*;
import javax.persistence.criteria.CriteriaBuilder.In;
import java.math.BigDecimal;
import java.math.MathContext;
import java.util.*;
@ -108,12 +110,17 @@ public class SearchBuilder implements ISearchBuilder {
private IHapiTerminologySvc myTerminologySvc;
private int myFetchSize;
protected IResourceTagDao myResourceTagDao;
protected IResourceSearchViewDao myResourceSearchViewDao;
/**
* Constructor
*/
public SearchBuilder(FhirContext theFhirContext, EntityManager theEntityManager, IFulltextSearchSvc theFulltextSearchSvc,
BaseHapiFhirDao<?> theDao,
IResourceIndexedSearchParamUriDao theResourceIndexedSearchParamUriDao, IForcedIdDao theForcedIdDao, IHapiTerminologySvc theTerminologySvc, ISearchParamRegistry theSearchParamRegistry) {
public SearchBuilder(FhirContext theFhirContext, EntityManager theEntityManager,
IFulltextSearchSvc theFulltextSearchSvc, BaseHapiFhirDao<?> theDao,
IResourceIndexedSearchParamUriDao theResourceIndexedSearchParamUriDao, IForcedIdDao theForcedIdDao,
IHapiTerminologySvc theTerminologySvc, ISearchParamRegistry theSearchParamRegistry,
IResourceTagDao theResourceTagDao, IResourceSearchViewDao theResourceViewDao) {
myContext = theFhirContext;
myEntityManager = theEntityManager;
myFulltextSearchSvc = theFulltextSearchSvc;
@ -122,6 +129,8 @@ public class SearchBuilder implements ISearchBuilder {
myForcedIdDao = theForcedIdDao;
myTerminologySvc = theTerminologySvc;
mySearchParamRegistry = theSearchParamRegistry;
myResourceTagDao = theResourceTagDao;
myResourceSearchViewDao = theResourceViewDao;
}
private void addPredicateComposite(String theResourceName, RuntimeSearchParam theParamDef, List<? extends IQueryParameterType> theNextAnd) {
@ -257,7 +266,7 @@ public class SearchBuilder implements ISearchBuilder {
return;
}
List<Predicate> codePredicates = new ArrayList<Predicate>();
List<Predicate> codePredicates = new ArrayList<>();
for (IQueryParameterType nextOr : theList) {
IQueryParameterType params = nextOr;
@ -273,8 +282,9 @@ public class SearchBuilder implements ISearchBuilder {
ParamPrefixEnum prefix = ObjectUtils.defaultIfNull(param.getPrefix(), ParamPrefixEnum.EQUAL);
String invalidMessageName = "invalidNumberPrefix";
Predicate num = createPredicateNumeric(theResourceName, theParamName, join, myBuilder, params, prefix, value, fromObj, invalidMessageName);
codePredicates.add(num);
Predicate predicateNumeric = createPredicateNumeric(theResourceName, theParamName, join, myBuilder, params, prefix, value, fromObj, invalidMessageName);
Predicate predicateOuter = combineParamIndexPredicateWithParamNamePredicate(theResourceName, theParamName, join, predicateNumeric);
codePredicates.add(predicateOuter);
} else {
throw new IllegalArgumentException("Invalid token type: " + params.getClass());
@ -287,11 +297,10 @@ public class SearchBuilder implements ISearchBuilder {
private void addPredicateParamMissing(String theResourceName, String theParamName, boolean theMissing) {
Join<ResourceTable, SearchParamPresent> paramPresentJoin = myResourceTableRoot.join("mySearchParamPresents", JoinType.LEFT);
Join<SearchParamPresent, SearchParam> paramJoin = paramPresentJoin.join("mySearchParam", JoinType.LEFT);
myPredicates.add(myBuilder.equal(paramJoin.get("myResourceName"), theResourceName));
myPredicates.add(myBuilder.equal(paramJoin.get("myParamName"), theParamName));
myPredicates.add(myBuilder.equal(paramPresentJoin.get("myPresent"), !theMissing));
Expression<Long> hashPresence = paramPresentJoin.get("myHashPresence").as(Long.class);
Long hash = SearchParamPresent.calculateHashPresence(theResourceName, theParamName, !theMissing);
myPredicates.add(myBuilder.equal(hashPresence, hash));
}
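The :missing check is thus reduced to one comparison against a precomputed hash column; for example (values illustrative):
Long hash = SearchParamPresent.calculateHashPresence("Patient", "organization", true);
// matches rows recording that 'organization' is present on a Patient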
private void addPredicateParamMissing(String theResourceName, String theParamName, boolean theMissing, Join<ResourceTable, ? extends BaseResourceIndexedSearchParam> theJoin) {
@ -309,7 +318,7 @@ public class SearchBuilder implements ISearchBuilder {
return;
}
List<Predicate> codePredicates = new ArrayList<Predicate>();
List<Predicate> codePredicates = new ArrayList<>();
for (IQueryParameterType nextOr : theList) {
Predicate singleCode = createPredicateQuantity(nextOr, theResourceName, theParamName, myBuilder, join);
@ -332,7 +341,7 @@ public class SearchBuilder implements ISearchBuilder {
Join<ResourceTable, ResourceLink> join = createOrReuseJoin(JoinEnum.REFERENCE, theParamName);
List<Predicate> codePredicates = new ArrayList<Predicate>();
List<Predicate> codePredicates = new ArrayList<>();
for (IQueryParameterType nextOr : theList) {
@ -429,7 +438,7 @@ public class SearchBuilder implements ISearchBuilder {
} else {
RuntimeResourceDefinition resDef = myContext.getResourceDefinition(ref.getResourceType());
resourceTypes = new ArrayList<Class<? extends IBaseResource>>(1);
resourceTypes = new ArrayList<>(1);
resourceTypes.add(resDef.getImplementingClass());
resourceId = ref.getIdPart();
}
@ -474,7 +483,7 @@ public class SearchBuilder implements ISearchBuilder {
IQueryParameterType chainValue;
if (remainingChain != null) {
if (param == null || param.getParamType() != RestSearchParameterTypeEnum.REFERENCE) {
ourLog.debug("Type {} parameter {} is not a reference, can not chain {}", new Object[] {nextType.getSimpleName(), chain, remainingChain});
ourLog.debug("Type {} parameter {} is not a reference, can not chain {}", nextType.getSimpleName(), chain, remainingChain);
continue;
}
@ -495,7 +504,7 @@ public class SearchBuilder implements ISearchBuilder {
Root<ResourceTable> subQfrom = subQ.from(ResourceTable.class);
subQ.select(subQfrom.get("myId").as(Long.class));
List<List<? extends IQueryParameterType>> andOrParams = new ArrayList<List<? extends IQueryParameterType>>();
List<List<? extends IQueryParameterType>> andOrParams = new ArrayList<>();
andOrParams.add(Collections.singletonList(chainValue));
/*
@ -546,7 +555,7 @@ public class SearchBuilder implements ISearchBuilder {
private void addPredicateResourceId(List<List<? extends IQueryParameterType>> theValues) {
for (List<? extends IQueryParameterType> nextValue : theValues) {
Set<Long> orPids = new HashSet<Long>();
Set<Long> orPids = new HashSet<>();
for (IQueryParameterType next : nextValue) {
String value = next.getValueAsQueryToken(myContext);
if (value != null && value.startsWith("|")) {
@ -594,10 +603,9 @@ public class SearchBuilder implements ISearchBuilder {
return;
}
List<Predicate> codePredicates = new ArrayList<Predicate>();
List<Predicate> codePredicates = new ArrayList<>();
for (IQueryParameterType nextOr : theList) {
IQueryParameterType theParameter = nextOr;
Predicate singleCode = createPredicateString(theParameter, theResourceName, theParamName, myBuilder, join);
Predicate singleCode = createPredicateString(nextOr, theResourceName, theParamName, myBuilder, join);
codePredicates.add(singleCode);
}
@ -742,7 +750,7 @@ public class SearchBuilder implements ISearchBuilder {
return;
}
List<Predicate> codePredicates = new ArrayList<Predicate>();
List<Predicate> codePredicates = new ArrayList<>();
for (IQueryParameterType nextOr : theList) {
if (nextOr instanceof TokenParam) {
@ -785,7 +793,6 @@ public class SearchBuilder implements ISearchBuilder {
continue;
}
Predicate predicate;
if (param.getQualifier() == UriParamQualifierEnum.ABOVE) {
/*
@ -814,14 +821,24 @@ public class SearchBuilder implements ISearchBuilder {
continue;
}
predicate = join.get("myUri").as(String.class).in(toFind);
Predicate uriPredicate = join.get("myUri").as(String.class).in(toFind);
Predicate hashAndUriPredicate = combineParamIndexPredicateWithParamNamePredicate(theResourceName, theParamName, join, uriPredicate);
codePredicates.add(hashAndUriPredicate);
} else if (param.getQualifier() == UriParamQualifierEnum.BELOW) {
predicate = myBuilder.like(join.get("myUri").as(String.class), createLeftMatchLikeExpression(value));
Predicate uriPredicate = myBuilder.like(join.get("myUri").as(String.class), createLeftMatchLikeExpression(value));
Predicate hashAndUriPredicate = combineParamIndexPredicateWithParamNamePredicate(theResourceName, theParamName, join, uriPredicate);
codePredicates.add(hashAndUriPredicate);
} else {
predicate = myBuilder.equal(join.get("myUri").as(String.class), value);
long hashUri = ResourceIndexedSearchParamUri.calculateHashUri(theResourceName, theParamName, value);
Predicate hashPredicate = myBuilder.equal(join.get("myHashUri"), hashUri);
codePredicates.add(hashPredicate);
}
codePredicates.add(predicate);
} else {
throw new IllegalArgumentException("Invalid URI type: " + nextOr.getClass());
}
@ -839,16 +856,13 @@ public class SearchBuilder implements ISearchBuilder {
}
Predicate orPredicate = myBuilder.or(toArray(codePredicates));
Predicate outerPredicate = combineParamIndexPredicateWithParamNamePredicate(theResourceName, theParamName, join, orPredicate);
myPredicates.add(outerPredicate);
myPredicates.add(orPredicate);
}
private Predicate combineParamIndexPredicateWithParamNamePredicate(String theResourceName, String theParamName, From<?, ? extends BaseResourceIndexedSearchParam> theFrom, Predicate thePredicate) {
Predicate resourceTypePredicate = myBuilder.equal(theFrom.get("myResourceType"), theResourceName);
Predicate paramNamePredicate = myBuilder.equal(theFrom.get("myParamName"), theParamName);
Predicate outerPredicate = myBuilder.and(resourceTypePredicate, paramNamePredicate, thePredicate);
return outerPredicate;
long hashIdentity = BaseResourceIndexedSearchParam.calculateHashIdentity(theResourceName, theParamName);
Predicate hashIdentityPredicate = myBuilder.equal(theFrom.get("myHashIdentity"), hashIdentity);
return myBuilder.and(hashIdentityPredicate, thePredicate);
}
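In other words, the former pair of string equalities on myResourceType and myParamName collapses into a single indexed long. For example (values illustrative):
long hashIdentity = BaseResourceIndexedSearchParam.calculateHashIdentity("Observation", "code");
// myBuilder.equal(theFrom.get("myHashIdentity"), hashIdentity) replaces both string predicates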
private Predicate createCompositeParamPart(String theResourceName, Root<ResourceTable> theRoot, RuntimeSearchParam theParam, IQueryParameterType leftValue) {
@ -1028,7 +1042,7 @@ public class SearchBuilder implements ISearchBuilder {
if (theParamName == null) {
return num;
}
return combineParamIndexPredicateWithParamNamePredicate(theResourceName, theParamName, theFrom, num);
return num;
}
private Predicate createPredicateQuantity(IQueryParameterType theParam, String theResourceName, String theParamName, CriteriaBuilder theBuilder,
@ -1054,39 +1068,31 @@ public class SearchBuilder implements ISearchBuilder {
throw new IllegalArgumentException("Invalid quantity type: " + theParam.getClass());
}
Predicate system = null;
if (!isBlank(systemValue)) {
system = theBuilder.equal(theFrom.get("mySystem"), systemValue);
}
Predicate code = null;
if (!isBlank(unitsValue)) {
code = theBuilder.equal(theFrom.get("myUnits"), unitsValue);
Predicate hashPredicate;
if (!isBlank(systemValue) && !isBlank(unitsValue)) {
long hash = ResourceIndexedSearchParamQuantity.calculateHashSystemAndUnits(theResourceName, theParamName, systemValue, unitsValue);
hashPredicate = myBuilder.equal(theFrom.get("myHashIdentitySystemAndUnits"), hash);
} else if (!isBlank(unitsValue)) {
long hash = ResourceIndexedSearchParamQuantity.calculateHashUnits(theResourceName, theParamName, unitsValue);
hashPredicate = myBuilder.equal(theFrom.get("myHashIdentityAndUnits"), hash);
} else {
long hash = BaseResourceIndexedSearchParam.calculateHashIdentity(theResourceName, theParamName);
hashPredicate = myBuilder.equal(theFrom.get("myHashIdentity"), hash);
}
cmpValue = ObjectUtils.defaultIfNull(cmpValue, ParamPrefixEnum.EQUAL);
final Expression<BigDecimal> path = theFrom.get("myValue");
String invalidMessageName = "invalidQuantityPrefix";
Predicate num = createPredicateNumeric(theResourceName, null, theFrom, theBuilder, theParam, cmpValue, valueValue, path, invalidMessageName);
Predicate numericPredicate = createPredicateNumeric(theResourceName, null, theFrom, theBuilder, theParam, cmpValue, valueValue, path, invalidMessageName);
Predicate singleCode;
if (system == null && code == null) {
singleCode = num;
} else if (system == null) {
singleCode = theBuilder.and(code, num);
} else if (code == null) {
singleCode = theBuilder.and(system, num);
} else {
singleCode = theBuilder.and(system, code, num);
}
return combineParamIndexPredicateWithParamNamePredicate(theResourceName, theParamName, theFrom, singleCode);
return theBuilder.and(hashPredicate, numericPredicate);
}
private Predicate createPredicateString(IQueryParameterType theParameter, String theResourceName, String theParamName, CriteriaBuilder theBuilder,
From<?, ResourceIndexedSearchParamString> theFrom) {
String rawSearchTerm;
DaoConfig daoConfig = myCallingDao.getConfig();
if (theParameter instanceof TokenParam) {
TokenParam id = (TokenParam) theParameter;
if (!id.isText()) {
@ -1097,7 +1103,7 @@ public class SearchBuilder implements ISearchBuilder {
StringParam id = (StringParam) theParameter;
rawSearchTerm = id.getValue();
if (id.isContains()) {
if (!myCallingDao.getConfig().isAllowContainsSearches()) {
if (!daoConfig.isAllowContainsSearches()) {
throw new MethodNotAllowedException(":contains modifier is disabled on this server");
}
}
@ -1113,22 +1119,34 @@ public class SearchBuilder implements ISearchBuilder {
+ ResourceIndexedSearchParamString.MAX_LENGTH + "): " + rawSearchTerm);
}
String likeExpression = BaseHapiFhirDao.normalizeString(rawSearchTerm);
if (theParameter instanceof StringParam &&
((StringParam) theParameter).isContains() &&
myCallingDao.getConfig().isAllowContainsSearches()) {
likeExpression = createLeftAndRightMatchLikeExpression(likeExpression);
boolean exactMatch = theParameter instanceof StringParam && ((StringParam) theParameter).isExact();
if (exactMatch) {
// Exact match
Long hash = ResourceIndexedSearchParamString.calculateHashExact(theResourceName, theParamName, rawSearchTerm);
return theBuilder.equal(theFrom.get("myHashExact").as(Long.class), hash);
} else {
likeExpression = createLeftMatchLikeExpression(likeExpression);
}
Predicate singleCode = theBuilder.like(theFrom.get("myValueNormalized").as(String.class), likeExpression);
if (theParameter instanceof StringParam && ((StringParam) theParameter).isExact()) {
Predicate exactCode = theBuilder.equal(theFrom.get("myValueExact"), rawSearchTerm);
singleCode = theBuilder.and(singleCode, exactCode);
}
// Normalized Match
return combineParamIndexPredicateWithParamNamePredicate(theResourceName, theParamName, theFrom, singleCode);
String normalizedString = BaseHapiFhirDao.normalizeString(rawSearchTerm);
String likeExpression;
if (theParameter instanceof StringParam &&
((StringParam) theParameter).isContains() &&
daoConfig.isAllowContainsSearches()) {
likeExpression = createLeftAndRightMatchLikeExpression(normalizedString);
} else {
likeExpression = createLeftMatchLikeExpression(normalizedString);
}
Long hash = ResourceIndexedSearchParamString.calculateHashNormalized(daoConfig, theResourceName, theParamName, normalizedString);
Predicate hashCode = theBuilder.equal(theFrom.get("myHashNormalizedPrefix").as(Long.class), hash);
Predicate singleCode = theBuilder.like(theFrom.get("myValueNormalized").as(String.class), likeExpression);
return theBuilder.and(hashCode, singleCode);
}
}
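So an exact search resolves entirely through myHashExact, while an ordinary (prefix) search narrows by myHashNormalizedPrefix and finishes with the LIKE on the normalized value. A sketch with illustrative values:
// Patient?name:exact=Smith
Long exactHash = ResourceIndexedSearchParamString.calculateHashExact("Patient", "name", "Smith");
// Patient?name=smi -> normalized (roughly, upper-cased) and matched as LIKE 'SMI%'
Long prefixHash = ResourceIndexedSearchParamString.calculateHashNormalized(daoConfig, "Patient", "name", "SMI");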
private List<Predicate> createPredicateTagList(Path<TagDefinition> theDefJoin, CriteriaBuilder theBuilder, TagTypeEnum theTagType, List<Pair<String, String>> theTokens) {
@ -1183,7 +1201,7 @@ public class SearchBuilder implements ISearchBuilder {
* Process token modifiers (:in, :below, :above)
*/
List<VersionIndependentConcept> codes = null;
List<VersionIndependentConcept> codes;
if (modifier == TokenParamModifier.IN) {
codes = myTerminologySvc.expandValueSet(code);
} else if (modifier == TokenParamModifier.ABOVE) {
@ -1192,81 +1210,53 @@ public class SearchBuilder implements ISearchBuilder {
} else if (modifier == TokenParamModifier.BELOW) {
system = determineSystemIfMissing(theParamName, code, system);
codes = myTerminologySvc.findCodesBelow(system, code);
}
ArrayList<Predicate> singleCodePredicates = new ArrayList<>();
if (codes != null) {
if (codes.isEmpty()) {
// This will never match anything
Predicate codePredicate = theBuilder.isNull(theFrom.get("myMissing"));
singleCodePredicates.add(codePredicate);
} else {
List<Predicate> orPredicates = new ArrayList<Predicate>();
Map<String, List<VersionIndependentConcept>> map = new HashMap<String, List<VersionIndependentConcept>>();
for (VersionIndependentConcept nextCode : codes) {
List<VersionIndependentConcept> systemCodes = map.get(nextCode.getSystem());
if (null == systemCodes) {
systemCodes = new ArrayList<>();
map.put(nextCode.getSystem(), systemCodes);
}
systemCodes.add(nextCode);
}
// Use "in" in case of large numbers of codes due to param modifiers
final Path<String> systemExpression = theFrom.get("mySystem");
final Path<String> valueExpression = theFrom.get("myValue");
for (Map.Entry<String, List<VersionIndependentConcept>> entry : map.entrySet()) {
Predicate systemPredicate = theBuilder.equal(systemExpression, entry.getKey());
In<String> codePredicate = theBuilder.in(valueExpression);
for (VersionIndependentConcept nextCode : entry.getValue()) {
codePredicate.value(nextCode.getCode());
}
orPredicates.add(theBuilder.and(systemPredicate, codePredicate));
}
singleCodePredicates.add(theBuilder.or(orPredicates.toArray(new Predicate[orPredicates.size()])));
}
} else {
codes = Collections.singletonList(new VersionIndependentConcept(system, code));
}
/*
* Ok, this is a normal query
*/
if (codes.isEmpty()) {
// This will never match anything
return new BooleanStaticAssertionPredicate((CriteriaBuilderImpl) theBuilder, false);
}
if (StringUtils.isNotBlank(system)) {
if (modifier != null && modifier == TokenParamModifier.NOT) {
singleCodePredicates.add(theBuilder.notEqual(theFrom.get("mySystem"), system));
} else {
singleCodePredicates.add(theBuilder.equal(theFrom.get("mySystem"), system));
}
} else if (system == null) {
// don't check the system
/*
* Note: A null system value means "match any system", but
* an empty-string system value means "match values that
* explicitly have no system".
*/
boolean haveSystem = codes.get(0).getSystem() != null;
boolean haveCode = isNotBlank(codes.get(0).getCode());
Expression<Long> hashField;
if (!haveSystem && !haveCode) {
// If we have neither, this isn't actually an expression so
// just return 1=1
return new BooleanStaticAssertionPredicate((CriteriaBuilderImpl) theBuilder, true);
} else if (haveSystem && haveCode) {
hashField = theFrom.get("myHashSystemAndValue").as(Long.class);
} else if (haveSystem) {
hashField = theFrom.get("myHashSystem").as(Long.class);
} else {
hashField = theFrom.get("myHashValue").as(Long.class);
}
List<Long> values = new ArrayList<>(codes.size());
for (VersionIndependentConcept next : codes) {
if (haveSystem && haveCode) {
values.add(ResourceIndexedSearchParamToken.calculateHashSystemAndValue(theResourceName, theParamName, next.getSystem(), next.getCode()));
} else if (haveSystem) {
values.add(ResourceIndexedSearchParamToken.calculateHashSystem(theResourceName, theParamName, next.getSystem()));
} else {
// If the system is "", we only match on null systems
singleCodePredicates.add(theBuilder.isNull(theFrom.get("mySystem")));
}
if (StringUtils.isNotBlank(code)) {
if (modifier != null && modifier == TokenParamModifier.NOT) {
singleCodePredicates.add(theBuilder.notEqual(theFrom.get("myValue"), code));
} else {
singleCodePredicates.add(theBuilder.equal(theFrom.get("myValue"), code));
}
} else {
/*
* As of HAPI FHIR 1.5, if the client searched for a token with a system but no specified value this means to
* match all tokens with the given value.
*
* I'm not sure I agree with this, but hey.. FHIR-I voted and this was the result :)
*/
// singleCodePredicates.add(theBuilder.isNull(theFrom.get("myValue")));
values.add(ResourceIndexedSearchParamToken.calculateHashValue(theResourceName, theParamName, next.getCode()));
}
}
Predicate singleCode = theBuilder.and(toArray(singleCodePredicates));
return combineParamIndexPredicateWithParamNamePredicate(theResourceName, theParamName, theFrom, singleCode);
Predicate predicate = hashField.in(values);
if (modifier == TokenParamModifier.NOT) {
Predicate identityPredicate = theBuilder.equal(theFrom.get("myHashIdentity").as(Long.class), BaseResourceIndexedSearchParam.calculateHashIdentity(theResourceName, theParamName));
Predicate disjunctionPredicate = theBuilder.not(predicate);
predicate = theBuilder.and(identityPredicate, disjunctionPredicate);
}
return predicate;
}
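Concretely, the three token query shapes map onto the three hash columns (values illustrative):
// identifier=http://acme.org|12345 -> myHashSystemAndValue
long h1 = ResourceIndexedSearchParamToken.calculateHashSystemAndValue("Patient", "identifier", "http://acme.org", "12345");
// identifier=http://acme.org| -> myHashSystem (any value within the system)
long h2 = ResourceIndexedSearchParamToken.calculateHashSystem("Patient", "identifier", "http://acme.org");
// identifier=12345 -> myHashValue (any system)
long h3 = ResourceIndexedSearchParamToken.calculateHashValue("Patient", "identifier", "12345");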
@Override
@ -1371,8 +1361,8 @@ public class SearchBuilder implements ISearchBuilder {
if (myParams.getEverythingMode() != null) {
Join<ResourceTable, ResourceLink> join = myResourceTableRoot.join("myResourceLinks", JoinType.LEFT);
if (myParams.get(BaseResource.SP_RES_ID) != null) {
StringParam idParm = (StringParam) myParams.get(BaseResource.SP_RES_ID).get(0).get(0);
if (myParams.get(IAnyResource.SP_RES_ID) != null) {
StringParam idParm = (StringParam) myParams.get(IAnyResource.SP_RES_ID).get(0).get(0);
Long pid = BaseHapiFhirDao.translateForcedIdToPid(myResourceName, idParm.getValue(), myForcedIdDao);
if (myAlsoIncludePids == null) {
myAlsoIncludePids = new ArrayList<>(1);
@ -1462,7 +1452,7 @@ public class SearchBuilder implements ISearchBuilder {
return false;
}
if (BaseResource.SP_RES_ID.equals(theSort.getParamName())) {
if (IAnyResource.SP_RES_ID.equals(theSort.getParamName())) {
From<?, ?> forcedIdJoin = theFrom.join("myForcedId", JoinType.LEFT);
if (theSort.getOrder() == null || theSort.getOrder() == SortOrderEnum.ASC) {
theOrders.add(theBuilder.asc(forcedIdJoin.get("myForcedId")));
@ -1602,35 +1592,39 @@ public class SearchBuilder implements ISearchBuilder {
private void doLoadPids(List<IBaseResource> theResourceListToPopulate, Set<Long> theRevIncludedPids, boolean theForHistoryOperation, EntityManager entityManager, FhirContext context, IDao theDao,
Map<Long, Integer> position, Collection<Long> pids) {
CriteriaBuilder builder = entityManager.getCriteriaBuilder();
CriteriaQuery<ResourceTable> cq = builder.createQuery(ResourceTable.class);
Root<ResourceTable> from = cq.from(ResourceTable.class);
cq.where(from.get("myId").in(pids));
TypedQuery<ResourceTable> q = entityManager.createQuery(cq);
List<ResourceTable> resultList = q.getResultList();
for (ResourceTable next : resultList) {
// -- get the resource from the searchView
Collection<ResourceSearchView> resourceSearchViewList = myResourceSearchViewDao.findByResourceIds(pids);
//-- preload all tags with their tag definitions, if any
Map<Long, Collection<ResourceTag>> tagMap = getResourceTagMap(resourceSearchViewList);
Long resourceId = null;
for (ResourceSearchView next : resourceSearchViewList) {
Class<? extends IBaseResource> resourceType = context.getResourceDefinition(next.getResourceType()).getImplementingClass();
IBaseResource resource = theDao.toResource(resourceType, next, theForHistoryOperation);
resourceId = next.getId();
IBaseResource resource = theDao.toResource(resourceType, next, tagMap.get(resourceId), theForHistoryOperation);
if (resource == null) {
ourLog.warn("Unable to find resource {}/{}/_history/{} in database", next.getResourceType(), next.getIdDt().getIdPart(), next.getVersion());
continue;
}
Integer index = position.get(next.getId());
Integer index = position.get(resourceId);
if (index == null) {
ourLog.warn("Got back unexpected resource PID {}", next.getId());
ourLog.warn("Got back unexpected resource PID {}", resourceId);
continue;
}
if (resource instanceof IResource) {
if (theRevIncludedPids.contains(next.getId())) {
if (theRevIncludedPids.contains(resourceId)) {
ResourceMetadataKeyEnum.ENTRY_SEARCH_MODE.put((IResource) resource, BundleEntrySearchModeEnum.INCLUDE);
} else {
ResourceMetadataKeyEnum.ENTRY_SEARCH_MODE.put((IResource) resource, BundleEntrySearchModeEnum.MATCH);
}
} else {
if (theRevIncludedPids.contains(next.getId())) {
if (theRevIncludedPids.contains(resourceId)) {
ResourceMetadataKeyEnum.ENTRY_SEARCH_MODE.put((IAnyResource) resource, BundleEntrySearchModeEnum.INCLUDE.getCode());
} else {
ResourceMetadataKeyEnum.ENTRY_SEARCH_MODE.put((IAnyResource) resource, BundleEntrySearchModeEnum.MATCH.getCode());
@ -1641,6 +1635,44 @@ public class SearchBuilder implements ISearchBuilder {
}
}
private Map<Long, Collection<ResourceTag>> getResourceTagMap(Collection<ResourceSearchView> theResourceSearchViewList) {
List<Long> idList = new ArrayList<Long>(theResourceSearchViewList.size());
//-- find all resources that have tags
for (ResourceSearchView resource : theResourceSearchViewList) {
if (resource.isHasTags())
idList.add(resource.getId());
}
Map<Long, Collection<ResourceTag>> tagMap = new HashMap<>();
//-- no tags
if (idList.size() == 0)
return tagMap;
//-- get all tags for the idList
Collection<ResourceTag> tagList = myResourceTagDao.findByResourceIds(idList);
//-- build the map, key = resourceId, value = list of ResourceTag
Long resourceId;
Collection<ResourceTag> tagCol;
for (ResourceTag tag : tagList) {
resourceId = tag.getResourceId();
tagCol = tagMap.get(resourceId);
if (tagCol == null) {
tagCol = new ArrayList<>();
tagCol.add(tag);
tagMap.put(resourceId, tagCol);
} else {
tagCol.add(tag);
}
}
return tagMap;
}
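This replaces one lazy tag fetch per resource with a single IN query per page of results; the map is then handed to toResource(...) above. Usage, as in doLoadPids:
Map<Long, Collection<ResourceTag>> tagMap = getResourceTagMap(resourceSearchViewList);
Collection<ResourceTag> tags = tagMap.get(resourceId); // null when the resource has no tags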
@Override
public void loadResourcesByPid(Collection<Long> theIncludePids, List<IBaseResource> theResourceListToPopulate, Set<Long> theRevIncludedPids, boolean theForHistoryOperation,
EntityManager entityManager, FhirContext context, IDao theDao) {
@ -1677,18 +1709,16 @@ public class SearchBuilder implements ISearchBuilder {
}
/**
* THIS SHOULD RETURN HASHSET and not jsut Set because we add to it later (so it can't be Collections.emptySet())
*
* @param theLastUpdated
* THIS SHOULD RETURN HASHSET and not just Set because we add to it later (so it can't be Collections.emptySet())
*/
@Override
public HashSet<Long> loadReverseIncludes(IDao theCallingDao, FhirContext theContext, EntityManager theEntityManager, Collection<Long> theMatches, Set<Include> theRevIncludes,
boolean theReverseMode, DateRangeParam theLastUpdated) {
if (theMatches.size() == 0) {
return new HashSet<Long>();
return new HashSet<>();
}
if (theRevIncludes == null || theRevIncludes.isEmpty()) {
return new HashSet<Long>();
return new HashSet<>();
}
String searchFieldName = theReverseMode ? "myTargetResourcePid" : "mySourceResourcePid";
@ -1729,7 +1759,7 @@ public class SearchBuilder implements ISearchBuilder {
} else {
List<String> paths;
RuntimeSearchParam param = null;
RuntimeSearchParam param;
String resType = nextInclude.getParamType();
if (isBlank(resType)) {
continue;

View File

@ -59,7 +59,7 @@ public class SearchParamExtractorDstu2 extends BaseSearchParamExtractor implemen
searchTerm = searchTerm.substring(0, ResourceIndexedSearchParamString.MAX_LENGTH);
}
ResourceIndexedSearchParamString nextEntity = new ResourceIndexedSearchParamString(resourceName, BaseHapiFhirDao.normalizeString(searchTerm), searchTerm);
ResourceIndexedSearchParamString nextEntity = new ResourceIndexedSearchParamString(getDaoConfig(), resourceName, BaseHapiFhirDao.normalizeString(searchTerm), searchTerm);
nextEntity.setResource(theEntity);
retVal.add(nextEntity);
}
@ -68,7 +68,7 @@ public class SearchParamExtractorDstu2 extends BaseSearchParamExtractor implemen
if (value.length() > ResourceIndexedSearchParamString.MAX_LENGTH) {
value = value.substring(0, ResourceIndexedSearchParamString.MAX_LENGTH);
}
ResourceIndexedSearchParamString nextEntity = new ResourceIndexedSearchParamString(nextSpDef.getName(), BaseHapiFhirDao.normalizeString(value), value);
ResourceIndexedSearchParamString nextEntity = new ResourceIndexedSearchParamString(getDaoConfig(), nextSpDef.getName(), BaseHapiFhirDao.normalizeString(value), value);
nextEntity.setResource(theEntity);
retVal.add(nextEntity);
}

View File

@ -1,5 +1,6 @@
package ca.uhn.fhir.jpa.dao.data;
import java.util.Collection;
import java.util.List;
/*
@ -38,5 +39,7 @@ public interface IForcedIdDao extends JpaRepository<ForcedId, Long> {
@Query("SELECT f FROM ForcedId f WHERE f.myResourcePid = :resource_pid")
public ForcedId findByResourcePid(@Param("resource_pid") Long theResourcePid);
@Query("SELECT f FROM ForcedId f WHERE f.myResourcePid in (:pids)")
Collection<ForcedId> findByResourcePids(@Param("pids") Collection<Long> pids);
}

View File

@ -1,6 +1,10 @@
package ca.uhn.fhir.jpa.dao.data;
import ca.uhn.fhir.jpa.entity.ResourceHistoryTable;
import java.util.Collection;
import java.util.Date;
import javax.persistence.TemporalType;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Slice;
import org.springframework.data.jpa.repository.JpaRepository;
@ -8,8 +12,7 @@ import org.springframework.data.jpa.repository.Query;
import org.springframework.data.jpa.repository.Temporal;
import org.springframework.data.repository.query.Param;
import javax.persistence.TemporalType;
import java.util.Date;
import ca.uhn.fhir.jpa.entity.ResourceHistoryTable;
/*
* #%L
@ -82,4 +85,10 @@ public interface IResourceHistoryTableDao extends JpaRepository<ResourceHistoryT
"LEFT OUTER JOIN ResourceTable t ON (v.myResourceId = t.myId) " +
"WHERE v.myResourceVersion != t.myVersion")
Slice<Long> findIdsOfPreviousVersionsOfResources(Pageable thePage);
@Query("" +
"SELECT h FROM ResourceHistoryTable h " +
"INNER JOIN ResourceTable r ON (r.myId = h.myResourceId and r.myVersion = h.myResourceVersion) " +
"WHERE r.myId in (:pids)")
Collection<ResourceHistoryTable> findByResourceIds(@Param("pids") Collection<Long> pids);
}

View File

@ -23,7 +23,12 @@ package ca.uhn.fhir.jpa.dao.data;
import org.springframework.data.jpa.repository.JpaRepository;
import ca.uhn.fhir.jpa.entity.ResourceIndexedSearchParamString;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
public interface IResourceIndexedSearchParamStringDao extends JpaRepository<ResourceIndexedSearchParamString, Long> {
// nothing yet
@Query("select count(*) from ResourceIndexedSearchParamString t WHERE t.myResourcePid = :resid")
int countForResourceId(@Param("resid") Long theResourcePid);
}

View File

@ -20,10 +20,14 @@ package ca.uhn.fhir.jpa.dao.data;
* #L%
*/
import org.springframework.data.jpa.repository.JpaRepository;
import ca.uhn.fhir.jpa.entity.ResourceIndexedSearchParamToken;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
public interface IResourceIndexedSearchParamTokenDao extends JpaRepository<ResourceIndexedSearchParamToken, Long> {
// nothing yet
@Query("select count(*) from ResourceIndexedSearchParamToken t WHERE t.myResourcePid = :resid")
int countForResourceId(@Param("resid") Long theResourcePid);
}

View File

@ -1,5 +1,7 @@
package ca.uhn.fhir.jpa.dao.data;
import java.util.Collection;
/*
* #%L
* HAPI FHIR JPA Server
@ -10,7 +12,7 @@ package ca.uhn.fhir.jpa.dao.data;
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
@ -24,11 +26,10 @@ import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import ca.uhn.fhir.jpa.entity.SearchParam;
import ca.uhn.fhir.jpa.entity.ResourceSearchView;
public interface ISearchParamDao extends JpaRepository<SearchParam, Long> {
@Query("SELECT s FROM SearchParam s WHERE s.myResourceName = :resname AND s.myParamName = :parmname")
public SearchParam findForResource(@Param("resname") String theResourceType, @Param("parmname") String theParamName);
public interface IResourceSearchViewDao extends JpaRepository<ResourceSearchView, Long> {
@Query("SELECT v FROM ResourceSearchView v WHERE v.myResourceId in (:pids)")
Collection<ResourceSearchView> findByResourceIds(@Param("pids") Collection<Long> pids);
}

View File

@ -1,5 +1,7 @@
package ca.uhn.fhir.jpa.dao.data;
import java.util.Collection;
/*
* #%L
* HAPI FHIR JPA Server
@ -21,9 +23,15 @@ package ca.uhn.fhir.jpa.dao.data;
*/
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import ca.uhn.fhir.jpa.entity.ResourceTag;
public interface IResourceTagDao extends JpaRepository<ResourceTag, Long> {
// nothing
@Query("" +
"SELECT t FROM ResourceTag t " +
"INNER JOIN TagDefinition td ON (td.myId = t.myTagId) " +
"WHERE t.myResourceId in (:pids)")
Collection<ResourceTag> findByResourceIds(@Param("pids") Collection<Long> pids);
}

View File

@ -36,19 +36,19 @@ import ca.uhn.fhir.jpa.entity.Search;
public interface ISearchDao extends JpaRepository<Search, Long> {
@Query("SELECT s FROM Search s WHERE s.myUuid = :uuid")
public Search findByUuid(@Param("uuid") String theUuid);
Search findByUuid(@Param("uuid") String theUuid);
@Query("SELECT s.myId FROM Search s WHERE s.mySearchLastReturned < :cutoff")
public Slice<Long> findWhereLastReturnedBefore(@Param("cutoff") Date theCutoff, Pageable thePage);
Slice<Long> findWhereLastReturnedBefore(@Param("cutoff") Date theCutoff, Pageable thePage);
// @Query("SELECT s FROM Search s WHERE s.myCreated < :cutoff")
// public Collection<Search> findWhereCreatedBefore(@Param("cutoff") Date theCutoff);
@Query("SELECT s FROM Search s WHERE s.myResourceType = :type AND mySearchQueryStringHash = :hash AND s.myCreated > :cutoff")
public Collection<Search> find(@Param("type") String theResourceType, @Param("hash") int theHashCode, @Param("cutoff") Date theCreatedCutoff);
Collection<Search> find(@Param("type") String theResourceType, @Param("hash") int theHashCode, @Param("cutoff") Date theCreatedCutoff);
@Modifying
@Query("UPDATE Search s SET s.mySearchLastReturned = :last WHERE s.myId = :pid")
public void updateSearchLastReturned(@Param("pid") long thePid, @Param("last") Date theDate);
void updateSearchLastReturned(@Param("pid") long thePid, @Param("last") Date theDate);
}

View File

@ -38,8 +38,8 @@ public interface ISearchResultDao extends JpaRepository<SearchResult, Long> {
@Query(value="SELECT r FROM SearchResult r WHERE r.mySearch = :search")
Collection<SearchResult> findWithSearchUuid(@Param("search") Search theSearch);
@Query(value="SELECT r FROM SearchResult r WHERE r.mySearch = :search ORDER BY r.myOrder ASC")
Page<SearchResult> findWithSearchUuid(@Param("search") Search theSearch, Pageable thePage);
@Query(value="SELECT r.myResourcePid FROM SearchResult r WHERE r.mySearch = :search ORDER BY r.myOrder ASC")
Page<Long> findWithSearchUuid(@Param("search") Search theSearch, Pageable thePage);
@Modifying
@Query(value="DELETE FROM SearchResult r WHERE r.mySearchPid = :search")

View File

@ -26,6 +26,8 @@ import org.springframework.data.repository.query.Param;
import ca.uhn.fhir.jpa.entity.TermCodeSystem;
import java.util.Optional;
public interface ITermCodeSystemDao extends JpaRepository<TermCodeSystem, Long> {
@Query("SELECT cs FROM TermCodeSystem cs WHERE cs.myCodeSystemUri = :code_system_uri")
@ -34,4 +36,7 @@ public interface ITermCodeSystemDao extends JpaRepository<TermCodeSystem, Long>
@Query("SELECT cs FROM TermCodeSystem cs WHERE cs.myResourcePid = :resource_pid")
TermCodeSystem findByResourcePid(@Param("resource_pid") Long theResourcePid);
@Query("SELECT cs FROM TermCodeSystem cs WHERE cs.myCurrentVersion.myId = :csv_pid")
Optional<TermCodeSystem> findWithCodeSystemVersionAsCurrentVersion(@Param("csv_pid") Long theCodeSystemVersionPid);
}

View File

@ -1,9 +1,16 @@
package ca.uhn.fhir.jpa.dao.data;
import java.util.List;
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
import ca.uhn.fhir.jpa.entity.TermConcept;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Slice;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import java.util.List;
/*
* #%L
@ -14,9 +21,9 @@ import org.springframework.data.domain.Pageable;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*
* http://www.apache.org/licenses/LICENSE-2.0
*
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@ -25,31 +32,25 @@ import org.springframework.data.domain.Pageable;
* #L%
*/
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
import ca.uhn.fhir.jpa.entity.TermConcept;
public interface ITermConceptDao extends JpaRepository<TermConcept, Long> {
@Query("SELECT COUNT(t) FROM TermConcept t WHERE t.myCodeSystem.myId = :cs_pid")
Integer countByCodeSystemVersion(@Param("cs_pid") Long thePid);
@Query("SELECT c FROM TermConcept c WHERE c.myCodeSystem = :code_system AND c.myCode = :code")
TermConcept findByCodeSystemAndCode(@Param("code_system") TermCodeSystemVersion theCodeSystem, @Param("code") String theCode);
@Query("SELECT t FROM TermConcept t WHERE t.myCodeSystem.myId = :cs_pid")
Slice<TermConcept> findByCodeSystemVersion(Pageable thePage, @Param("cs_pid") Long thePid);
@Query("SELECT c FROM TermConcept c WHERE c.myCodeSystem = :code_system")
List<TermConcept> findByCodeSystemVersion(@Param("code_system") TermCodeSystemVersion theCodeSystem);
@Query("SELECT t FROM TermConcept t WHERE t.myCodeSystem.myId = :cs_pid")
@Modifying
List<TermConcept> findByCodeSystemVersion(@Param("cs_pid") Long thePid);
@Query("SELECT t FROM TermConcept t WHERE t.myIndexStatus = null")
Page<TermConcept> findResourcesRequiringReindexing(Pageable thePageRequest);
@Query("UPDATE TermConcept t SET t.myIndexStatus = null")
@Modifying
int markAllForReindexing();
@Query("SELECT t FROM TermConcept t WHERE t.myIndexStatus = null")
Page<TermConcept> findResourcesRequiringReindexing(Pageable thePageRequest);
}
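The Slice-returning variant above enables chunked processing of large code systems. A minimal consumption sketch, assuming Spring Data 2.x (PageRequest.of) and the repository method shown above:

import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Slice;

import ca.uhn.fhir.jpa.dao.data.ITermConceptDao;
import ca.uhn.fhir.jpa.entity.TermConcept;

public class ConceptPagingSketch {
	// Drain all concepts of one code system version in fixed-size chunks
	static void processAllConcepts(ITermConceptDao theDao, Long theCodeSystemVersionPid) {
		Pageable page = PageRequest.of(0, 1000);
		Slice<TermConcept> slice;
		do {
			slice = theDao.findByCodeSystemVersion(page, theCodeSystemVersionPid);
			for (TermConcept next : slice.getContent()) {
				// process next, e.g. delete or reindex it
			}
			page = slice.nextPageable();
		} while (slice.hasNext());
	}
}

Unlike Page, a Slice does not issue the extra COUNT query, which matters when the only goal is to walk the whole table.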

View File

@ -1,7 +1,11 @@
package ca.uhn.fhir.jpa.dao.data;
import ca.uhn.fhir.jpa.entity.TermConceptDesignation;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Slice;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
/*
* #%L
@ -24,5 +28,11 @@ import org.springframework.data.jpa.repository.JpaRepository;
*/
public interface ITermConceptDesignationDao extends JpaRepository<TermConceptDesignation, Long> {
// nothing
@Query("SELECT t FROM TermConceptDesignation t WHERE t.myCodeSystemVersion.myId = :csv_pid")
Slice<TermConceptDesignation> findByCodeSystemVersion(Pageable thePage, @Param("csv_pid") Long thePid);
@Query("SELECT COUNT(t) FROM TermConceptDesignation t WHERE t.myCodeSystemVersion.myId = :csv_pid")
Integer countByCodeSystemVersion(@Param("csv_pid") Long thePid);
}

View File

@ -1,5 +1,12 @@
package ca.uhn.fhir.jpa.dao.data;
import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Slice;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import java.util.Collection;
/*
@ -22,20 +29,15 @@ import java.util.Collection;
* #L%
*/
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink;
public interface ITermConceptParentChildLinkDao extends JpaRepository<TermConceptParentChildLink, Long> {
@Query("DELETE FROM TermConceptParentChildLink t WHERE t.myCodeSystem.myId = :cs_pid")
@Modifying
void deleteByCodeSystemVersion(@Param("cs_pid") Long thePid);
@Query("SELECT COUNT(t) FROM TermConceptParentChildLink t WHERE t.myCodeSystem.myId = :cs_pid")
Integer countByCodeSystemVersion(@Param("cs_pid") Long thePid);
@Query("SELECT t.myParentPid FROM TermConceptParentChildLink t WHERE t.myChildPid = :child_pid")
Collection<Long> findAllWithChild(@Param("child_pid") Long theConceptPid);
@Query("SELECT t FROM TermConceptParentChildLink t WHERE t.myCodeSystem.myId = :cs_pid")
Slice<TermConceptParentChildLink> findByCodeSystemVersion(Pageable thePage, @Param("cs_pid") Long thePid);
}

View File

@ -1,17 +1,12 @@
package ca.uhn.fhir.jpa.dao.data;
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
import ca.uhn.fhir.jpa.entity.TermConcept;
import ca.uhn.fhir.jpa.entity.TermConceptProperty;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Slice;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import java.util.List;
/*
* #%L
* HAPI FHIR JPA Server
@ -33,5 +28,10 @@ import java.util.List;
*/
public interface ITermConceptPropertyDao extends JpaRepository<TermConceptProperty, Long> {
// nothing
@Query("SELECT t FROM TermConceptProperty t WHERE t.myCodeSystemVersion.myId = :cs_pid")
Slice<TermConceptProperty> findByCodeSystemVersion(Pageable thePage, @Param("cs_pid") Long thePid);
@Query("SELECT COUNT(t) FROM TermConceptProperty t WHERE t.myCodeSystemVersion.myId = :cs_pid")
Integer countByCodeSystemVersion(@Param("cs_pid") Long thePid);
}

View File

@ -160,13 +160,16 @@ public class FhirResourceDaoConceptMapDstu3 extends FhirResourceDaoDstu3<Concept
boolean theUpdateVersion, Date theUpdateTime, boolean theForceUpdate, boolean theCreateNewHistoryEntry) {
ResourceTable retVal = super.updateEntity(theRequestDetails, theResource, theEntity, theDeletedTimestampOrNull, thePerformIndexing, theUpdateVersion, theUpdateTime, theForceUpdate, theCreateNewHistoryEntry);
ConceptMap conceptMap = (ConceptMap) theResource;
// Convert from DSTU3 to R4
try {
myHapiTerminologySvc.storeTermConceptMapAndChildren(retVal, VersionConvertor_30_40.convertConceptMap(conceptMap));
} catch (FHIRException fe) {
throw new InternalErrorException(fe);
if (retVal.getDeleted() == null) {
try {
ConceptMap conceptMap = (ConceptMap) theResource;
org.hl7.fhir.r4.model.ConceptMap converted = VersionConvertor_30_40.convertConceptMap(conceptMap);
myHapiTerminologySvc.storeTermConceptMapAndChildren(retVal, converted);
} catch (FHIRException fe) {
throw new InternalErrorException(fe);
}
} else {
myHapiTerminologySvc.deleteConceptMapAndChildren(retVal);
}
return retVal;
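One note on the DSTU3 path above: the terminology service here stores R4 structures, so the DSTU3 ConceptMap must cross versions first. A standalone sketch of just that conversion step, using the same convertor call as updateEntity():

import org.hl7.fhir.convertors.VersionConvertor_30_40;
import org.hl7.fhir.exceptions.FHIRException;

public class ConceptMapConversionSketch {
	static org.hl7.fhir.r4.model.ConceptMap toR4(org.hl7.fhir.dstu3.model.ConceptMap theDstu3Map) {
		try {
			return VersionConvertor_30_40.convertConceptMap(theDstu3Map);
		} catch (FHIRException e) {
			// The DAO above surfaces this as an InternalErrorException
			throw new IllegalStateException(e);
		}
	}
}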

View File

@ -0,0 +1,36 @@
package ca.uhn.fhir.jpa.dao.dstu3;
/*
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2018 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.jpa.dao.FhirResourceDaoMessageHeaderDstu2;
import ca.uhn.fhir.jpa.dao.IFhirResourceDaoMessageHeader;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import org.hl7.fhir.dstu3.model.MessageHeader;
import org.hl7.fhir.instance.model.api.IBaseBundle;
public class FhirResourceDaoMessageHeaderDstu3 extends FhirResourceDaoDstu3<MessageHeader> implements IFhirResourceDaoMessageHeader<MessageHeader> {
@Override
public IBaseBundle messageHeaderProcessMessage(RequestDetails theRequestDetails, IBaseBundle theMessage) {
return FhirResourceDaoMessageHeaderDstu2.throwProcessMessageNotImplemented();
}
}

View File

@ -25,6 +25,7 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
import org.apache.commons.codec.binary.StringUtils;
import org.hl7.fhir.dstu3.hapi.ctx.HapiWorkerContext;
@ -35,6 +36,8 @@ import org.hl7.fhir.dstu3.model.ValueSet.*;
import org.hl7.fhir.dstu3.terminologies.ValueSetExpander.ValueSetExpansionOutcome;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
@ -223,6 +226,7 @@ public class FhirResourceDaoValueSetDstu3 extends FhirResourceDaoDstu3<ValueSet>
if (vs != null) {
ValueSet expansion = doExpand(vs);
List<ValueSetExpansionContainsComponent> contains = expansion.getExpansion().getContains();
ValidateCodeResult result = validateCodeIsInContains(contains, toStringOrNull(theSystem), toStringOrNull(theCode), theCoding, theCodeableConcept);
if (result != null) {
if (theDisplay != null && isNotBlank(theDisplay.getValue()) && isNotBlank(result.getDisplay())) {
@ -238,6 +242,9 @@ public class FhirResourceDaoValueSetDstu3 extends FhirResourceDaoDstu3<ValueSet>
}
private static final Logger ourLog = LoggerFactory.getLogger(FhirResourceDaoValueSetDstu3.class);
private String toStringOrNull(IPrimitiveType<String> thePrimitive) {
return thePrimitive != null ? thePrimitive.getValue() : null;
}

View File

@ -26,12 +26,13 @@ import static org.apache.commons.lang3.StringUtils.trim;
import java.math.BigDecimal;
import java.util.*;
import javax.annotation.PostConstruct;
import javax.measure.unit.NonSI;
import javax.measure.unit.Unit;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.Pair;
import org.hl7.fhir.dstu3.context.IWorkerContext;
import org.hl7.fhir.dstu3.hapi.ctx.HapiWorkerContext;
import org.hl7.fhir.dstu3.hapi.ctx.IValidationSupport;
import org.hl7.fhir.dstu3.model.*;
import org.hl7.fhir.dstu3.model.CapabilityStatement.CapabilityStatementRestSecurityComponent;
@ -58,6 +59,13 @@ public class SearchParamExtractorDstu3 extends BaseSearchParamExtractor implemen
@Autowired
private org.hl7.fhir.dstu3.hapi.ctx.IValidationSupport myValidationSupport;
private HapiWorkerContext myWorkerContext;
@PostConstruct
public void start() {
myWorkerContext = new HapiWorkerContext(getContext(), myValidationSupport);
}
/**
* Constructor
*/
@ -65,8 +73,8 @@ public class SearchParamExtractorDstu3 extends BaseSearchParamExtractor implemen
super();
}
public SearchParamExtractorDstu3(FhirContext theCtx, IValidationSupport theValidationSupport, ISearchParamRegistry theSearchParamRegistry) {
super(theCtx, theSearchParamRegistry);
public SearchParamExtractorDstu3(DaoConfig theDaoConfig, FhirContext theCtx, IValidationSupport theValidationSupport, ISearchParamRegistry theSearchParamRegistry) {
super(theDaoConfig, theCtx, theSearchParamRegistry);
myValidationSupport = theValidationSupport;
}
@ -78,7 +86,7 @@ public class SearchParamExtractorDstu3 extends BaseSearchParamExtractor implemen
searchTerm = searchTerm.substring(0, ResourceIndexedSearchParamString.MAX_LENGTH);
}
ResourceIndexedSearchParamString nextEntity = new ResourceIndexedSearchParamString(resourceName, BaseHapiFhirDao.normalizeString(searchTerm), searchTerm);
ResourceIndexedSearchParamString nextEntity = new ResourceIndexedSearchParamString(getDaoConfig(), resourceName, BaseHapiFhirDao.normalizeString(searchTerm), searchTerm);
nextEntity.setResource(theEntity);
retVal.add(nextEntity);
}
@ -87,7 +95,7 @@ public class SearchParamExtractorDstu3 extends BaseSearchParamExtractor implemen
if (value.length() > ResourceIndexedSearchParamString.MAX_LENGTH) {
value = value.substring(0, ResourceIndexedSearchParamString.MAX_LENGTH);
}
ResourceIndexedSearchParamString nextEntity = new ResourceIndexedSearchParamString(nextSpDef.getName(), BaseHapiFhirDao.normalizeString(value), value);
ResourceIndexedSearchParamString nextEntity = new ResourceIndexedSearchParamString(getDaoConfig(), nextSpDef.getName(), BaseHapiFhirDao.normalizeString(value), value);
nextEntity.setResource(theEntity);
retVal.add(nextEntity);
}
@ -695,8 +703,7 @@ public class SearchParamExtractorDstu3 extends BaseSearchParamExtractor implemen
*/
@Override
protected List<Object> extractValues(String thePaths, IBaseResource theResource) {
IWorkerContext worker = new org.hl7.fhir.dstu3.hapi.ctx.HapiWorkerContext(getContext(), myValidationSupport);
FHIRPathEngine fp = new FHIRPathEngine(worker);
FHIRPathEngine fp = new FHIRPathEngine(myWorkerContext);
List<Object> values = new ArrayList<>();
try {

View File

@ -38,7 +38,7 @@ import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
import org.hl7.fhir.r4.hapi.ctx.IValidationSupport.CodeValidationResult;
import org.hl7.fhir.r4.hapi.ctx.ValidationSupportChain;
import org.hl7.fhir.r4.hapi.validation.ValidationSupportChain;
import org.hl7.fhir.r4.model.CodeSystem;
import org.hl7.fhir.r4.model.CodeSystem.CodeSystemContentMode;
import org.hl7.fhir.r4.model.CodeSystem.ConceptDefinitionComponent;
@ -47,7 +47,6 @@ import org.hl7.fhir.r4.model.Coding;
import org.hl7.fhir.r4.model.IdType;
import org.springframework.beans.factory.annotation.Autowired;
import javax.servlet.http.HttpServletRequest;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;

View File

@ -156,9 +156,12 @@ public class FhirResourceDaoConceptMapR4 extends FhirResourceDaoR4<ConceptMap> i
boolean theUpdateVersion, Date theUpdateTime, boolean theForceUpdate, boolean theCreateNewHistoryEntry) {
ResourceTable retVal = super.updateEntity(theRequestDetails, theResource, theEntity, theDeletedTimestampOrNull, thePerformIndexing, theUpdateVersion, theUpdateTime, theForceUpdate, theCreateNewHistoryEntry);
ConceptMap conceptMap = (ConceptMap) theResource;
myHapiTerminologySvc.storeTermConceptMapAndChildren(retVal, conceptMap);
if (retVal.getDeleted() == null) {
ConceptMap conceptMap = (ConceptMap) theResource;
myHapiTerminologySvc.storeTermConceptMapAndChildren(retVal, conceptMap);
} else {
myHapiTerminologySvc.deleteConceptMapAndChildren(retVal);
}
return retVal;
}

View File

@ -0,0 +1,36 @@
package ca.uhn.fhir.jpa.dao.r4;
/*
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2018 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.jpa.dao.FhirResourceDaoMessageHeaderDstu2;
import ca.uhn.fhir.jpa.dao.IFhirResourceDaoMessageHeader;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import org.hl7.fhir.instance.model.api.IBaseBundle;
import org.hl7.fhir.r4.model.MessageHeader;
public class FhirResourceDaoMessageHeaderR4 extends FhirResourceDaoR4<MessageHeader> implements IFhirResourceDaoMessageHeader<MessageHeader> {
@Override
public IBaseBundle messageHeaderProcessMessage(RequestDetails theRequestDetails, IBaseBundle theMessage) {
return FhirResourceDaoMessageHeaderDstu2.throwProcessMessageNotImplemented();
}
}

View File

@ -64,8 +64,8 @@ public class SearchParamExtractorR4 extends BaseSearchParamExtractor implements
super();
}
public SearchParamExtractorR4(FhirContext theCtx, IValidationSupport theValidationSupport, ISearchParamRegistry theSearchParamRegistry) {
super(theCtx, theSearchParamRegistry);
public SearchParamExtractorR4(DaoConfig theDaoConfig, FhirContext theCtx, IValidationSupport theValidationSupport, ISearchParamRegistry theSearchParamRegistry) {
super(theDaoConfig, theCtx, theSearchParamRegistry);
myValidationSupport = theValidationSupport;
}
@ -77,7 +77,7 @@ public class SearchParamExtractorR4 extends BaseSearchParamExtractor implements
searchTerm = searchTerm.substring(0, ResourceIndexedSearchParamString.MAX_LENGTH);
}
ResourceIndexedSearchParamString nextEntity = new ResourceIndexedSearchParamString(resourceName, BaseHapiFhirDao.normalizeString(searchTerm), searchTerm);
ResourceIndexedSearchParamString nextEntity = new ResourceIndexedSearchParamString(getDaoConfig(), resourceName, BaseHapiFhirDao.normalizeString(searchTerm), searchTerm);
nextEntity.setResource(theEntity);
retVal.add(nextEntity);
}
@ -86,7 +86,7 @@ public class SearchParamExtractorR4 extends BaseSearchParamExtractor implements
if (value.length() > ResourceIndexedSearchParamString.MAX_LENGTH) {
value = value.substring(0, ResourceIndexedSearchParamString.MAX_LENGTH);
}
ResourceIndexedSearchParamString nextEntity = new ResourceIndexedSearchParamString(nextSpDef.getName(), BaseHapiFhirDao.normalizeString(value), value);
ResourceIndexedSearchParamString nextEntity = new ResourceIndexedSearchParamString(getDaoConfig(), nextSpDef.getName(), BaseHapiFhirDao.normalizeString(value), value);
nextEntity.setResource(theEntity);
retVal.add(nextEntity);
}
@ -104,7 +104,7 @@ public class SearchParamExtractorR4 extends BaseSearchParamExtractor implements
*/
@Override
public Set<ResourceIndexedSearchParamDate> extractSearchParamDates(ResourceTable theEntity, IBaseResource theResource) {
HashSet<ResourceIndexedSearchParamDate> retVal = new HashSet<ResourceIndexedSearchParamDate>();
HashSet<ResourceIndexedSearchParamDate> retVal = new HashSet<>();
Collection<RuntimeSearchParam> searchParams = getSearchParams(theResource);
for (RuntimeSearchParam nextSpDef : searchParams) {
@ -187,7 +187,7 @@ public class SearchParamExtractorR4 extends BaseSearchParamExtractor implements
*/
@Override
public HashSet<ResourceIndexedSearchParamNumber> extractSearchParamNumber(ResourceTable theEntity, IBaseResource theResource) {
HashSet<ResourceIndexedSearchParamNumber> retVal = new HashSet<ResourceIndexedSearchParamNumber>();
HashSet<ResourceIndexedSearchParamNumber> retVal = new HashSet<>();
Collection<RuntimeSearchParam> searchParams = getSearchParams(theResource);
for (RuntimeSearchParam nextSpDef : searchParams) {
@ -290,7 +290,7 @@ public class SearchParamExtractorR4 extends BaseSearchParamExtractor implements
*/
@Override
public Set<ResourceIndexedSearchParamQuantity> extractSearchParamQuantity(ResourceTable theEntity, IBaseResource theResource) {
HashSet<ResourceIndexedSearchParamQuantity> retVal = new HashSet<ResourceIndexedSearchParamQuantity>();
HashSet<ResourceIndexedSearchParamQuantity> retVal = new HashSet<>();
Collection<RuntimeSearchParam> searchParams = getSearchParams(theResource);
for (RuntimeSearchParam nextSpDef : searchParams) {
@ -354,7 +354,7 @@ public class SearchParamExtractorR4 extends BaseSearchParamExtractor implements
*/
@Override
public Set<ResourceIndexedSearchParamString> extractSearchParamStrings(ResourceTable theEntity, IBaseResource theResource) {
HashSet<ResourceIndexedSearchParamString> retVal = new HashSet<ResourceIndexedSearchParamString>();
HashSet<ResourceIndexedSearchParamString> retVal = new HashSet<>();
String resourceName = getContext().getResourceDefinition(theResource).getName();
@ -397,7 +397,7 @@ public class SearchParamExtractorR4 extends BaseSearchParamExtractor implements
addSearchTerm(theEntity, retVal, nextSpName, searchTerm);
} else {
if (nextObject instanceof HumanName) {
ArrayList<StringType> allNames = new ArrayList<StringType>();
ArrayList<StringType> allNames = new ArrayList<>();
HumanName nextHumanName = (HumanName) nextObject;
if (isNotBlank(nextHumanName.getFamily())) {
allNames.add(nextHumanName.getFamilyElement());
@ -407,7 +407,7 @@ public class SearchParamExtractorR4 extends BaseSearchParamExtractor implements
addSearchTerm(theEntity, retVal, nextSpName, nextName.getValue());
}
} else if (nextObject instanceof Address) {
ArrayList<StringType> allNames = new ArrayList<StringType>();
ArrayList<StringType> allNames = new ArrayList<>();
Address nextAddress = (Address) nextObject;
allNames.addAll(nextAddress.getLine());
allNames.add(nextAddress.getCityElement());
@ -573,7 +573,7 @@ public class SearchParamExtractorR4 extends BaseSearchParamExtractor implements
assert systems.size() == codes.size() : "Systems contains " + systems + ", codes contains: " + codes;
Set<Pair<String, String>> haveValues = new HashSet<Pair<String, String>>();
Set<Pair<String, String>> haveValues = new HashSet<>();
for (int i = 0; i < systems.size(); i++) {
String system = systems.get(i);
String code = codes.get(i);
@ -608,7 +608,7 @@ public class SearchParamExtractorR4 extends BaseSearchParamExtractor implements
@Override
public Set<ResourceIndexedSearchParamUri> extractSearchParamUri(ResourceTable theEntity, IBaseResource theResource) {
HashSet<ResourceIndexedSearchParamUri> retVal = new HashSet<ResourceIndexedSearchParamUri>();
HashSet<ResourceIndexedSearchParamUri> retVal = new HashSet<>();
Collection<RuntimeSearchParam> searchParams = getSearchParams(theResource);
for (RuntimeSearchParam nextSpDef : searchParams) {
@ -690,7 +690,7 @@ public class SearchParamExtractorR4 extends BaseSearchParamExtractor implements
IWorkerContext worker = new org.hl7.fhir.r4.hapi.ctx.HapiWorkerContext(getContext(), myValidationSupport);
FHIRPathEngine fp = new FHIRPathEngine(worker);
List<Object> values = new ArrayList<Object>();
List<Object> values = new ArrayList<>();
try {
String[] nextPathsSplit = SPLIT.split(thePaths);
for (String nextPath : nextPathsSplit) {
@ -717,7 +717,7 @@ public class SearchParamExtractorR4 extends BaseSearchParamExtractor implements
@Override
public List<PathAndRef> extractResourceLinks(IBaseResource theResource, RuntimeSearchParam theNextSpDef) {
ArrayList<PathAndRef> retVal = new ArrayList<PathAndRef>();
ArrayList<PathAndRef> retVal = new ArrayList<>();
String[] nextPathsSplit = SPLIT.split(theNextSpDef.getPath());
for (String path : nextPathsSplit) {

View File

@ -30,7 +30,7 @@ import java.util.Collection;
import java.util.Date;
@MappedSuperclass
public abstract class BaseHasResource {
public abstract class BaseHasResource implements IBaseResourceEntity {
@Column(name = "RES_DELETED_AT", nullable = true)
@Temporal(TemporalType.TIMESTAMP)
@ -42,7 +42,7 @@ public abstract class BaseHasResource {
@OptimisticLock(excluded = true)
private FhirVersionEnum myFhirVersion;
@OneToOne(optional = true, fetch = FetchType.EAGER, cascade = {}, orphanRemoval = false)
@OneToOne(optional = true, fetch = FetchType.LAZY, cascade = {}, orphanRemoval = false)
@JoinColumn(name = "FORCED_ID_PID")
@OptimisticLock(excluded = true)
private ForcedId myForcedId;
@ -63,6 +63,7 @@ public abstract class BaseHasResource {
public abstract BaseTag addTag(TagDefinition theDef);
@Override
public Date getDeleted() {
return myDeleted;
}
@ -72,6 +73,7 @@ public abstract class BaseHasResource {
}
@Override
public FhirVersionEnum getFhirVersion() {
return myFhirVersion;
}
@ -88,10 +90,13 @@ public abstract class BaseHasResource {
myForcedId = theForcedId;
}
@Override
public abstract Long getId();
@Override
public abstract IdDt getIdDt();
@Override
public InstantDt getPublished() {
if (myPublished != null) {
return new InstantDt(myPublished);
@ -104,12 +109,15 @@ public abstract class BaseHasResource {
myPublished = thePublished;
}
@Override
public abstract Long getResourceId();
@Override
public abstract String getResourceType();
public abstract Collection<? extends BaseTag> getTags();
@Override
public InstantDt getUpdated() {
return new InstantDt(myUpdated);
}
@ -118,12 +126,15 @@ public abstract class BaseHasResource {
myUpdated = theUpdated;
}
@Override
public Date getUpdatedDate() {
return myUpdated;
}
@Override
public abstract long getVersion();
@Override
public boolean isHasTags() {
return myHasTags;
}

View File

@ -36,13 +36,15 @@ import java.util.Date;
@MappedSuperclass
public abstract class BaseResourceIndexedSearchParam implements Serializable {
/** Don't change this without careful consideration. You will break existing hashes! */
private static final HashFunction HASH_FUNCTION = Hashing.murmur3_128(0);
/** Don't make this public 'cause nobody better touch it! */
private static final byte[] DELIMITER_BYTES = "|".getBytes(Charsets.UTF_8);
static final int MAX_SP_NAME = 100;
/**
* Don't change this without careful consideration. You will break existing hashes!
*/
private static final HashFunction HASH_FUNCTION = Hashing.murmur3_128(0);
/**
* Don't make this public 'cause nobody better be able to modify it!
*/
private static final byte[] DELIMITER_BYTES = "|".getBytes(Charsets.UTF_8);
private static final long serialVersionUID = 1L;
// TODO: make this nullable=false and a primitive (written may 2017)
@ -71,6 +73,13 @@ public abstract class BaseResourceIndexedSearchParam implements Serializable {
@Temporal(TemporalType.TIMESTAMP)
private Date myUpdated;
/**
* Subclasses may override
*/
protected void clearHashes() {
// nothing
}
protected abstract Long getId();
public String getParamName() {
@ -82,13 +91,6 @@ public abstract class BaseResourceIndexedSearchParam implements Serializable {
myParamName = theName;
}
/**
* Subclasses may override
*/
protected void clearHashes() {
// nothing
}
public ResourceTable getResource() {
return myResource;
}
@ -127,6 +129,10 @@ public abstract class BaseResourceIndexedSearchParam implements Serializable {
public abstract IQueryParameterType toQueryParameterType();
public static long calculateHashIdentity(String theResourceType, String theParamName) {
return hash(theResourceType, theParamName);
}
/**
* Applies a fast and consistent hashing algorithm to a set of strings
*/
@ -148,5 +154,4 @@ public abstract class BaseResourceIndexedSearchParam implements Serializable {
return hashCode.asLong();
}
}
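The private hash() helper referenced above reduces a murmur3_128 digest of delimiter-separated strings to a long. Its exact body is not shown in this hunk, so the following is an illustrative sketch of the contract using the same Guava primitives, not the canonical implementation:

import com.google.common.base.Charsets;
import com.google.common.hash.HashFunction;
import com.google.common.hash.Hasher;
import com.google.common.hash.Hashing;

public class HashContractSketch {
	// Seed 0 must stay fixed: changing it would invalidate stored hashes
	private static final HashFunction HASH_FUNCTION = Hashing.murmur3_128(0);
	private static final byte[] DELIMITER_BYTES = "|".getBytes(Charsets.UTF_8);

	static long hash(String... theValues) {
		Hasher hasher = HASH_FUNCTION.newHasher();
		for (String next : theValues) {
			if (next != null) {
				hasher.putString(next, Charsets.UTF_8);
			}
			// Delimit so ("ab","c") and ("a","bc") produce different digests
			hasher.putBytes(DELIMITER_BYTES);
		}
		// murmur3_128 yields 128 bits; asLong() keeps the first 64
		return hasher.hash().asLong();
	}
}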

View File

@ -20,30 +20,22 @@ package ca.uhn.fhir.jpa.entity;
* #L%
*/
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.ForeignKey;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Index;
import javax.persistence.JoinColumn;
import javax.persistence.OneToOne;
import javax.persistence.SequenceGenerator;
import javax.persistence.Table;
import javax.persistence.UniqueConstraint;
import org.hibernate.annotations.ColumnDefault;
//@formatter:off
import javax.persistence.*;
@Entity()
@Table(name = "HFJ_FORCED_ID", uniqueConstraints = {
@UniqueConstraint(name = "IDX_FORCEDID_RESID", columnNames = {"RESOURCE_PID"}),
@UniqueConstraint(name = "IDX_FORCEDID_TYPE_RESID", columnNames = {"RESOURCE_TYPE", "RESOURCE_PID"})
}, indexes= {
@Index(name = "IDX_FORCEDID_TYPE_FORCEDID", columnList = "RESOURCE_TYPE,FORCED_ID"),
@UniqueConstraint(name = "IDX_FORCEDID_TYPE_FID", columnNames = {"RESOURCE_TYPE", "FORCED_ID"})
}, indexes = {
/*
* NB: We previously had indexes named
* - IDX_FORCEDID_TYPE_FORCEDID
* - IDX_FORCEDID_TYPE_RESID
* so don't reuse these names
*/
})
//@formatter:on
public class ForcedId {
public static final int MAX_FORCED_ID_LENGTH = 100;
@ -57,11 +49,11 @@ public class ForcedId {
@Column(name = "PID")
private Long myId;
@JoinColumn(name = "RESOURCE_PID", nullable = false, updatable = false, foreignKey=@ForeignKey(name="FK_FORCEDID_RESOURCE"))
@JoinColumn(name = "RESOURCE_PID", nullable = false, updatable = false, foreignKey = @ForeignKey(name = "FK_FORCEDID_RESOURCE"))
@OneToOne()
private ResourceTable myResource;
@Column(name = "RESOURCE_PID", nullable = false, updatable = false, insertable=false)
@Column(name = "RESOURCE_PID", nullable = false, updatable = false, insertable = false)
private Long myResourcePid;
// This is updatable=true because it was added in 1.6 and needs to be set.. At some
@ -81,39 +73,39 @@ public class ForcedId {
return myForcedId;
}
public ResourceTable getResource() {
return myResource;
}
public Long getResourcePid() {
if (myResourcePid==null) {
return myResource.getId();
}
return myResourcePid;
}
public String getResourceType() {
return myResourceType;
}
public void setForcedId(String theForcedId) {
myForcedId = theForcedId;
}
public ResourceTable getResource() {
return myResource;
}
public void setResource(ResourceTable theResource) {
myResource = theResource;
}
public void setResourcePid(Long theResourcePid) {
myResourcePid = theResourcePid;
public Long getResourcePid() {
if (myResourcePid == null) {
return myResource.getId();
}
return myResourcePid;
}
public void setResourcePid(ResourceTable theResourcePid) {
myResource = theResourcePid;
}
public String getResourceType() {
return myResourceType;
}
public void setResourceType(String theResourceType) {
myResourceType = theResourceType;
}
public void setResourcePid(Long theResourcePid) {
myResourcePid = theResourcePid;
}
}

View File

@ -0,0 +1,41 @@
package ca.uhn.fhir.jpa.entity;
/*
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2018 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import java.util.Date;
import ca.uhn.fhir.context.FhirVersionEnum;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.model.primitive.InstantDt;
public interface IBaseResourceEntity {
Date getDeleted();
FhirVersionEnum getFhirVersion();
Long getId();
IdDt getIdDt();
InstantDt getPublished();
Long getResourceId();
String getResourceType();
InstantDt getUpdated();
Date getUpdatedDate();
long getVersion();
boolean isHasTags();
}
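This new interface abstracts over the entity types that can back a returned resource. BaseHasResource (and therefore ResourceTable) implements it per the hunk further up, and the loadResourcesByPid() change implies ResourceSearchView does too. A sketch of a consumer written against the abstraction (hypothetical describe() helper):

import ca.uhn.fhir.jpa.entity.IBaseResourceEntity;

public class EntityConsumerSketch {
	// Accepts a ResourceTable row or a ResourceSearchView projection alike
	static String describe(IBaseResourceEntity theEntity) {
		return theEntity.getResourceType() + "/" + theEntity.getIdDt().getIdPart()
			+ " v" + theEntity.getVersion()
			+ (theEntity.getDeleted() != null ? " (deleted)" : "");
	}
}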

View File

@ -31,30 +31,32 @@ import javax.persistence.*;
@Embeddable
@Entity
@Table(name = "HFJ_SPIDX_COORDS", indexes = {
@Index(name = "IDX_SP_COORDS", columnList = "RES_TYPE,SP_NAME,SP_LATITUDE,SP_LONGITUDE"),
@Index(name = "IDX_SP_COORDS_UPDATED", columnList = "SP_UPDATED"),
@Index(name = "IDX_SP_COORDS_RESID", columnList = "RES_ID")
@Table(name = "HFJ_SPIDX_COORDS", indexes = {
@Index(name = "IDX_SP_COORDS", columnList = "RES_TYPE,SP_NAME,SP_LATITUDE,SP_LONGITUDE"),
@Index(name = "IDX_SP_COORDS_UPDATED", columnList = "SP_UPDATED"),
@Index(name = "IDX_SP_COORDS_RESID", columnList = "RES_ID")
})
public class ResourceIndexedSearchParamCoords extends BaseResourceIndexedSearchParam {
public static final int MAX_LENGTH = 100;
private static final long serialVersionUID = 1L;
@Column(name = "SP_LATITUDE")
@Field
public double myLatitude;
@Column(name = "SP_LONGITUDE")
@Field
public double myLongitude;
@Id
@SequenceGenerator(name = "SEQ_SPIDX_COORDS", sequenceName = "SEQ_SPIDX_COORDS")
@GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_SPIDX_COORDS")
@Column(name = "SP_ID")
private Long myId;
@Column(name = "SP_LATITUDE")
@Field
public double myLatitude;
@Column(name = "SP_LONGITUDE")
@Field
public double myLongitude;
/**
* @since 3.5.0 - At some point this should be made not-null
*/
@Column(name = "HASH_IDENTITY", nullable = true)
private Long myHashIdentity;
public ResourceIndexedSearchParamCoords() {
}
@ -65,6 +67,20 @@ public class ResourceIndexedSearchParamCoords extends BaseResourceIndexedSearchP
setLongitude(theLongitude);
}
@PrePersist
public void calculateHashes() {
if (myHashIdentity == null) {
String resourceType = getResourceType();
String paramName = getParamName();
setHashIdentity(calculateHashIdentity(resourceType, paramName));
}
}
@Override
protected void clearHashes() {
myHashIdentity = null;
}
@Override
public boolean equals(Object theObj) {
if (this == theObj) {
@ -82,27 +98,39 @@ public class ResourceIndexedSearchParamCoords extends BaseResourceIndexedSearchP
b.append(getResource(), obj.getResource());
b.append(getLatitude(), obj.getLatitude());
b.append(getLongitude(), obj.getLongitude());
b.append(getHashIdentity(), obj.getHashIdentity());
return b.isEquals();
}
public Long getHashIdentity() {
return myHashIdentity;
}
public void setHashIdentity(Long theHashIdentity) {
myHashIdentity = theHashIdentity;
}
@Override
protected Long getId() {
return myId;
}
@Override
public IQueryParameterType toQueryParameterType() {
return null;
}
public double getLatitude() {
return myLatitude;
}
public void setLatitude(double theLatitude) {
myLatitude = theLatitude;
}
public double getLongitude() {
return myLongitude;
}
public void setLongitude(double theLongitude) {
myLongitude = theLongitude;
}
@Override
public int hashCode() {
HashCodeBuilder b = new HashCodeBuilder();
@ -113,12 +141,9 @@ public class ResourceIndexedSearchParamCoords extends BaseResourceIndexedSearchP
return b.toHashCode();
}
public void setLatitude(double theLatitude) {
myLatitude = theLatitude;
}
public void setLongitude(double theLongitude) {
myLongitude = theLongitude;
@Override
public IQueryParameterType toQueryParameterType() {
return null;
}
@Override

View File

@ -37,17 +37,14 @@ import java.util.Date;
@Embeddable
@Entity
@Table(name = "HFJ_SPIDX_DATE", indexes = {
@Index(name = "IDX_SP_DATE", columnList = "RES_TYPE,SP_NAME,SP_VALUE_LOW,SP_VALUE_HIGH"),
// @Index(name = "IDX_SP_DATE", columnList = "RES_TYPE,SP_NAME,SP_VALUE_LOW,SP_VALUE_HIGH"),
@Index(name = "IDX_SP_DATE_HASH", columnList = "HASH_IDENTITY,SP_VALUE_LOW,SP_VALUE_HIGH"),
@Index(name = "IDX_SP_DATE_UPDATED", columnList = "SP_UPDATED"),
@Index(name = "IDX_SP_DATE_RESID", columnList = "RES_ID")
})
public class ResourceIndexedSearchParamDate extends BaseResourceIndexedSearchParam {
private static final long serialVersionUID = 1L;
@Transient
private transient String myOriginalValue;
@Column(name = "SP_VALUE_HIGH", nullable = true)
@Temporal(TemporalType.TIMESTAMP)
@Field
@ -56,11 +53,18 @@ public class ResourceIndexedSearchParamDate extends BaseResourceIndexedSearchPar
@Temporal(TemporalType.TIMESTAMP)
@Field
public Date myValueLow;
@Transient
private transient String myOriginalValue;
@Id
@SequenceGenerator(name = "SEQ_SPIDX_DATE", sequenceName = "SEQ_SPIDX_DATE")
@GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_SPIDX_DATE")
@Column(name = "SP_ID")
private Long myId;
/**
* @since 3.5.0 - At some point this should be made not-null
*/
@Column(name = "HASH_IDENTITY", nullable = true)
private Long myHashIdentity;
/**
* Constructor
@ -79,6 +83,20 @@ public class ResourceIndexedSearchParamDate extends BaseResourceIndexedSearchPar
myOriginalValue = theOriginalValue;
}
@PrePersist
public void calculateHashes() {
if (myHashIdentity == null) {
String resourceType = getResourceType();
String paramName = getParamName();
setHashIdentity(calculateHashIdentity(resourceType, paramName));
}
}
@Override
protected void clearHashes() {
myHashIdentity = null;
}
@Override
public boolean equals(Object theObj) {
if (this == theObj) {
@ -97,9 +115,23 @@ public class ResourceIndexedSearchParamDate extends BaseResourceIndexedSearchPar
b.append(getResource(), obj.getResource());
b.append(getTimeFromDate(getValueHigh()), getTimeFromDate(obj.getValueHigh()));
b.append(getTimeFromDate(getValueLow()), getTimeFromDate(obj.getValueLow()));
b.append(getHashIdentity(), obj.getHashIdentity());
return b.isEquals();
}
public Long getHashIdentity() {
return myHashIdentity;
}
public void setHashIdentity(Long theHashIdentity) {
myHashIdentity = theHashIdentity;
}
@Override
protected Long getId() {
return myId;
}
protected Long getTimeFromDate(Date date) {
if (date != null) {
return date.getTime();
@ -107,11 +139,6 @@ public class ResourceIndexedSearchParamDate extends BaseResourceIndexedSearchPar
return null;
}
@Override
protected Long getId() {
return myId;
}
public Date getValueHigh() {
return myValueHigh;
}

View File

@ -34,15 +34,14 @@ import org.hibernate.search.annotations.NumericField;
import javax.persistence.*;
import java.math.BigDecimal;
//@formatter:off
@Embeddable
@Entity
@Table(name = "HFJ_SPIDX_NUMBER", indexes = {
@Index(name = "IDX_SP_NUMBER", columnList = "RES_TYPE,SP_NAME,SP_VALUE"),
// @Index(name = "IDX_SP_NUMBER", columnList = "RES_TYPE,SP_NAME,SP_VALUE"),
@Index(name = "IDX_SP_NUMBER_HASH_VAL", columnList = "HASH_IDENTITY,SP_VALUE"),
@Index(name = "IDX_SP_NUMBER_UPDATED", columnList = "SP_UPDATED"),
@Index(name = "IDX_SP_NUMBER_RESID", columnList = "RES_ID")
})
//@formatter:on
public class ResourceIndexedSearchParamNumber extends BaseResourceIndexedSearchParam {
private static final long serialVersionUID = 1L;
@ -56,6 +55,11 @@ public class ResourceIndexedSearchParamNumber extends BaseResourceIndexedSearchP
@GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_SPIDX_NUMBER")
@Column(name = "SP_ID")
private Long myId;
/**
* @since 3.5.0 - At some point this should be made not-null
*/
@Column(name = "HASH_IDENTITY", nullable = true)
private Long myHashIdentity;
public ResourceIndexedSearchParamNumber() {
}
@ -65,6 +69,20 @@ public class ResourceIndexedSearchParamNumber extends BaseResourceIndexedSearchP
setValue(theValue);
}
@PrePersist
public void calculateHashes() {
if (myHashIdentity == null) {
String resourceType = getResourceType();
String paramName = getParamName();
setHashIdentity(calculateHashIdentity(resourceType, paramName));
}
}
@Override
protected void clearHashes() {
myHashIdentity = null;
}
@Override
public boolean equals(Object theObj) {
if (this == theObj) {
@ -82,9 +100,18 @@ public class ResourceIndexedSearchParamNumber extends BaseResourceIndexedSearchP
b.append(getResource(), obj.getResource());
b.append(getValue(), obj.getValue());
b.append(isMissing(), obj.isMissing());
b.append(getHashIdentity(), obj.getHashIdentity());
return b.isEquals();
}
public Long getHashIdentity() {
return myHashIdentity;
}
public void setHashIdentity(Long theHashIdentity) {
myHashIdentity = theHashIdentity;
}
@Override
protected Long getId() {
return myId;

View File

@ -33,13 +33,14 @@ import org.hibernate.search.annotations.NumericField;
import javax.persistence.*;
import java.math.BigDecimal;
import java.math.RoundingMode;
//@formatter:off
@Embeddable
@Entity
@Table(name = "HFJ_SPIDX_QUANTITY", indexes = {
@Index(name = "IDX_SP_QUANTITY", columnList = "RES_TYPE,SP_NAME,SP_SYSTEM,SP_UNITS,SP_VALUE"),
// @Index(name = "IDX_SP_QUANTITY", columnList = "RES_TYPE,SP_NAME,SP_SYSTEM,SP_UNITS,SP_VALUE"),
@Index(name = "IDX_SP_QUANTITY_HASH", columnList = "HASH_IDENTITY,SP_VALUE"),
@Index(name = "IDX_SP_QUANTITY_HASH_UN", columnList = "HASH_IDENTITY_AND_UNITS,SP_VALUE"),
@Index(name = "IDX_SP_QUANTITY_UPDATED", columnList = "SP_UPDATED"),
@Index(name = "IDX_SP_QUANTITY_RESID", columnList = "RES_ID")
})
@ -66,20 +67,26 @@ public class ResourceIndexedSearchParamQuantity extends BaseResourceIndexedSearc
@Column(name = "SP_ID")
private Long myId;
/**
* @since 3.4.0 - At some point this should be made not-null
* @since 3.5.0 - At some point this should be made not-null
*/
@Column(name = "HASH_UNITS_AND_VALPREFIX", nullable = true)
private Long myHashUnitsAndValPrefix;
@Column(name = "HASH_IDENTITY_AND_UNITS", nullable = true)
private Long myHashIdentityAndUnits;
/**
* @since 3.4.0 - At some point this should be made not-null
* @since 3.5.0 - At some point this should be made not-null
*/
@Column(name = "HASH_VALPREFIX", nullable = true)
private Long myHashValPrefix;
@Column(name = "HASH_IDENTITY_SYS_UNITS", nullable = true)
private Long myHashIdentitySystemAndUnits;
/**
* @since 3.5.0 - At some point this should be made not-null
*/
@Column(name = "HASH_IDENTITY", nullable = true)
private Long myHashIdentity;
public ResourceIndexedSearchParamQuantity() {
// nothing
}
public ResourceIndexedSearchParamQuantity(String theParamName, BigDecimal theValue, String theSystem, String theUnits) {
setParamName(theParamName);
setSystem(theSystem);
@ -89,16 +96,21 @@ public class ResourceIndexedSearchParamQuantity extends BaseResourceIndexedSearc
@PrePersist
public void calculateHashes() {
if (myHashUnitsAndValPrefix == null) {
setHashUnitsAndValPrefix(hash(getResourceType(), getParamName(), getSystem(), getUnits(), toTruncatedString(getValue())));
setHashValPrefix(hash(getResourceType(), getParamName(), toTruncatedString(getValue())));
if (myHashIdentity == null) {
String resourceType = getResourceType();
String paramName = getParamName();
String units = getUnits();
String system = getSystem();
setHashIdentity(calculateHashIdentity(resourceType, paramName));
setHashIdentityAndUnits(calculateHashUnits(resourceType, paramName, units));
setHashIdentitySystemAndUnits(calculateHashSystemAndUnits(resourceType, paramName, system, units));
}
}
@Override
protected void clearHashes() {
myHashUnitsAndValPrefix = null;
myHashValPrefix = null;
myHashIdentity = null;
myHashIdentityAndUnits = null;
}
@Override
@ -119,27 +131,36 @@ public class ResourceIndexedSearchParamQuantity extends BaseResourceIndexedSearc
b.append(getSystem(), obj.getSystem());
b.append(getUnits(), obj.getUnits());
b.append(getValue(), obj.getValue());
b.append(getHashUnitsAndValPrefix(), obj.getHashUnitsAndValPrefix());
b.append(getHashValPrefix(), obj.getHashValPrefix());
b.append(getHashIdentity(), obj.getHashIdentity());
b.append(getHashIdentitySystemAndUnits(), obj.getHashIdentitySystemAndUnits());
b.append(getHashIdentityAndUnits(), obj.getHashIdentityAndUnits());
return b.isEquals();
}
public Long getHashUnitsAndValPrefix() {
public Long getHashIdentity() {
calculateHashes();
return myHashUnitsAndValPrefix;
return myHashIdentity;
}
public void setHashUnitsAndValPrefix(Long theHashUnitsAndValPrefix) {
myHashUnitsAndValPrefix = theHashUnitsAndValPrefix;
public void setHashIdentity(Long theHashIdentity) {
myHashIdentity = theHashIdentity;
}
public Long getHashValPrefix() {
public Long getHashIdentityAndUnits() {
calculateHashes();
return myHashValPrefix;
return myHashIdentityAndUnits;
}
public void setHashValPrefix(Long theHashValPrefix) {
myHashValPrefix = theHashValPrefix;
public void setHashIdentityAndUnits(Long theHashIdentityAndUnits) {
myHashIdentityAndUnits = theHashIdentityAndUnits;
}
private Long getHashIdentitySystemAndUnits() {
return myHashIdentitySystemAndUnits;
}
public void setHashIdentitySystemAndUnits(Long theHashIdentitySystemAndUnits) {
myHashIdentitySystemAndUnits = theHashIdentitySystemAndUnits;
}
@Override
@ -176,14 +197,13 @@ public class ResourceIndexedSearchParamQuantity extends BaseResourceIndexedSearc
@Override
public int hashCode() {
calculateHashes();
HashCodeBuilder b = new HashCodeBuilder();
b.append(getResourceType());
b.append(getParamName());
b.append(getResource());
b.append(getSystem());
b.append(getUnits());
b.append(getValue());
b.append(getHashUnitsAndValPrefix());
b.append(getHashValPrefix());
return b.toHashCode();
}
@ -201,14 +221,16 @@ public class ResourceIndexedSearchParamQuantity extends BaseResourceIndexedSearc
b.append("units", getUnits());
b.append("value", getValue());
b.append("missing", isMissing());
b.append("hashIdentitySystemAndUnits", myHashIdentitySystemAndUnits);
return b.build();
}
private static String toTruncatedString(BigDecimal theValue) {
if (theValue == null) {
return null;
}
return theValue.setScale(0, RoundingMode.FLOOR).toPlainString();
public static long calculateHashSystemAndUnits(String theResourceType, String theParamName, String theSystem, String theUnits) {
return hash(theResourceType, theParamName, theSystem, theUnits);
}
public static long calculateHashUnits(String theResourceType, String theParamName, String theUnits) {
return hash(theResourceType, theParamName, theUnits);
}
}
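Together with calculateHashIdentity() from the base class, the two helpers above give the three granularities a quantity token can be indexed and matched at. An illustrative computation for a search such as Observation?value-quantity=5.4|http://unitsofmeasure.org|mg, using only the static helpers shown in these hunks:

import ca.uhn.fhir.jpa.entity.BaseResourceIndexedSearchParam;
import ca.uhn.fhir.jpa.entity.ResourceIndexedSearchParamQuantity;

public class QuantityHashSketch {
	public static void main(String[] args) {
		// Param identity alone: any value-quantity row for Observation
		long hashIdentity = BaseResourceIndexedSearchParam.calculateHashIdentity(
			"Observation", "value-quantity");
		// Identity plus units: rows whose units are "mg", regardless of system
		long hashUnits = ResourceIndexedSearchParamQuantity.calculateHashUnits(
			"Observation", "value-quantity", "mg");
		// Identity plus system and units: the fully qualified token
		long hashSystemAndUnits = ResourceIndexedSearchParamQuantity.calculateHashSystemAndUnits(
			"Observation", "value-quantity", "http://unitsofmeasure.org", "mg");
		System.out.printf("%d %d %d%n", hashIdentity, hashUnits, hashSystemAndUnits);
	}
}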

View File

@ -20,6 +20,7 @@ package ca.uhn.fhir.jpa.entity;
* #L%
*/
import ca.uhn.fhir.jpa.dao.DaoConfig;
import ca.uhn.fhir.model.api.IQueryParameterType;
import ca.uhn.fhir.rest.param.StringParam;
import org.apache.commons.lang3.StringUtils;
@ -38,7 +39,14 @@ import static org.apache.commons.lang3.StringUtils.left;
@Embeddable
@Entity
@Table(name = "HFJ_SPIDX_STRING", indexes = {
@Index(name = "IDX_SP_STRING", columnList = "RES_TYPE,SP_NAME,SP_VALUE_NORMALIZED"),
/*
* Note: We previously had indexes with the following names,
* do not reuse these names:
* IDX_SP_STRING
*/
@Index(name = "IDX_SP_STRING_HASH_NRM", columnList = "HASH_NORM_PREFIX,SP_VALUE_NORMALIZED"),
@Index(name = "IDX_SP_STRING_HASH_EXCT", columnList = "HASH_EXACT"),
@Index(name = "IDX_SP_STRING_UPDATED", columnList = "SP_UPDATED"),
@Index(name = "IDX_SP_STRING_RESID", columnList = "RES_ID")
})
@ -127,13 +135,16 @@ public class ResourceIndexedSearchParamString extends BaseResourceIndexedSearchP
*/
@Column(name = "HASH_EXACT", nullable = true)
private Long myHashExact;
@Transient
private transient DaoConfig myDaoConfig;
public ResourceIndexedSearchParamString() {
super();
}
public ResourceIndexedSearchParamString(String theName, String theValueNormalized, String theValueExact) {
public ResourceIndexedSearchParamString(DaoConfig theDaoConfig, String theName, String theValueNormalized, String theValueExact) {
setDaoConfig(theDaoConfig);
setParamName(theName);
setValueNormalized(theValueNormalized);
setValueExact(theValueExact);
@ -141,9 +152,13 @@ public class ResourceIndexedSearchParamString extends BaseResourceIndexedSearchP
@PrePersist
public void calculateHashes() {
if (myHashNormalizedPrefix == null) {
setHashNormalizedPrefix(hash(getResourceType(), getParamName(), left(getValueNormalized(), HASH_PREFIX_LENGTH)));
setHashExact(hash(getResourceType(), getParamName(), getValueExact()));
if (myHashNormalizedPrefix == null && myDaoConfig != null) {
String resourceType = getResourceType();
String paramName = getParamName();
String valueNormalized = getValueNormalized();
String valueExact = getValueExact();
setHashNormalizedPrefix(calculateHashNormalized(myDaoConfig, resourceType, paramName, valueNormalized));
setHashExact(calculateHashExact(resourceType, paramName, valueExact));
}
}
@ -169,8 +184,8 @@ public class ResourceIndexedSearchParamString extends BaseResourceIndexedSearchP
b.append(getParamName(), obj.getParamName());
b.append(getResource(), obj.getResource());
b.append(getValueExact(), obj.getValueExact());
b.append(getHashNormalizedPrefix(), obj.getHashNormalizedPrefix());
b.append(getHashExact(), obj.getHashExact());
b.append(getHashNormalizedPrefix(), obj.getHashNormalizedPrefix());
return b.isEquals();
}
@ -225,11 +240,14 @@ public class ResourceIndexedSearchParamString extends BaseResourceIndexedSearchP
b.append(getParamName());
b.append(getResource());
b.append(getValueExact());
b.append(getHashNormalizedPrefix());
b.append(getHashExact());
return b.toHashCode();
}
public BaseResourceIndexedSearchParam setDaoConfig(DaoConfig theDaoConfig) {
myDaoConfig = theDaoConfig;
return this;
}
@Override
public IQueryParameterType toQueryParameterType() {
return new StringParam(getValueExact());
@ -244,4 +262,23 @@ public class ResourceIndexedSearchParamString extends BaseResourceIndexedSearchP
return b.build();
}
public static long calculateHashExact(String theResourceType, String theParamName, String theValueExact) {
return hash(theResourceType, theParamName, theValueExact);
}
public static long calculateHashNormalized(DaoConfig theDaoConfig, String theResourceType, String theParamName, String theValueNormalized) {
/*
 * If :contains searches are disabled, we include the first few characters
 * of the normalized value in the hash. That makes the hash more selective,
 * which is good for performance. When :contains searches are enabled we
 * can't include the prefix, because a contains match can occur anywhere
 * in the string, not just at its start.
 */
int hashPrefixLength = HASH_PREFIX_LENGTH;
if (theDaoConfig.isAllowContainsSearches()) {
hashPrefixLength = 0;
}
return hash(theResourceType, theParamName, left(theValueNormalized, hashPrefixLength));
}
}
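A worked illustration of the prefix logic above. DaoConfig's setAllowContainsSearches() setter is assumed to exist as the counterpart of the isAllowContainsSearches() getter used in the method, and "smith" stands in for an already-normalized value:

    DaoConfig config = new DaoConfig();
    config.setAllowContainsSearches(false);
    // Hashes ("Patient", "family", left("smith", HASH_PREFIX_LENGTH)) - prefix included
    long selective = ResourceIndexedSearchParamString.calculateHashNormalized(
        config, "Patient", "family", "smith");
    config.setAllowContainsSearches(true);
    // left(value, 0) is "", so the hash now identifies only (resourceType, paramName)
    long broad = ResourceIndexedSearchParamString.calculateHashNormalized(
        config, "Patient", "family", "smith");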

View File

@ -31,11 +31,23 @@ import org.hibernate.search.annotations.Field;
import javax.persistence.*;
import static org.apache.commons.lang3.StringUtils.defaultString;
import static org.apache.commons.lang3.StringUtils.trim;
@Embeddable
@Entity
@Table(name = "HFJ_SPIDX_TOKEN", indexes = {
@Index(name = "IDX_SP_TOKEN", columnList = "RES_TYPE,SP_NAME,SP_SYSTEM,SP_VALUE"),
@Index(name = "IDX_SP_TOKEN_UNQUAL", columnList = "RES_TYPE,SP_NAME,SP_VALUE"),
/*
* Note: We previously had indexes with the following names,
* do not reuse these names:
* IDX_SP_TOKEN
* IDX_SP_TOKEN_UNQUAL
*/
@Index(name = "IDX_SP_TOKEN_HASH", columnList = "HASH_IDENTITY"),
@Index(name = "IDX_SP_TOKEN_HASH_S", columnList = "HASH_SYS"),
@Index(name = "IDX_SP_TOKEN_HASH_SV", columnList = "HASH_SYS_AND_VALUE"),
@Index(name = "IDX_SP_TOKEN_HASH_V", columnList = "HASH_VALUE"),
@Index(name = "IDX_SP_TOKEN_UPDATED", columnList = "SP_UPDATED"),
@Index(name = "IDX_SP_TOKEN_RESID", columnList = "RES_ID")
})
@ -50,12 +62,18 @@ public class ResourceIndexedSearchParamToken extends BaseResourceIndexedSearchPa
public String mySystem;
@Field()
@Column(name = "SP_VALUE", nullable = true, length = MAX_LENGTH)
public String myValue;
private String myValue;
@SuppressWarnings("unused")
@Id
@SequenceGenerator(name = "SEQ_SPIDX_TOKEN", sequenceName = "SEQ_SPIDX_TOKEN")
@GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_SPIDX_TOKEN")
@Column(name = "SP_ID")
private Long myId;
/**
* @since 3.4.0 - At some point this should be made not-null
*/
@Column(name = "HASH_IDENTITY", nullable = true)
private Long myHashIdentity;
/**
* @since 3.4.0 - At some point this should be made not-null
*/
@ -90,17 +108,20 @@ public class ResourceIndexedSearchParamToken extends BaseResourceIndexedSearchPa
setValue(theValue);
}
@PrePersist
public void calculateHashes() {
if (myHashSystem == null) {
setHashSystem(hash(getResourceType(), getParamName(), getSystem()));
setHashSystemAndValue(hash(getResourceType(), getParamName(), getSystem(), getValue()));
setHashValue(hash(getResourceType(), getParamName(), getValue()));
String resourceType = getResourceType();
String paramName = getParamName();
String system = getSystem();
String value = getValue();
setHashIdentity(calculateHashIdentity(resourceType, paramName));
setHashSystem(calculateHashSystem(resourceType, paramName, system));
setHashSystemAndValue(calculateHashSystemAndValue(resourceType, paramName, system, value));
setHashValue(calculateHashValue(resourceType, paramName, value));
}
}
@Override
protected void clearHashes() {
myHashSystem = null;
@ -125,37 +146,47 @@ public class ResourceIndexedSearchParamToken extends BaseResourceIndexedSearchPa
b.append(getResource(), obj.getResource());
b.append(getSystem(), obj.getSystem());
b.append(getValue(), obj.getValue());
b.append(getHashIdentity(), obj.getHashIdentity());
b.append(getHashSystem(), obj.getHashSystem());
b.append(getHashSystemAndValue(), obj.getHashSystemAndValue());
b.append(getHashValue(), obj.getHashValue());
return b.isEquals();
}
public Long getHashSystem() {
Long getHashSystem() {
calculateHashes();
return myHashSystem;
}
public void setHashSystem(Long theHashSystem) {
private Long getHashIdentity() {
calculateHashes();
return myHashIdentity;
}
private void setHashIdentity(Long theHashIdentity) {
myHashIdentity = theHashIdentity;
}
private void setHashSystem(Long theHashSystem) {
myHashSystem = theHashSystem;
}
public Long getHashSystemAndValue() {
Long getHashSystemAndValue() {
calculateHashes();
return myHashSystemAndValue;
}
public void setHashSystemAndValue(Long theHashSystemAndValue) {
private void setHashSystemAndValue(Long theHashSystemAndValue) {
calculateHashes();
myHashSystemAndValue = theHashSystemAndValue;
}
public Long getHashValue() {
Long getHashValue() {
calculateHashes();
return myHashValue;
}
public void setHashValue(Long theHashValue) {
private void setHashValue(Long theHashValue) {
myHashValue = theHashValue;
}
@ -184,18 +215,15 @@ public class ResourceIndexedSearchParamToken extends BaseResourceIndexedSearchPa
@Override
public int hashCode() {
calculateHashes();
HashCodeBuilder b = new HashCodeBuilder();
b.append(getParamName());
b.append(getResource());
b.append(getSystem());
b.append(getValue());
b.append(getHashSystem());
b.append(getHashSystemAndValue());
b.append(getHashValue());
return b.toHashCode();
}
@Override
public IQueryParameterType toQueryParameterType() {
return new TokenParam(getSystem(), getValue());
@ -210,4 +238,16 @@ public class ResourceIndexedSearchParamToken extends BaseResourceIndexedSearchPa
b.append("value", getValue());
return b.build();
}
public static long calculateHashSystem(String theResourceType, String theParamName, String theSystem) {
return hash(theResourceType, theParamName, trim(theSystem));
}
public static long calculateHashSystemAndValue(String theResourceType, String theParamName, String theSystem, String theValue) {
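// Note: defaultString() maps a null system to "", so tokens indexed without a
// system still hash consistently with queries that pass an empty system.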
return hash(theResourceType, theParamName, defaultString(trim(theSystem)), trim(theValue));
}
public static long calculateHashValue(String theResourceType, String theParamName, String theValue) {
return hash(theResourceType, theParamName, trim(theValue));
}
}

View File

@ -34,6 +34,8 @@ import javax.persistence.*;
@Entity
@Table(name = "HFJ_SPIDX_URI", indexes = {
@Index(name = "IDX_SP_URI", columnList = "RES_TYPE,SP_NAME,SP_URI"),
@Index(name = "IDX_SP_URI_HASH_IDENTITY", columnList = "HASH_IDENTITY,SP_URI"),
@Index(name = "IDX_SP_URI_HASH_URI", columnList = "HASH_URI"),
@Index(name = "IDX_SP_URI_RESTYPE_NAME", columnList = "RES_TYPE,SP_NAME"),
@Index(name = "IDX_SP_URI_UPDATED", columnList = "SP_UPDATED"),
@Index(name = "IDX_SP_URI_COORDS", columnList = "RES_ID")
@ -59,11 +61,17 @@ public class ResourceIndexedSearchParamUri extends BaseResourceIndexedSearchPara
*/
@Column(name = "HASH_URI", nullable = true)
private Long myHashUri;
/**
* @since 3.5.0 - At some point this should be made not-null
*/
@Column(name = "HASH_IDENTITY", nullable = true)
private Long myHashIdentity;
/**
* Constructor
*/
public ResourceIndexedSearchParamUri() {
super();
}
/**
@ -77,7 +85,11 @@ public class ResourceIndexedSearchParamUri extends BaseResourceIndexedSearchPara
@PrePersist
public void calculateHashes() {
if (myHashUri == null) {
setHashUri(hash(getResourceType(), getParamName(), getUri()));
String resourceType = getResourceType();
String paramName = getParamName();
String uri = getUri();
setHashIdentity(calculateHashIdentity(resourceType, paramName));
setHashUri(calculateHashUri(resourceType, paramName, uri));
}
}
@ -103,9 +115,18 @@ public class ResourceIndexedSearchParamUri extends BaseResourceIndexedSearchPara
b.append(getResource(), obj.getResource());
b.append(getUri(), obj.getUri());
b.append(getHashUri(), obj.getHashUri());
b.append(getHashIdentity(), obj.getHashIdentity());
return b.isEquals();
}
private Long getHashIdentity() {
return myHashIdentity;
}
private void setHashIdentity(long theHashIdentity) {
myHashIdentity = theHashIdentity;
}
public Long getHashUri() {
calculateHashes();
return myHashUri;
@ -153,4 +174,8 @@ public class ResourceIndexedSearchParamUri extends BaseResourceIndexedSearchPara
return b.toString();
}
public static long calculateHashUri(String theResourceType, String theParamName, String theUri) {
return hash(theResourceType, theParamName, theUri);
}
}

View File

@ -66,14 +66,12 @@ public class ResourceLink implements Serializable {
@ManyToOne(optional = false, fetch=FetchType.LAZY)
@JoinColumn(name = "SRC_RESOURCE_ID", referencedColumnName = "RES_ID", nullable = false, foreignKey=@ForeignKey(name="FK_RESLINK_SOURCE"))
// @ContainedIn()
private ResourceTable mySourceResource;
@Column(name = "SRC_RESOURCE_ID", insertable = false, updatable = false, nullable = false)
private Long mySourceResourcePid;
@Column(name = "SOURCE_RESOURCE_TYPE", nullable=false, length=ResourceTable.RESTYPE_LEN)
@ColumnDefault("''") // TODO: remove this (it's only here for simplifying upgrades of 1.3 -> 1.4)
@Field()
private String mySourceResourceType;
@ -86,7 +84,6 @@ public class ResourceLink implements Serializable {
private Long myTargetResourcePid;
@Column(name = "TARGET_RESOURCE_TYPE", nullable=false, length=ResourceTable.RESTYPE_LEN)
@ColumnDefault("''") // TODO: remove this (it's only here for simplifying upgrades of 1.3 -> 1.4)
@Field()
private String myTargetResourceType;

View File

@ -0,0 +1,200 @@
package ca.uhn.fhir.jpa.entity;
/*
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2018 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import java.io.Serializable;
import java.util.Date;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.EnumType;
import javax.persistence.Enumerated;
import javax.persistence.Id;
import javax.persistence.Lob;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;
import org.hibernate.annotations.Immutable;
import org.hibernate.annotations.Subselect;
import ca.uhn.fhir.context.FhirVersionEnum;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.model.primitive.InstantDt;
import ca.uhn.fhir.rest.api.Constants;
//@formatter:off
@Entity
@Immutable
@Subselect("SELECT h.pid as pid " +
", h.res_id as res_id " +
", h.res_type as res_type " +
", h.res_version as res_version " + // FHIR version
", h.res_ver as res_ver " + // resource version
", h.has_tags as has_tags " +
", h.res_deleted_at as res_deleted_at " +
", h.res_published as res_published " +
", h.res_updated as res_updated " +
", h.res_text as res_text " +
", h.res_encoding as res_encoding " +
", f.forced_id as forced_pid " +
"FROM HFJ_RES_VER h "
+ " LEFT OUTER JOIN HFJ_FORCED_ID f ON f.resource_pid = h.res_id "
+ " INNER JOIN HFJ_RESOURCE r ON r.res_id = h.res_id and r.res_ver = h.res_ver")
// @formatter:on
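// The INNER JOIN on r.res_ver = h.res_ver limits this view to the current
// version row of each resource; the LEFT OUTER JOIN picks up a forced ID
// when one exists.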
public class ResourceSearchView implements IBaseResourceEntity, Serializable {
private static final long serialVersionUID = 1L;
@Id
@Column(name = "PID")
private Long myId;
@Column(name = "RES_ID")
private Long myResourceId;
@Column(name = "RES_TYPE")
private String myResourceType;
@Column(name = "RES_VERSION")
@Enumerated(EnumType.STRING)
private FhirVersionEnum myFhirVersion;
@Column(name = "RES_VER")
private Long myResourceVersion;
@Column(name = "HAS_TAGS")
private boolean myHasTags;
@Column(name = "RES_DELETED_AT")
@Temporal(TemporalType.TIMESTAMP)
private Date myDeleted;
@Temporal(TemporalType.TIMESTAMP)
@Column(name = "RES_PUBLISHED")
private Date myPublished;
@Temporal(TemporalType.TIMESTAMP)
@Column(name = "RES_UPDATED")
private Date myUpdated;
@Column(name = "RES_TEXT")
@Lob()
private byte[] myResource;
@Column(name = "RES_ENCODING")
@Enumerated(EnumType.STRING)
private ResourceEncodingEnum myEncoding;
@Column(name = "forced_pid")
private String myForcedPid;
public ResourceSearchView() {
}
@Override
public Date getDeleted() {
return myDeleted;
}
public void setDeleted(Date theDate) {
myDeleted = theDate;
}
@Override
public FhirVersionEnum getFhirVersion() {
return myFhirVersion;
}
public void setFhirVersion(FhirVersionEnum theFhirVersion) {
myFhirVersion = theFhirVersion;
}
public String getForcedId() {
return myForcedPid;
}
@Override
public Long getId() {
return myResourceId;
}
@Override
public IdDt getIdDt() {
if (myForcedPid == null) {
Long id = myResourceId;
return new IdDt(myResourceType + '/' + id + '/' + Constants.PARAM_HISTORY + '/' + getVersion());
} else {
return new IdDt(
getResourceType() + '/' + getForcedId() + '/' + Constants.PARAM_HISTORY + '/' + getVersion());
}
}
@Override
public InstantDt getPublished() {
if (myPublished != null) {
return new InstantDt(myPublished);
} else {
return null;
}
}
public void setPublished(Date thePublished) {
myPublished = thePublished;
}
@Override
public Long getResourceId() {
return myResourceId;
}
@Override
public String getResourceType() {
return myResourceType;
}
@Override
public InstantDt getUpdated() {
return new InstantDt(myUpdated);
}
@Override
public Date getUpdatedDate() {
return myUpdated;
}
@Override
public long getVersion() {
return myResourceVersion;
}
@Override
public boolean isHasTags() {
return myHasTags;
}
public byte[] getResource() {
return myResource;
}
public ResourceEncodingEnum getEncoding() {
return myEncoding;
}
}

View File

@ -94,10 +94,9 @@ public class Search implements Serializable {
@OneToMany(mappedBy="mySearch")
private Collection<SearchResult> myResults;
// TODO: change nullable to false after 2.5
@NotNull
@Temporal(TemporalType.TIMESTAMP)
@Column(name="SEARCH_LAST_RETURNED", nullable=true, updatable=false)
@Column(name="SEARCH_LAST_RETURNED", nullable=false, updatable=false)
private Date mySearchLastReturned;
@Lob()

View File

@ -1,59 +0,0 @@
package ca.uhn.fhir.jpa.entity;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2018 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import javax.persistence.*;
@Entity
@Table(name = "HFJ_SEARCH_PARM", uniqueConstraints= {
@UniqueConstraint(name="IDX_SEARCHPARM_RESTYPE_SPNAME", columnNames= {"RES_TYPE", "PARAM_NAME"})
})
public class SearchParam {
@Id
@SequenceGenerator(name = "SEQ_SEARCHPARM_ID", sequenceName = "SEQ_SEARCHPARM_ID")
@GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_SEARCHPARM_ID")
@Column(name = "PID")
private Long myId;
@Column(name="PARAM_NAME", length=BaseResourceIndexedSearchParam.MAX_SP_NAME, nullable=false, updatable=false)
private String myParamName;
@Column(name="RES_TYPE", length=ResourceTable.RESTYPE_LEN, nullable=false, updatable=false)
private String myResourceName;
public String getParamName() {
return myParamName;
}
public void setParamName(String theParamName) {
myParamName = theParamName;
}
public void setResourceName(String theResourceName) {
myResourceName = theResourceName;
}
public Long getId() {
return myId;
}
}

View File

@ -20,18 +20,16 @@ package ca.uhn.fhir.jpa.entity;
* #L%
*/
import java.io.Serializable;
import javax.persistence.*;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import javax.persistence.*;
import java.io.Serializable;
@Entity
@Table(name = "HFJ_RES_PARAM_PRESENT", indexes = {
@Index(name = "IDX_RESPARMPRESENT_RESID", columnList = "RES_ID")
}, uniqueConstraints = {
@UniqueConstraint(name = "IDX_RESPARMPRESENT_SPID_RESID", columnNames = { "SP_ID", "RES_ID" })
@Index(name = "IDX_RESPARMPRESENT_RESID", columnList = "RES_ID"),
@Index(name = "IDX_RESPARMPRESENT_HASHPRES", columnList = "HASH_PRESENCE")
})
public class SearchParamPresent implements Serializable {
@ -42,17 +40,15 @@ public class SearchParamPresent implements Serializable {
@GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_RESPARMPRESENT_ID")
@Column(name = "PID")
private Long myId;
@Column(name = "SP_PRESENT", nullable = false)
private boolean myPresent;
@ManyToOne()
@JoinColumn(name = "RES_ID", referencedColumnName = "RES_ID", nullable = false, foreignKey = @ForeignKey(name = "FK_RESPARMPRES_RESID"))
private ResourceTable myResource;
@ManyToOne()
@JoinColumn(name = "SP_ID", referencedColumnName = "PID", nullable = false, foreignKey = @ForeignKey(name = "FK_RESPARMPRES_SPID"))
private SearchParam mySearchParam;
@Transient
private transient String myParamName;
@Column(name = "HASH_PRESENCE")
private Long myHashPresence;
/**
* Constructor
@ -60,13 +56,40 @@ public class SearchParamPresent implements Serializable {
public SearchParamPresent() {
super();
}
@SuppressWarnings("unused")
@PrePersist
public void calculateHashes() {
if (myHashPresence == null) {
String resourceType = getResource().getResourceType();
String paramName = getParamName();
boolean present = myPresent;
setHashPresence(calculateHashPresence(resourceType, paramName, present));
}
}
public Long getHashPresence() {
return myHashPresence;
}
public void setHashPresence(Long theHashPresence) {
myHashPresence = theHashPresence;
}
public String getParamName() {
return myParamName;
}
public void setParamName(String theParamName) {
myParamName = theParamName;
}
public ResourceTable getResource() {
return myResource;
}
public SearchParam getSearchParam() {
return mySearchParam;
public void setResource(ResourceTable theResourceTable) {
myResource = theResourceTable;
}
public boolean isPresent() {
@ -77,22 +100,18 @@ public class SearchParamPresent implements Serializable {
myPresent = thePresent;
}
public void setResource(ResourceTable theResourceTable) {
myResource = theResourceTable;
}
public void setSearchParam(SearchParam theSearchParam) {
mySearchParam = theSearchParam;
}
@Override
public String toString() {
ToStringBuilder b = new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE);
b.append("res_pid", myResource.getIdDt().toUnqualifiedVersionless().getValue());
b.append("param", mySearchParam.getParamName());
b.append("resPid", myResource.getIdDt().toUnqualifiedVersionless().getValue());
b.append("paramName", myParamName);
b.append("present", myPresent);
return b.build();
}
public static long calculateHashPresence(String theResourceType, String theParamName, boolean thePresent) {
return BaseResourceIndexedSearchParam.hash(theResourceType, theParamName, Boolean.toString(thePresent));
}
}

View File

@ -45,7 +45,8 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank;
@Table(name = "TRM_CONCEPT", uniqueConstraints = {
@UniqueConstraint(name = "IDX_CONCEPT_CS_CODE", columnNames = {"CODESYSTEM_PID", "CODE"})
}, indexes = {
@Index(name = "IDX_CONCEPT_INDEXSTATUS", columnList = "INDEX_STATUS")
@Index(name = "IDX_CONCEPT_INDEXSTATUS", columnList = "INDEX_STATUS"),
@Index(name = "IDX_CONCEPT_UPDATED", columnList = "CONCEPT_UPDATED")
})
public class TermConcept implements Serializable {
protected static final int MAX_DESC_LENGTH = 400;
@ -59,15 +60,15 @@ public class TermConcept implements Serializable {
@Column(name = "CODE", length = 100, nullable = false)
@Fields({@Field(name = "myCode", index = org.hibernate.search.annotations.Index.YES, store = Store.YES, analyze = Analyze.YES, analyzer = @Analyzer(definition = "exactAnalyzer")),})
private String myCode;
@Temporal(TemporalType.TIMESTAMP)
@Column(name = "CONCEPT_UPDATED", nullable = true)
private Date myUpdated;
@ManyToOne()
@JoinColumn(name = "CODESYSTEM_PID", referencedColumnName = "PID", foreignKey = @ForeignKey(name = "FK_CONCEPT_PID_CS_PID"))
private TermCodeSystemVersion myCodeSystem;
@Column(name = "CODESYSTEM_PID", insertable = false, updatable = false)
@Fields({@Field(name = "myCodeSystemVersionPid")})
private long myCodeSystemVersionPid;
@Column(name = "DISPLAY", length = MAX_DESC_LENGTH, nullable = true)
@Fields({
@Field(name = "myDisplay", index = org.hibernate.search.annotations.Index.YES, store = Store.YES, analyze = Analyze.YES, analyzer = @Analyzer(definition = "standardAnalyzer")),
@ -76,15 +77,12 @@ public class TermConcept implements Serializable {
@Field(name = "myDisplayPhonetic", index = org.hibernate.search.annotations.Index.YES, store = Store.NO, analyze = Analyze.YES, analyzer = @Analyzer(definition = "autocompletePhoneticAnalyzer"))
})
private String myDisplay;
@OneToMany(mappedBy = "myConcept", orphanRemoval = true)
@Field
@OneToMany(mappedBy = "myConcept", orphanRemoval = false)
@Field(name = "PROPmyProperties", analyzer = @Analyzer(definition = "termConceptPropertyAnalyzer"))
@FieldBridge(impl = TermConceptPropertyFieldBridge.class)
private Collection<TermConceptProperty> myProperties;
@OneToMany(mappedBy = "myConcept", orphanRemoval = true)
@OneToMany(mappedBy = "myConcept", orphanRemoval = false)
private Collection<TermConceptDesignation> myDesignations;
@Id()
@SequenceGenerator(name = "SEQ_CONCEPT_PID", sequenceName = "SEQ_CONCEPT_PID")
@GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_CONCEPT_PID")
@ -92,18 +90,17 @@ public class TermConcept implements Serializable {
private Long myId;
@Column(name = "INDEX_STATUS", nullable = true)
private Long myIndexStatus;
@Transient
@Field(name = "myParentPids", index = org.hibernate.search.annotations.Index.YES, store = Store.YES, analyze = Analyze.YES, analyzer = @Analyzer(definition = "conceptParentPidsAnalyzer"))
@Lob
@Column(name="PARENT_PIDS", nullable = true)
private String myParentPids;
@OneToMany(cascade = {}, fetch = FetchType.LAZY, mappedBy = "myChild")
private Collection<TermConceptParentChildLink> myParents;
@Column(name = "CODE_SEQUENCE", nullable = true)
private Integer mySequence;
public TermConcept() {
super();
}
public TermConcept(TermCodeSystemVersion theCs, String theCode) {
setCodeSystemVersion(theCs);
setCode(theCode);
@ -130,6 +127,7 @@ public class TermConcept implements Serializable {
public TermConceptDesignation addDesignation() {
TermConceptDesignation designation = new TermConceptDesignation();
designation.setConcept(this);
designation.setCodeSystemVersion(myCodeSystem);
getDesignations().add(designation);
return designation;
}
@ -139,6 +137,7 @@ public class TermConcept implements Serializable {
TermConceptProperty property = new TermConceptProperty();
property.setConcept(this);
property.setCodeSystemVersion(myCodeSystem);
property.setType(thePropertyType);
property.setKey(thePropertyName);
property.setValue(thePropertyValue);
@ -294,6 +293,14 @@ public class TermConcept implements Serializable {
return null;
}
public Date getUpdated() {
return myUpdated;
}
public void setUpdated(Date theUpdated) {
myUpdated = theUpdated;
}
@Override
public int hashCode() {
HashCodeBuilder b = new HashCodeBuilder();

View File

@ -48,6 +48,14 @@ public class TermConceptDesignation implements Serializable {
private String myUseDisplay;
@Column(name = "VAL", length = 500, nullable = false)
private String myValue;
/**
* TODO: Make this non-null
*
* @since 3.5.0
*/
@ManyToOne
@JoinColumn(name = "CS_VER_PID", nullable = true, referencedColumnName = "PID", foreignKey = @ForeignKey(name = "FK_CONCEPTDESIG_CSV"))
private TermCodeSystemVersion myCodeSystemVersion;
public String getLanguage() {
return myLanguage;
@ -94,6 +102,11 @@ public class TermConceptDesignation implements Serializable {
return this;
}
public TermConceptDesignation setCodeSystemVersion(TermCodeSystemVersion theCodeSystemVersion) {
myCodeSystemVersion = theCodeSystemVersion;
return this;
}
public TermConceptDesignation setConcept(TermConcept theConcept) {
myConcept = theConcept;
return this;

View File

@ -38,6 +38,14 @@ public class TermConceptProperty implements Serializable {
@ManyToOne
@JoinColumn(name = "CONCEPT_PID", referencedColumnName = "PID", foreignKey = @ForeignKey(name = "FK_CONCEPTPROP_CONCEPT"))
private TermConcept myConcept;
/**
* TODO: Make this non-null
*
* @since 3.5.0
*/
@ManyToOne
@JoinColumn(name = "CS_VER_PID", nullable = true, referencedColumnName = "PID", foreignKey = @ForeignKey(name = "FK_CONCEPTPROP_CSV"))
private TermCodeSystemVersion myCodeSystemVersion;
@Id()
@SequenceGenerator(name = "SEQ_CONCEPT_PROP_PID", sequenceName = "SEQ_CONCEPT_PROP_PID")
@GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_CONCEPT_PROP_PID")
@ -124,6 +132,11 @@ public class TermConceptProperty implements Serializable {
myValue = theValue;
}
public TermConceptProperty setCodeSystemVersion(TermCodeSystemVersion theCodeSystemVersion) {
myCodeSystemVersion = theCodeSystemVersion;
return this;
}
public void setConcept(TermConcept theConcept) {
myConcept = theConcept;
}

View File

@ -21,18 +21,22 @@ package ca.uhn.fhir.jpa.entity;
*/
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
import org.hibernate.search.bridge.FieldBridge;
import org.hibernate.search.bridge.LuceneOptions;
import org.hibernate.search.bridge.StringBridge;
import java.util.Collection;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
/**
* Allows Hibernate Search to index individual concepts' properties
*/
public class TermConceptPropertyFieldBridge implements FieldBridge, StringBridge {
public static final String PROP_PREFIX = "PROP__";
public static final String CONCEPT_FIELD_PROPERTY_PREFIX = "PROP";
/**
* Constructor
@ -48,15 +52,17 @@ public class TermConceptPropertyFieldBridge implements FieldBridge, StringBridge
@Override
public void set(String theName, Object theValue, Document theDocument, LuceneOptions theLuceneOptions) {
@SuppressWarnings("unchecked")
Collection<TermConceptProperty> properties = (Collection<TermConceptProperty>) theValue;
if (properties != null) {
for (TermConceptProperty next : properties) {
String propValue = next.getKey() + "=" + next.getValue();
theLuceneOptions.addFieldToDocument(theName, propValue, theDocument);
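// Besides the key=value token added to the main field above, each property is
// also indexed under its own "PROP<key>" StringField, which is what the
// property filters in BaseHapiTerminologySvcImpl query with Term lookups.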
theDocument.add(new StringField(CONCEPT_FIELD_PROPERTY_PREFIX + next.getKey(), next.getValue(), Field.Store.YES));
if (next.getType() == TermConceptPropertyTypeEnum.CODING) {
propValue = next.getKey() + "=" + next.getDisplay();
theLuceneOptions.addFieldToDocument(theName, propValue, theDocument);
if (isNotBlank(next.getDisplay())) {
theDocument.add(new StringField(CONCEPT_FIELD_PROPERTY_PREFIX + next.getKey(), next.getDisplay(), Field.Store.YES));
}
}
}
}

View File

@ -0,0 +1,61 @@
package ca.uhn.fhir.jpa.provider;
import ca.uhn.fhir.jpa.dao.IFhirResourceDaoMessageHeader;
import ca.uhn.fhir.jpa.util.JpaConstants;
import ca.uhn.fhir.model.api.annotation.Description;
import ca.uhn.fhir.model.dstu2.resource.Bundle;
import ca.uhn.fhir.model.dstu2.resource.MessageHeader;
import ca.uhn.fhir.rest.annotation.Operation;
import ca.uhn.fhir.rest.annotation.OperationParam;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import org.hl7.fhir.instance.model.api.IBaseBundle;
import javax.servlet.http.HttpServletRequest;
/*
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2018 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
public class BaseJpaResourceProviderMessageHeaderDstu2 extends JpaResourceProviderDstu2<MessageHeader> {
/**
* /MessageHeader/$process-message
*/
@Operation(name = JpaConstants.OPERATION_PROCESS_MESSAGE, idempotent = false)
public IBaseBundle processMessage(
HttpServletRequest theServletRequest,
RequestDetails theRequestDetails,
@OperationParam(name = "content", min = 1, max = 1)
@Description(formalDefinition = "The message to process (or, if using asynchronous messaging, it may be a response message to accept)")
Bundle theMessageToProcess
) {
startRequest(theServletRequest);
try {
return ((IFhirResourceDaoMessageHeader<MessageHeader>) getDao()).messageHeaderProcessMessage(theRequestDetails, theMessageToProcess);
} finally {
endRequest(theServletRequest);
}
}
}
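For context, this operation is invoked against the server root as $process-message. A hedged client-side sketch follows; the endpoint URL is a placeholder, and the operation name is assumed to be what JpaConstants.OPERATION_PROCESS_MESSAGE resolves to:

    import ca.uhn.fhir.context.FhirContext;
    import ca.uhn.fhir.model.dstu2.resource.Bundle;
    import ca.uhn.fhir.model.dstu2.resource.Parameters;
    import ca.uhn.fhir.rest.client.api.IGenericClient;

    FhirContext ctx = FhirContext.forDstu2();
    IGenericClient client = ctx.newRestfulGenericClient("http://localhost:8080/fhir"); // placeholder
    Bundle message = new Bundle(); // message bundle; the first entry would be a MessageHeader
    Parameters outcome = client.operation()
        .onServer()
        .named("$process-message")
        .withParameter(Parameters.class, "content", message)
        .execute();

The DSTU3 and R4 providers below are invoked the same way, with the corresponding model classes.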

View File

@ -0,0 +1,61 @@
package ca.uhn.fhir.jpa.provider.dstu3;
import ca.uhn.fhir.jpa.dao.IFhirResourceDaoMessageHeader;
import ca.uhn.fhir.jpa.util.JpaConstants;
import ca.uhn.fhir.model.api.annotation.Description;
import ca.uhn.fhir.rest.annotation.Operation;
import ca.uhn.fhir.rest.annotation.OperationParam;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import org.hl7.fhir.dstu3.model.Bundle;
import org.hl7.fhir.dstu3.model.MessageHeader;
import org.hl7.fhir.instance.model.api.IBaseBundle;
import javax.servlet.http.HttpServletRequest;
/*
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2018 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
public class BaseJpaResourceProviderMessageHeaderDstu3 extends JpaResourceProviderDstu3<MessageHeader> {
/**
* /MessageHeader/$process-message
*/
@Operation(name = JpaConstants.OPERATION_PROCESS_MESSAGE, idempotent = false)
public IBaseBundle processMessage(
HttpServletRequest theServletRequest,
RequestDetails theRequestDetails,
@OperationParam(name = "content", min = 1, max = 1)
@Description(formalDefinition = "The message to process (or, if using asynchronous messaging, it may be a response message to accept)")
Bundle theMessageToProcess
) {
startRequest(theServletRequest);
try {
return ((IFhirResourceDaoMessageHeader<MessageHeader>) getDao()).messageHeaderProcessMessage(theRequestDetails, theMessageToProcess);
} finally {
endRequest(theServletRequest);
}
}
}

View File

@ -0,0 +1,61 @@
package ca.uhn.fhir.jpa.provider.r4;
import ca.uhn.fhir.jpa.dao.IFhirResourceDaoMessageHeader;
import ca.uhn.fhir.jpa.util.JpaConstants;
import ca.uhn.fhir.model.api.annotation.Description;
import ca.uhn.fhir.rest.annotation.Operation;
import ca.uhn.fhir.rest.annotation.OperationParam;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import org.hl7.fhir.instance.model.api.IBaseBundle;
import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.MessageHeader;
import javax.servlet.http.HttpServletRequest;
/*
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2018 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
public class BaseJpaResourceProviderMessageHeaderR4 extends JpaResourceProviderR4<MessageHeader> {
/**
* /MessageHeader/$process-message
*/
@Operation(name = JpaConstants.OPERATION_PROCESS_MESSAGE, idempotent = false)
public IBaseBundle processMessage(
HttpServletRequest theServletRequest,
RequestDetails theRequestDetails,
@OperationParam(name = "content", min = 1, max = 1)
@Description(formalDefinition = "The message to process (or, if using asynchronous messaging, it may be a response message to accept)")
Bundle theMessageToProcess
) {
startRequest(theServletRequest);
try {
return ((IFhirResourceDaoMessageHeader<MessageHeader>) getDao()).messageHeaderProcessMessage(theRequestDetails, theMessageToProcess);
} finally {
endRequest(theServletRequest);
}
}
}

View File

@ -20,9 +20,7 @@ package ca.uhn.fhir.jpa.search;
* #L%
*/
import org.apache.lucene.analysis.core.LowerCaseFilterFactory;
import org.apache.lucene.analysis.core.StopFilterFactory;
import org.apache.lucene.analysis.core.WhitespaceTokenizerFactory;
import org.apache.lucene.analysis.core.*;
import org.apache.lucene.analysis.miscellaneous.WordDelimiterFilterFactory;
import org.apache.lucene.analysis.ngram.EdgeNGramFilterFactory;
import org.apache.lucene.analysis.ngram.NGramFilterFactory;
@ -65,8 +63,9 @@ public class LuceneSearchMappingFactory {
.param("maxGramSize", "20")
.analyzerDef("standardAnalyzer", StandardTokenizerFactory.class)
.filter(LowerCaseFilterFactory.class)
.analyzerDef("exactAnalyzer", StandardTokenizerFactory.class)
.analyzerDef("conceptParentPidsAnalyzer", WhitespaceTokenizerFactory.class);
.analyzerDef("exactAnalyzer", KeywordTokenizerFactory.class)
.analyzerDef("conceptParentPidsAnalyzer", WhitespaceTokenizerFactory.class)
.analyzerDef("termConceptPropertyAnalyzer", WhitespaceTokenizerFactory.class);
return mapping;
}

View File

@ -178,9 +178,9 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
@Override
public List<Long> doInTransaction(TransactionStatus theStatus) {
final List<Long> resultPids = new ArrayList<Long>();
Page<SearchResult> searchResults = mySearchResultDao.findWithSearchUuid(foundSearch, page);
for (SearchResult next : searchResults) {
resultPids.add(next.getResourcePid());
Page<Long> searchResultPids = mySearchResultDao.findWithSearchUuid(foundSearch, page);
for (Long next : searchResultPids) {
resultPids.add(next);
}
return resultPids;
}

View File

@ -20,14 +20,12 @@ package ca.uhn.fhir.jpa.sp;
* #L%
*/
import java.util.Map;
import ca.uhn.fhir.jpa.entity.ResourceTable;
import java.util.Map;
public interface ISearchParamPresenceSvc {
void updatePresence(ResourceTable theResource, Map<String, Boolean> theParamNameToPresence);
void flushCachesForUnitTest();
}

View File

@ -20,29 +20,17 @@ package ca.uhn.fhir.jpa.sp;
* #L%
*/
import java.util.*;
import java.util.Map.Entry;
import java.util.concurrent.ConcurrentHashMap;
import ca.uhn.fhir.jpa.dao.DaoConfig;
import org.apache.commons.lang3.tuple.Pair;
import org.springframework.beans.factory.annotation.Autowired;
import ca.uhn.fhir.jpa.dao.data.ISearchParamDao;
import ca.uhn.fhir.jpa.dao.data.ISearchParamPresentDao;
import ca.uhn.fhir.jpa.entity.ResourceTable;
import ca.uhn.fhir.jpa.entity.SearchParam;
import ca.uhn.fhir.jpa.entity.SearchParamPresent;
import org.springframework.beans.factory.annotation.Autowired;
import java.util.*;
import java.util.Map.Entry;
public class SearchParamPresenceSvcImpl implements ISearchParamPresenceSvc {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(SearchParamPresenceSvcImpl.class);
private Map<Pair<String, String>, SearchParam> myResourceTypeToSearchParamToEntity = new ConcurrentHashMap<Pair<String, String>, SearchParam>();
@Autowired
private ISearchParamDao mySearchParamDao;
@Autowired
private ISearchParamPresentDao mySearchParamPresentDao;
@ -55,62 +43,48 @@ public class SearchParamPresenceSvcImpl implements ISearchParamPresenceSvc {
return;
}
Map<String, Boolean> presenceMap = new HashMap<String, Boolean>(theParamNameToPresence);
List<SearchParamPresent> entitiesToSave = new ArrayList<SearchParamPresent>();
List<SearchParamPresent> entitiesToDelete = new ArrayList<SearchParamPresent>();
Map<String, Boolean> presenceMap = new HashMap<>(theParamNameToPresence);
// Find existing entries
Collection<SearchParamPresent> existing;
existing = mySearchParamPresentDao.findAllForResource(theResource);
Map<Long, SearchParamPresent> existingHashToPresence = new HashMap<>();
for (SearchParamPresent nextExistingEntity : existing) {
String nextSearchParamName = nextExistingEntity.getSearchParam().getParamName();
Boolean existingValue = presenceMap.remove(nextSearchParamName);
if (existingValue == null) {
entitiesToDelete.add(nextExistingEntity);
} else if (existingValue.booleanValue() == nextExistingEntity.isPresent()) {
ourLog.trace("No change for search param {}", nextSearchParamName);
} else {
nextExistingEntity.setPresent(existingValue);
entitiesToSave.add(nextExistingEntity);
}
existingHashToPresence.put(nextExistingEntity.getHashPresence(), nextExistingEntity);
}
// Find newly wanted set of entries
Map<Long, SearchParamPresent> newHashToPresence = new HashMap<>();
for (Entry<String, Boolean> next : presenceMap.entrySet()) {
String resourceType = theResource.getResourceType();
String paramName = next.getKey();
Pair<String, String> key = Pair.of(resourceType, paramName);
SearchParam searchParam = myResourceTypeToSearchParamToEntity.get(key);
if (searchParam == null) {
searchParam = mySearchParamDao.findForResource(resourceType, paramName);
if (searchParam != null) {
myResourceTypeToSearchParamToEntity.put(key, searchParam);
} else {
searchParam = new SearchParam();
searchParam.setResourceName(resourceType);
searchParam.setParamName(paramName);
searchParam = mySearchParamDao.save(searchParam);
ourLog.info("Added search param {} with pid {}", paramName, searchParam.getId());
// Don't add the newly saved entity to the map in case the save fails
}
}
SearchParamPresent present = new SearchParamPresent();
present.setResource(theResource);
present.setSearchParam(searchParam);
present.setParamName(paramName);
present.setPresent(next.getValue());
entitiesToSave.add(present);
present.calculateHashes();
newHashToPresence.put(present.getHashPresence(), present);
}
mySearchParamPresentDao.deleteInBatch(entitiesToDelete);
mySearchParamPresentDao.saveAll(entitiesToSave);
// Delete any that should be deleted
List<SearchParamPresent> toDelete = new ArrayList<>();
for (Entry<Long, SearchParamPresent> nextEntry : existingHashToPresence.entrySet()) {
if (newHashToPresence.containsKey(nextEntry.getKey()) == false) {
toDelete.add(nextEntry.getValue());
}
}
mySearchParamPresentDao.deleteInBatch(toDelete);
}
// Add any that should be added
List<SearchParamPresent> toAdd = new ArrayList<>();
for (Entry<Long, SearchParamPresent> nextEntry : newHashToPresence.entrySet()) {
if (existingHashToPresence.containsKey(nextEntry.getKey()) == false) {
toAdd.add(nextEntry.getValue());
}
}
mySearchParamPresentDao.saveAll(toAdd);
@Override
public void flushCachesForUnitTest() {
myResourceTypeToSearchParamToEntity.clear();
}
}
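The update above is a plain set reconciliation keyed on the presence hash: rows whose hash is no longer wanted are deleted, hashes with no existing row are inserted, and unchanged rows are never touched. The same idea in isolation, with illustrative values:

    import java.util.*;

    Set<Long> existing = new HashSet<>(Arrays.asList(1L, 2L)); // hashes already in HFJ_RES_PARAM_PRESENT
    Set<Long> wanted = new HashSet<>(Arrays.asList(2L, 3L));   // hashes the resource should have now
    Set<Long> toDelete = new HashSet<>(existing);
    toDelete.removeAll(wanted);  // {1} - delete these rows
    Set<Long> toAdd = new HashSet<>(wanted);
    toAdd.removeAll(existing);   // {3} - insert these rows; 2 is left untouched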

View File

@ -40,15 +40,20 @@ import com.github.benmanes.caffeine.cache.Caffeine;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Stopwatch;
import com.google.common.collect.ArrayListMultimap;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.time.DateUtils;
import org.apache.lucene.search.Query;
import org.apache.lucene.index.Term;
import org.apache.lucene.queries.TermsQuery;
import org.apache.lucene.search.*;
import org.hibernate.ScrollMode;
import org.hibernate.ScrollableResults;
import org.hibernate.search.jpa.FullTextEntityManager;
import org.hibernate.search.jpa.FullTextQuery;
import org.hibernate.search.query.dsl.BooleanJunction;
import org.hibernate.search.query.dsl.QueryBuilder;
import org.hibernate.search.query.dsl.TermMatchingContext;
import org.hibernate.search.query.dsl.TermTermination;
import org.hl7.fhir.exceptions.FHIRException;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.CodeSystem;
@ -61,6 +66,8 @@ import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Slice;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.TransactionStatus;
@ -77,6 +84,7 @@ import javax.persistence.TypedQuery;
import javax.persistence.criteria.*;
import java.util.*;
import java.util.concurrent.TimeUnit;
import java.util.function.Supplier;
import java.util.stream.Collectors;
import static org.apache.commons.lang3.StringUtils.isBlank;
@ -131,10 +139,14 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
private int myFetchSize = DEFAULT_FETCH_SIZE;
private ApplicationContext myApplicationContext;
private void addCodeIfNotAlreadyAdded(String theCodeSystem, ValueSet.ValueSetExpansionComponent theExpansionComponent, Set<String> theAddedCodes, TermConcept theConcept) {
if (theAddedCodes.add(theConcept.getCode())) {
/**
* @param theAdd If true, add the code. If false, remove the code.
*/
private void addCodeIfNotAlreadyAdded(String theCodeSystem, ValueSet.ValueSetExpansionComponent theExpansionComponent, Set<String> theAddedCodes, TermConcept theConcept, boolean theAdd) {
String code = theConcept.getCode();
if (theAdd && theAddedCodes.add(code)) {
ValueSet.ValueSetExpansionContainsComponent contains = theExpansionComponent.addContains();
contains.setCode(theConcept.getCode());
contains.setCode(code);
contains.setSystem(theCodeSystem);
contains.setDisplay(theConcept.getDisplay());
for (TermConceptDesignation nextDesignation : theConcept.getDesignations()) {
@ -147,18 +159,24 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
.setDisplay(nextDesignation.getUseDisplay());
}
}
if (!theAdd && theAddedCodes.remove(code)) {
removeCodeFromExpansion(theCodeSystem, code, theExpansionComponent);
}
}
private void addConceptsToList(ValueSet.ValueSetExpansionComponent theExpansionComponent, Set<String> theAddedCodes, String theSystem, List<CodeSystem.ConceptDefinitionComponent> theConcept) {
private void addConceptsToList(ValueSet.ValueSetExpansionComponent theExpansionComponent, Set<String> theAddedCodes, String theSystem, List<CodeSystem.ConceptDefinitionComponent> theConcept, boolean theAdd) {
for (CodeSystem.ConceptDefinitionComponent next : theConcept) {
if (!theAddedCodes.contains(next.getCode())) {
theAddedCodes.add(next.getCode());
if (theAdd && theAddedCodes.add(next.getCode())) {
ValueSet.ValueSetExpansionContainsComponent contains = theExpansionComponent.addContains();
contains.setCode(next.getCode());
contains.setSystem(theSystem);
contains.setDisplay(next.getDisplay());
}
addConceptsToList(theExpansionComponent, theAddedCodes, theSystem, next.getConcept());
if (!theAdd && theAddedCodes.remove(next.getCode())) {
removeCodeFromExpansion(theSystem, next.getCode(), theExpansionComponent);
}
addConceptsToList(theExpansionComponent, theAddedCodes, theSystem, next.getConcept(), theAdd);
}
}
@ -250,21 +268,7 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
int i = 0;
for (TermCodeSystemVersion next : myCodeSystemVersionDao.findByCodeSystemResource(theCodeSystem.getPid())) {
myConceptParentChildLinkDao.deleteByCodeSystemVersion(next.getPid());
for (TermConcept nextConcept : myConceptDao.findByCodeSystemVersion(next.getPid())) {
myConceptPropertyDao.deleteAll(nextConcept.getProperties());
myConceptDesignationDao.deleteAll(nextConcept.getDesignations());
myConceptDao.delete(nextConcept);
}
if (next.getCodeSystem().getCurrentVersion() == next) {
next.getCodeSystem().setCurrentVersion(null);
myCodeSystemDao.save(next.getCodeSystem());
}
myCodeSystemVersionDao.delete(next);
if (i++ % 1000 == 0) {
myEntityManager.flush();
}
deleteCodeSystemVersion(next.getPid());
}
myCodeSystemVersionDao.deleteForCodeSystem(theCodeSystem);
myCodeSystemDao.delete(theCodeSystem);
@ -272,6 +276,119 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
myEntityManager.flush();
}
public void deleteCodeSystemVersion(final Long theCodeSystemVersionPid) {
ourLog.info(" * Deleting code system version {}", theCodeSystemVersionPid);
PageRequest page1000 = PageRequest.of(0, 1000);
// Parent/Child links
{
String descriptor = "parent/child links";
Supplier<Slice<TermConceptParentChildLink>> loader = () -> myConceptParentChildLinkDao.findByCodeSystemVersion(page1000, theCodeSystemVersionPid);
Supplier<Integer> counter = () -> myConceptParentChildLinkDao.countByCodeSystemVersion(theCodeSystemVersionPid);
doDelete(descriptor, loader, counter, myConceptParentChildLinkDao);
}
// Properties
{
String descriptor = "concept properties";
Supplier<Slice<TermConceptProperty>> loader = () -> myConceptPropertyDao.findByCodeSystemVersion(page1000, theCodeSystemVersionPid);
Supplier<Integer> counter = () -> myConceptPropertyDao.countByCodeSystemVersion(theCodeSystemVersionPid);
doDelete(descriptor, loader, counter, myConceptPropertyDao);
}
// Designations
{
String descriptor = "concept designations";
Supplier<Slice<TermConceptDesignation>> loader = () -> myConceptDesignationDao.findByCodeSystemVersion(page1000, theCodeSystemVersionPid);
Supplier<Integer> counter = () -> myConceptDesignationDao.countByCodeSystemVersion(theCodeSystemVersionPid);
doDelete(descriptor, loader, counter, myConceptDesignationDao);
}
// Concepts
{
String descriptor = "concepts";
// For some reason, concepts are much slower to delete, so use a smaller batch size
PageRequest page100 = PageRequest.of(0, 100);
Supplier<Slice<TermConcept>> loader = () -> myConceptDao.findByCodeSystemVersion(page100, theCodeSystemVersionPid);
Supplier<Integer> counter = () -> myConceptDao.countByCodeSystemVersion(theCodeSystemVersionPid);
doDelete(descriptor, loader, counter, myConceptDao);
}
Optional<TermCodeSystem> codeSystemOpt = myCodeSystemDao.findWithCodeSystemVersionAsCurrentVersion(theCodeSystemVersionPid);
if (codeSystemOpt.isPresent()) {
TermCodeSystem codeSystem = codeSystemOpt.get();
ourLog.info(" * Removing code system version {} as current version of code system {}", theCodeSystemVersionPid, codeSystem.getPid());
codeSystem.setCurrentVersion(null);
myCodeSystemDao.save(codeSystem);
}
ourLog.info(" * Deleting code system version");
myCodeSystemVersionDao.deleteById(theCodeSystemVersionPid);
}
public void deleteConceptMap(ResourceTable theResourceTable) {
// Get existing entity so it can be deleted.
Optional<TermConceptMap> optionalExistingTermConceptMapById = myConceptMapDao.findTermConceptMapByResourcePid(theResourceTable.getId());
if (optionalExistingTermConceptMapById.isPresent()) {
TermConceptMap existingTermConceptMap = optionalExistingTermConceptMapById.get();
ourLog.info("Deleting existing TermConceptMap {} and its children...", existingTermConceptMap.getId());
for (TermConceptMapGroup group : existingTermConceptMap.getConceptMapGroups()) {
for (TermConceptMapGroupElement element : group.getConceptMapGroupElements()) {
for (TermConceptMapGroupElementTarget target : element.getConceptMapGroupElementTargets()) {
myConceptMapGroupElementTargetDao.deleteTermConceptMapGroupElementTargetById(target.getId());
}
myConceptMapGroupElementDao.deleteTermConceptMapGroupElementById(element.getId());
}
myConceptMapGroupDao.deleteTermConceptMapGroupById(group.getId());
}
myConceptMapDao.deleteTermConceptMapById(existingTermConceptMap.getId());
ourLog.info("Done deleting existing TermConceptMap {} and its children.", existingTermConceptMap.getId());
ourLog.info("Flushing...");
myConceptMapGroupElementTargetDao.flush();
myConceptMapGroupElementDao.flush();
myConceptMapGroupDao.flush();
myConceptMapDao.flush();
ourLog.info("Done flushing.");
}
}
@Override
@Transactional
public void deleteConceptMapAndChildren(ResourceTable theResourceTable) {
deleteConceptMap(theResourceTable);
}
private <T> void doDelete(String theDescriptor, Supplier<Slice<T>> theLoader, Supplier<Integer> theCounter, JpaRepository<T, ?> theDao) {
int count;
ourLog.info(" * Deleting {}", theDescriptor);
int totalCount = theCounter.get();
StopWatch sw = new StopWatch();
count = 0;
while (true) {
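// Always fetch the first page: deleteInBatch() physically removed the previous
// batch, so page 0 of the next query holds the next unprocessed rows.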
Slice<T> link = theLoader.get();
if (link.hasContent() == false) {
break;
}
theDao.deleteInBatch(link);
count += link.getNumberOfElements();
ourLog.info(" * {} {} deleted - {}/sec - ETA: {}", count, theDescriptor, sw.formatThroughput(count, TimeUnit.SECONDS), sw.getEstimatedTimeRemaining(count, totalCount));
}
theDao.flush();
}
private int ensureParentsSaved(Collection<TermConceptParentChildLink> theParents) {
ourLog.trace("Checking {} parents", theParents.size());
int retVal = 0;
@ -281,6 +398,7 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
TermConcept nextParent = nextLink.getParent();
retVal += ensureParentsSaved(nextParent.getParents());
if (nextParent.getId() == null) {
nextParent.setUpdated(new Date());
myConceptDao.saveAndFlush(nextParent);
retVal++;
ourLog.debug("Saved parent code {} and got id {}", nextParent.getCode(), nextParent.getId());
@ -296,133 +414,17 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
public ValueSet expandValueSet(ValueSet theValueSetToExpand) {
ValueSet.ValueSetExpansionComponent expansionComponent = new ValueSet.ValueSetExpansionComponent();
Set<String> addedCodes = new HashSet<>();
boolean haveIncludeCriteria = false;
// Handle includes
for (ValueSet.ConceptSetComponent include : theValueSetToExpand.getCompose().getInclude()) {
String system = include.getSystem();
if (isNotBlank(system)) {
ourLog.info("Starting expansion around code system: {}", system);
boolean add = true;
expandValueSetHandleIncludeOrExclude(expansionComponent, addedCodes, include, add);
}
TermCodeSystem cs = myCodeSystemDao.findByCodeSystemUri(system);
if (cs != null) {
TermCodeSystemVersion csv = cs.getCurrentVersion();
/*
* Include Concepts
*/
for (ValueSet.ConceptReferenceComponent next : include.getConcept()) {
String nextCode = next.getCode();
if (isNotBlank(nextCode) && !addedCodes.contains(nextCode)) {
haveIncludeCriteria = true;
TermConcept code = findCode(system, nextCode);
if (code != null) {
addCodeIfNotAlreadyAdded(system, expansionComponent, addedCodes, code);
}
}
}
/*
* Filters
*/
if (include.getFilter().size() > 0) {
haveIncludeCriteria = true;
FullTextEntityManager em = org.hibernate.search.jpa.Search.getFullTextEntityManager(myEntityManager);
QueryBuilder qb = em.getSearchFactory().buildQueryBuilder().forEntity(TermConcept.class).get();
BooleanJunction<?> bool = qb.bool();
bool.must(qb.keyword().onField("myCodeSystemVersionPid").matching(csv.getPid()).createQuery());
for (ValueSet.ConceptSetFilterComponent nextFilter : include.getFilter()) {
if (isBlank(nextFilter.getValue()) && nextFilter.getOp() == null && isBlank(nextFilter.getProperty())) {
continue;
}
if (isBlank(nextFilter.getValue()) || nextFilter.getOp() == null || isBlank(nextFilter.getProperty())) {
throw new InvalidRequestException("Invalid filter, must have fields populated: property op value");
}
if (nextFilter.getProperty().equals("display:exact") && nextFilter.getOp() == ValueSet.FilterOperator.EQUAL) {
addDisplayFilterExact(qb, bool, nextFilter);
} else if ("display".equals(nextFilter.getProperty()) && nextFilter.getOp() == ValueSet.FilterOperator.EQUAL) {
if (nextFilter.getValue().trim().contains(" ")) {
addDisplayFilterExact(qb, bool, nextFilter);
} else {
addDisplayFilterInexact(qb, bool, nextFilter);
}
} else if ((nextFilter.getProperty().equals("concept") || nextFilter.getProperty().equals("code")) && nextFilter.getOp() == ValueSet.FilterOperator.ISA) {
TermConcept code = findCode(system, nextFilter.getValue());
if (code == null) {
throw new InvalidRequestException("Invalid filter criteria - code does not exist: {" + system + "}" + nextFilter.getValue());
}
ourLog.info(" * Filtering on codes with a parent of {}/{}/{}", code.getId(), code.getCode(), code.getDisplay());
bool.must(qb.keyword().onField("myParentPids").matching("" + code.getId()).createQuery());
} else {
bool.must(qb.phrase().onField("myProperties").sentence(nextFilter.getProperty() + "=" + nextFilter.getValue()).createQuery());
}
}
Query luceneQuery = bool.createQuery();
FullTextQuery jpaQuery = em.createFullTextQuery(luceneQuery, TermConcept.class);
jpaQuery.setMaxResults(1000);
StopWatch sw = new StopWatch();
@SuppressWarnings("unchecked")
List<TermConcept> result = jpaQuery.getResultList();
ourLog.info("Expansion completed in {}ms", sw.getMillis());
for (TermConcept nextConcept : result) {
addCodeIfNotAlreadyAdded(system, expansionComponent, addedCodes, nextConcept);
}
expansionComponent.setTotal(jpaQuery.getResultSize());
}
if (!haveIncludeCriteria) {
List<TermConcept> allCodes = findCodes(system);
for (TermConcept nextConcept : allCodes) {
addCodeIfNotAlreadyAdded(system, expansionComponent, addedCodes, nextConcept);
}
}
} else {
// No codesystem matching the URL found in the database
CodeSystem codeSystemFromContext = getCodeSystemFromContext(system);
if (codeSystemFromContext == null) {
throw new InvalidRequestException("Unknown code system: " + system);
}
if (include.getConcept().isEmpty() == false) {
for (ValueSet.ConceptReferenceComponent next : include.getConcept()) {
String nextCode = next.getCode();
if (isNotBlank(nextCode) && !addedCodes.contains(nextCode)) {
CodeSystem.ConceptDefinitionComponent code = findCode(codeSystemFromContext.getConcept(), nextCode);
if (code != null) {
addedCodes.add(nextCode);
ValueSet.ValueSetExpansionContainsComponent contains = expansionComponent.addContains();
contains.setCode(nextCode);
contains.setSystem(system);
contains.setDisplay(code.getDisplay());
}
}
}
} else {
List<CodeSystem.ConceptDefinitionComponent> concept = codeSystemFromContext.getConcept();
addConceptsToList(expansionComponent, addedCodes, system, concept);
}
}
}
// Handle excludes
for (ValueSet.ConceptSetComponent include : theValueSetToExpand.getCompose().getExclude()) {
boolean add = false;
expandValueSetHandleIncludeOrExclude(expansionComponent, addedCodes, include, add);
}
ValueSet valueSet = new ValueSet();
@ -443,6 +445,173 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
return retVal;
}
public void expandValueSetHandleIncludeOrExclude(ValueSet.ValueSetExpansionComponent theExpansionComponent, Set<String> theAddedCodes, ValueSet.ConceptSetComponent include, boolean theAdd) {
String system = include.getSystem();
if (isNotBlank(system)) {
ourLog.info("Starting expansion around code system: {}", system);
TermCodeSystem cs = myCodeSystemDao.findByCodeSystemUri(system);
if (cs != null) {
TermCodeSystemVersion csv = cs.getCurrentVersion();
FullTextEntityManager em = org.hibernate.search.jpa.Search.getFullTextEntityManager(myEntityManager);
QueryBuilder qb = em.getSearchFactory().buildQueryBuilder().forEntity(TermConcept.class).get();
BooleanJunction<?> bool = qb.bool();
bool.must(qb.keyword().onField("myCodeSystemVersionPid").matching(csv.getPid()).createQuery());
/*
* Filters
*/
if (include.getFilter().size() > 0) {
for (ValueSet.ConceptSetFilterComponent nextFilter : include.getFilter()) {
if (isBlank(nextFilter.getValue()) && nextFilter.getOp() == null && isBlank(nextFilter.getProperty())) {
continue;
}
if (isBlank(nextFilter.getValue()) || nextFilter.getOp() == null || isBlank(nextFilter.getProperty())) {
throw new InvalidRequestException("Invalid filter, must have fields populated: property op value");
}
if (nextFilter.getProperty().equals("display:exact") && nextFilter.getOp() == ValueSet.FilterOperator.EQUAL) {
addDisplayFilterExact(qb, bool, nextFilter);
} else if ("display".equals(nextFilter.getProperty()) && nextFilter.getOp() == ValueSet.FilterOperator.EQUAL) {
if (nextFilter.getValue().trim().contains(" ")) {
addDisplayFilterExact(qb, bool, nextFilter);
} else {
addDisplayFilterInexact(qb, bool, nextFilter);
}
} else if (nextFilter.getProperty().equals("concept") || nextFilter.getProperty().equals("code")) {
TermConcept code = findCode(system, nextFilter.getValue());
if (code == null) {
throw new InvalidRequestException("Invalid filter criteria - code does not exist: {" + system + "}" + nextFilter.getValue());
}
if (nextFilter.getOp() == ValueSet.FilterOperator.ISA) {
ourLog.info(" * Filtering on codes with a parent of {}/{}/{}", code.getId(), code.getCode(), code.getDisplay());
bool.must(qb.keyword().onField("myParentPids").matching("" + code.getId()).createQuery());
} else {
throw new InvalidRequestException("Don't know how to handle op=" + nextFilter.getOp() + " on property " + nextFilter.getProperty());
}
} else {
if (nextFilter.getOp() == ValueSet.FilterOperator.REGEX) {
/*
* We treat the regex filter as a match on the regex
* anywhere in the property string. The spec does not
* say whether or not this is the right behaviour, but
* there are examples that seem to suggest that it is.
*/
String value = nextFilter.getValue();
if (value.endsWith("$")) {
value = value.substring(0, value.length() - 1);
} else if (value.endsWith(".*") == false) {
value = value + ".*";
}
if (value.startsWith("^") == false && value.startsWith(".*") == false) {
value = ".*" + value;
} else if (value.startsWith("^")) {
value = value.substring(1);
}
Term term = new Term(TermConceptPropertyFieldBridge.CONCEPT_FIELD_PROPERTY_PREFIX + nextFilter.getProperty(), value);
RegexpQuery query = new RegexpQuery(term);
bool.must(query);
} else {
String value = nextFilter.getValue();
Term term = new Term(TermConceptPropertyFieldBridge.CONCEPT_FIELD_PROPERTY_PREFIX + nextFilter.getProperty(), value);
bool.must(new TermsQuery(term));
}
}
}
}
Query luceneQuery = bool.createQuery();
/*
* Include Concepts
*/
List<Term> codes = include
.getConcept()
.stream()
.filter(Objects::nonNull)
.map(ValueSet.ConceptReferenceComponent::getCode)
.filter(StringUtils::isNotBlank)
.map(t -> new Term("myCode", t))
.collect(Collectors.toList());
if (codes.size() > 0) {
MultiPhraseQuery query = new MultiPhraseQuery();
query.add(codes.toArray(new Term[0]));
luceneQuery = new BooleanQuery.Builder()
.add(luceneQuery, BooleanClause.Occur.MUST)
.add(query, BooleanClause.Occur.MUST)
.build();
}
/*
* Execute the query
*/
FullTextQuery jpaQuery = em.createFullTextQuery(luceneQuery, TermConcept.class);
jpaQuery.setMaxResults(1000);
StopWatch sw = new StopWatch();
@SuppressWarnings("unchecked")
List<TermConcept> result = jpaQuery.getResultList();
ourLog.info("Expansion completed in {}ms", sw.getMillis());
for (TermConcept nextConcept : result) {
addCodeIfNotAlreadyAdded(system, theExpansionComponent, theAddedCodes, nextConcept, theAdd);
}
} else {
// No codesystem matching the URL found in the database
CodeSystem codeSystemFromContext = getCodeSystemFromContext(system);
if (codeSystemFromContext == null) {
throw new InvalidRequestException("Unknown code system: " + system);
}
if (include.getConcept().isEmpty() == false) {
for (ValueSet.ConceptReferenceComponent next : include.getConcept()) {
String nextCode = next.getCode();
if (isNotBlank(nextCode) && !theAddedCodes.contains(nextCode)) {
CodeSystem.ConceptDefinitionComponent code = findCode(codeSystemFromContext.getConcept(), nextCode);
if (code != null) {
if (theAdd && theAddedCodes.add(nextCode)) {
ValueSet.ValueSetExpansionContainsComponent contains = theExpansionComponent.addContains();
contains.setCode(nextCode);
contains.setSystem(system);
contains.setDisplay(code.getDisplay());
}
if (!theAdd && theAddedCodes.remove(nextCode)) {
removeCodeFromExpansion(system, nextCode, theExpansionComponent);
}
}
}
}
} else {
List<CodeSystem.ConceptDefinitionComponent> concept = codeSystemFromContext.getConcept();
addConceptsToList(theExpansionComponent, theAddedCodes, system, concept, theAdd);
}
}
}
}
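The regex branch above deserves a closer look: Lucene's RegexpQuery matches the entire term, so the code rewrites the user's pattern to match anywhere in the property value unless it is explicitly anchored, and honours ^ and $ by stripping them. A minimal standalone sketch of those rewrite rules (normalizeRegex is a hypothetical helper, not part of the class above):

public class RegexFilterNormalizer {

    // Mirrors the anchoring rules used by the REGEX filter branch above.
    static String normalizeRegex(String theValue) {
        String value = theValue;
        if (value.endsWith("$")) {
            value = value.substring(0, value.length() - 1);
        } else if (!value.endsWith(".*")) {
            value = value + ".*";
        }
        if (!value.startsWith("^") && !value.startsWith(".*")) {
            value = ".*" + value;
        } else if (value.startsWith("^")) {
            value = value.substring(1);
        }
        return value;
    }

    public static void main(String[] args) {
        System.out.println(normalizeRegex("LP101"));   // .*LP101.*
        System.out.println(normalizeRegex("^LP101"));  // LP101.*
        System.out.println(normalizeRegex("LP101$"));  // .*LP101
        System.out.println(normalizeRegex("^LP101$")); // LP101
    }
}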
private void fetchChildren(TermConcept theConcept, Set<TermConcept> theSetToPopulate) {
for (TermConceptParentChildLink nextChildLink : theConcept.getChildren()) {
TermConcept nextChild = nextChildLink.getChild();
@ -731,9 +900,11 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
for (TermConcept nextConcept : concepts) {
StringBuilder parentsBuilder = new StringBuilder();
createParentsString(parentsBuilder, nextConcept.getId());
nextConcept.setParentPids(parentsBuilder.toString());
if (isBlank(nextConcept.getParentPidsAsString())) {
StringBuilder parentsBuilder = new StringBuilder();
createParentsString(parentsBuilder, nextConcept.getId());
nextConcept.setParentPids(parentsBuilder.toString());
}
saveConcept(nextConcept);
count++;
@ -745,6 +916,14 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
}
private void removeCodeFromExpansion(String theCodeSystem, String theCode, ValueSet.ValueSetExpansionComponent theExpansionComponent) {
theExpansionComponent
.getContains()
.removeIf(t ->
theCodeSystem.equals(t.getSystem()) &&
theCode.equals(t.getCode()));
}
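The exclude pass drives this with theAdd == false: codes already in the expansion are stripped in place via Collection.removeIf, as removeCodeFromExpansion shows. A self-contained sketch of the same removal against the R4 model (the system and codes are illustrative):

import org.hl7.fhir.r4.model.ValueSet;

public class RemoveFromExpansionExample {
    public static void main(String[] args) {
        ValueSet.ValueSetExpansionComponent expansion = new ValueSet.ValueSetExpansionComponent();
        expansion.addContains().setSystem("http://loinc.org").setCode("1000-9");
        expansion.addContains().setSystem("http://loinc.org").setCode("2000-8");

        // Same predicate as removeCodeFromExpansion: match on system + code
        String system = "http://loinc.org";
        String code = "1000-9";
        expansion.getContains().removeIf(t ->
            system.equals(t.getSystem()) &&
            code.equals(t.getCode()));

        System.out.println(expansion.getContains().size()); // prints 1
    }
}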
private int saveConcept(TermConcept theConcept) {
int retVal = 0;
@ -759,6 +938,7 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
if (theConcept.getId() == null || theConcept.getIndexStatus() == null) {
retVal++;
theConcept.setIndexStatus(BaseHapiFhirDao.INDEX_STATUS_INDEXED);
theConcept.setUpdated(new Date());
myConceptDao.save(theConcept);
for (TermConceptProperty next : theConcept.getProperties()) {
@ -788,15 +968,16 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
return;
} else if (myDeferredConcepts.isEmpty() && myConceptLinksToSaveLater.isEmpty()) {
processReindexing();
return;
}
TransactionTemplate tt = new TransactionTemplate(myTransactionMgr);
tt.setPropagationBehavior(TransactionTemplate.PROPAGATION_REQUIRES_NEW);
tt.execute(t -> {
processDeferredConcepts();
return null;
});
if (!myDeferredConcepts.isEmpty() || !myConceptLinksToSaveLater.isEmpty()) {
tt.execute(t -> {
processDeferredConcepts();
return null;
});
}
if (myDeferredValueSets.size() > 0) {
tt.execute(t -> {
@ -847,20 +1028,12 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
ourLog.info("Deleting old code system versions");
for (TermCodeSystemVersion next : existing) {
ourLog.info(" * Deleting code system version {}", next.getPid());
myConceptParentChildLinkDao.deleteByCodeSystemVersion(next.getPid());
for (TermConcept nextConcept : myConceptDao.findByCodeSystemVersion(next.getPid())) {
myConceptPropertyDao.deleteAll(nextConcept.getProperties());
myConceptDao.delete(nextConcept);
}
Long codeSystemVersionPid = next.getPid();
deleteCodeSystemVersion(codeSystemVersionPid);
}
ourLog.info("Flushing...");
myConceptParentChildLinkDao.flush();
myConceptPropertyDao.flush();
myConceptDao.flush();
ourLog.info("Done flushing");
/*
@ -905,7 +1078,7 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
codeSystem.setCurrentVersion(theCodeSystemVersion);
codeSystem = myCodeSystemDao.saveAndFlush(codeSystem);
ourLog.info("Setting codesystemversion on {} concepts...", totalCodeCount);
ourLog.info("Setting CodeSystemVersion[{}] on {} concepts...", codeSystem.getPid(), totalCodeCount);
for (TermConcept next : theCodeSystemVersion.getConcepts()) {
populateVersion(next, codeSystemVersion);
@ -963,42 +1136,10 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
termConceptMap.setResource(theResourceTable);
termConceptMap.setUrl(theConceptMap.getUrl());
// Get existing entity so it can be deleted.
Optional<TermConceptMap> optionalExistingTermConceptMapById = myConceptMapDao.findTermConceptMapByResourcePid(theResourceTable.getId());
/*
* For now we always delete old versions. At some point, it would be nice to allow configuration to keep old versions.
*/
if (optionalExistingTermConceptMapById.isPresent()) {
TermConceptMap existingTermConceptMap = optionalExistingTermConceptMapById.get();
ourLog.info("Deleting existing TermConceptMap {} and its children...", existingTermConceptMap.getId());
for (TermConceptMapGroup group : existingTermConceptMap.getConceptMapGroups()) {
for (TermConceptMapGroupElement element : group.getConceptMapGroupElements()) {
for (TermConceptMapGroupElementTarget target : element.getConceptMapGroupElementTargets()) {
myConceptMapGroupElementTargetDao.deleteTermConceptMapGroupElementTargetById(target.getId());
}
myConceptMapGroupElementDao.deleteTermConceptMapGroupElementById(element.getId());
}
myConceptMapGroupDao.deleteTermConceptMapGroupById(group.getId());
}
myConceptMapDao.deleteTermConceptMapById(existingTermConceptMap.getId());
ourLog.info("Done deleting existing TermConceptMap {} and its children.", existingTermConceptMap.getId());
ourLog.info("Flushing...");
myConceptMapGroupElementTargetDao.flush();
myConceptMapGroupElementDao.flush();
myConceptMapGroupDao.flush();
myConceptMapDao.flush();
ourLog.info("Done flushing.");
}
deleteConceptMap(theResourceTable);
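The inline cascade deleted above is now a single call. The removed lines spell out the order in which the children must go (targets, then elements, then groups, then the TermConceptMap itself); a hedged sketch of what deleteConceptMap plausibly looks like, reconstructed from that removed code and reusing its DAO fields:

// Sketch only, reconstructed from the removed inline cascade above.
// Children are deleted before parents to satisfy foreign-key constraints.
public void deleteConceptMap(ResourceTable theResourceTable) {
    Optional<TermConceptMap> found = myConceptMapDao.findTermConceptMapByResourcePid(theResourceTable.getId());
    if (found.isPresent()) {
        TermConceptMap existing = found.get();
        for (TermConceptMapGroup group : existing.getConceptMapGroups()) {
            for (TermConceptMapGroupElement element : group.getConceptMapGroupElements()) {
                for (TermConceptMapGroupElementTarget target : element.getConceptMapGroupElementTargets()) {
                    myConceptMapGroupElementTargetDao.deleteTermConceptMapGroupElementTargetById(target.getId());
                }
                myConceptMapGroupElementDao.deleteTermConceptMapGroupElementById(element.getId());
            }
            myConceptMapGroupDao.deleteTermConceptMapGroupById(group.getId());
        }
        myConceptMapDao.deleteTermConceptMapById(existing.getId());
    }
}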
/*
* Do the upload.
@ -1019,6 +1160,7 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
throw new InternalErrorException(fe);
}
myConceptMapDao.save(termConceptMap);
int codesSaved = 0;
if (theConceptMap.hasGroup()) {
TermConceptMapGroup termConceptMapGroup;
@ -1054,7 +1196,12 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
termConceptMapGroupElementTarget.setCode(target.getCode());
termConceptMapGroupElementTarget.setDisplay(target.getDisplay());
termConceptMapGroupElementTarget.setEquivalence(target.getEquivalence());
myConceptMapGroupElementTargetDao.saveAndFlush(termConceptMapGroupElementTarget);
myConceptMapGroupElementTargetDao.save(termConceptMapGroupElementTarget);
if (codesSaved++ % 250 == 0) {
ourLog.info("Have saved {} codes in conceptmap", codesSaved);
myConceptMapGroupElementTargetDao.flush();
}
}
}
}
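Replacing saveAndFlush() with save() plus a flush every 250 rows is the classic batched-write pattern: the persistence context stays bounded without paying a flush per insert. A generic sketch of the pattern, assuming a Spring Data JpaRepository (BatchSaver and its type parameters are hypothetical):

import java.util.List;
import org.springframework.data.jpa.repository.JpaRepository;

public class BatchSaver<T> {
    private final JpaRepository<T, Long> myRepository;

    public BatchSaver(JpaRepository<T, Long> theRepository) {
        myRepository = theRepository;
    }

    public void saveAll(List<T> theEntities) {
        int saved = 0;
        for (T next : theEntities) {
            myRepository.save(next);
            // Flush periodically so pending inserts don't accumulate unbounded
            if (++saved % 250 == 0) {
                myRepository.flush();
            }
        }
        myRepository.flush(); // flush the trailing partial batch
    }
}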

View File

@ -68,6 +68,8 @@ public interface IHapiTerminologySvc {
*/
IIdType storeNewCodeSystemVersion(org.hl7.fhir.r4.model.CodeSystem theCodeSystemResource, TermCodeSystemVersion theCodeSystemVersion, RequestDetails theRequestDetails, List<org.hl7.fhir.r4.model.ValueSet> theValueSets, List<org.hl7.fhir.r4.model.ConceptMap> theConceptMaps);
void deleteConceptMapAndChildren(ResourceTable theResourceTable);
void storeTermConceptMapAndChildren(ResourceTable theResourceTable, ConceptMap theConceptMap);
boolean supportsSystem(String theCodeSystem);

View File

@ -11,6 +11,7 @@ import ca.uhn.fhir.jpa.term.snomedct.SctHandlerRelationship;
import ca.uhn.fhir.jpa.util.Counter;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Charsets;
@ -62,23 +63,27 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {
public static final String SCT_FILE_CONCEPT = "Terminology/sct2_Concept_Full_";
public static final String SCT_FILE_DESCRIPTION = "Terminology/sct2_Description_Full-en";
public static final String SCT_FILE_RELATIONSHIP = "Terminology/sct2_Relationship_Full";
public static final String LOINC_ANSWERLIST_FILE = "AnswerList_Beta_1.csv";
public static final String LOINC_ANSWERLIST_LINK_FILE = "LoincAnswerListLink_Beta_1.csv";
public static final String LOINC_ANSWERLIST_FILE = "AnswerList.csv";
public static final String LOINC_ANSWERLIST_LINK_FILE = "LoincAnswerListLink.csv";
public static final String LOINC_DOCUMENT_ONTOLOGY_FILE = "DocumentOntology.csv";
public static final String LOINC_UPLOAD_PROPERTIES_FILE = "loincupload.properties";
public static final String LOINC_FILE = "loinc.csv";
public static final String LOINC_HIERARCHY_FILE = "MULTI-AXIAL_HIERARCHY.CSV";
public static final String LOINC_PART_FILE = "Part_Beta_1.csv";
public static final String LOINC_PART_LINK_FILE = "LoincPartLink_Beta_1.csv";
public static final String LOINC_PART_RELATED_CODE_MAPPING_FILE = "PartRelatedCodeMapping_Beta_1.csv";
public static final String LOINC_FILE = "Loinc.csv";
public static final String LOINC_HIERARCHY_FILE = "MultiAxialHierarchy.csv";
public static final String LOINC_PART_FILE = "Part.csv";
public static final String LOINC_PART_LINK_FILE = "LoincPartLink.csv";
public static final String LOINC_PART_RELATED_CODE_MAPPING_FILE = "PartRelatedCodeMapping.csv";
public static final String LOINC_RSNA_PLAYBOOK_FILE = "LoincRsnaRadiologyPlaybook.csv";
public static final String LOINC_TOP2000_COMMON_LAB_RESULTS_US_FILE = "Top2000CommonLabResultsUS.csv";
public static final String LOINC_TOP2000_COMMON_LAB_RESULTS_SI_FILE = "Top2000CommonLabResultsSI.csv";
public static final String LOINC_TOP2000_COMMON_LAB_RESULTS_US_FILE = "Top2000CommonLabResultsUs.csv";
public static final String LOINC_TOP2000_COMMON_LAB_RESULTS_SI_FILE = "Top2000CommonLabResultsSi.csv";
public static final String LOINC_UNIVERSAL_LAB_ORDER_VALUESET_FILE = "LoincUniversalLabOrdersValueSet.csv";
public static final String LOINC_IEEE_MEDICAL_DEVICE_CODE_MAPPING_TABLE_CSV = "LoincIeeeMedicalDeviceCodeMappingTable.csv";
public static final String LOINC_IMAGING_DOCUMENT_CODES_FILE = "ImagingDocumentCodes.csv";
private static final int LOG_INCREMENT = 100000;
public static final String LOINC_GROUP_FILE = "Group.csv";
public static final String LOINC_GROUP_TERMS_FILE = "GroupLoincTerms.csv";
public static final String LOINC_PARENT_GROUP_FILE = "ParentGroup.csv";
private static final int LOG_INCREMENT = 1000;
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(TerminologyLoaderSvcImpl.class);
@Autowired
private IHapiTerminologySvc myTermSvc;
@Autowired(required = false)
@ -119,12 +124,20 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {
}
private void iterateOverZipFile(LoadedFileDescriptors theDescriptors, String theFileNamePart, IRecordHandler theHandler, char theDelimiter, QuoteMode theQuoteMode) {
private void iterateOverZipFile(LoadedFileDescriptors theDescriptors, String theFileNamePart, IRecordHandler theHandler, char theDelimiter, QuoteMode theQuoteMode, boolean theIsPartialFilename) {
boolean foundMatch = false;
for (FileDescriptor nextZipBytes : theDescriptors.getUncompressedFileDescriptors()) {
String nextFilename = nextZipBytes.getFilename();
if (nextFilename.contains(theFileNamePart)) {
boolean matches;
if (theIsPartialFilename) {
matches = nextFilename.contains(theFileNamePart);
} else {
matches = nextFilename.endsWith("/" + theFileNamePart) || nextFilename.equals(theFileNamePart);
}
if (matches) {
ourLog.info("Processing file {}", nextFilename);
foundMatch = true;
Reader reader;
CSVParser parsed;
@ -149,6 +162,9 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {
int nextLoggedCount = 0;
while (iter.hasNext()) {
CSVRecord nextRecord = iter.next();
if (nextRecord.isConsistent() == false) {
continue;
}
theHandler.accept(nextRecord);
count++;
if (count >= nextLoggedCount) {
@ -164,6 +180,10 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {
}
if (!foundMatch) {
throw new InvalidRequestException("Did not find file matching " + theFileNamePart);
}
}
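The new theIsPartialFilename flag separates the SNOMED CT case, where release dates are embedded in the filenames, from the LOINC case, where names are fixed and a substring match would let Group.csv also match ParentGroup.csv. The matching rule in isolation (FilenameMatcher is a hypothetical wrapper):

public class FilenameMatcher {

    static boolean matches(String theFilename, String theFileNamePart, boolean theIsPartialFilename) {
        if (theIsPartialFilename) {
            // SNOMED CT style: fragment may appear anywhere in the name
            return theFilename.contains(theFileNamePart);
        }
        // LOINC style: exact file, possibly at the end of a directory path
        return theFilename.endsWith("/" + theFileNamePart) || theFilename.equals(theFileNamePart);
    }

    public static void main(String[] args) {
        System.out.println(matches("Group.csv", "Group.csv", false));        // true
        System.out.println(matches("loinc/Group.csv", "Group.csv", false));  // true
        System.out.println(matches("ParentGroup.csv", "Group.csv", false));  // false
        System.out.println(matches("Terminology/sct2_Concept_Full_INT_20180131.txt",
            "Terminology/sct2_Concept_Full_", true));                        // true
    }
}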
@Override
@ -171,10 +191,7 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {
LoadedFileDescriptors descriptors = new LoadedFileDescriptors(theFiles);
List<String> mandatoryFilenameFragments = Arrays.asList(
LOINC_FILE,
LOINC_HIERARCHY_FILE);
descriptors.verifyMandatoryFilesExist(mandatoryFilenameFragments);
List<String> optionalFilenameFragments = Arrays.asList(
LOINC_HIERARCHY_FILE,
LOINC_UPLOAD_PROPERTIES_FILE,
LOINC_ANSWERLIST_FILE,
LOINC_ANSWERLIST_LINK_FILE,
@ -189,6 +206,10 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {
LOINC_IEEE_MEDICAL_DEVICE_CODE_MAPPING_TABLE_CSV,
LOINC_IMAGING_DOCUMENT_CODES_FILE
);
descriptors.verifyMandatoryFilesExist(mandatoryFilenameFragments);
List<String> optionalFilenameFragments = Arrays.asList(
);
descriptors.verifyOptionalFilesExist(optionalFilenameFragments);
ourLog.info("Beginning LOINC processing");
@ -251,60 +272,75 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {
// Part file
handler = new LoincPartHandler(codeSystemVersion, code2concept);
iterateOverZipFile(theDescriptors, LOINC_PART_FILE, handler, ',', QuoteMode.NON_NUMERIC);
iterateOverZipFile(theDescriptors, LOINC_PART_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);
Map<PartTypeAndPartName, String> partTypeAndPartNameToPartNumber = ((LoincPartHandler) handler).getPartTypeAndPartNameToPartNumber();
// Loinc Codes
handler = new LoincHandler(codeSystemVersion, code2concept, propertyNamesToTypes, partTypeAndPartNameToPartNumber);
iterateOverZipFile(theDescriptors, LOINC_FILE, handler, ',', QuoteMode.NON_NUMERIC);
iterateOverZipFile(theDescriptors, LOINC_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);
// Loinc Hierarchy
handler = new LoincHierarchyHandler(codeSystemVersion, code2concept);
iterateOverZipFile(theDescriptors, LOINC_HIERARCHY_FILE, handler, ',', QuoteMode.NON_NUMERIC);
iterateOverZipFile(theDescriptors, LOINC_HIERARCHY_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);
// Answer lists (ValueSets of potential answers/values for loinc "questions")
handler = new LoincAnswerListHandler(codeSystemVersion, code2concept, valueSets, conceptMaps, uploadProperties);
iterateOverZipFile(theDescriptors, LOINC_ANSWERLIST_FILE, handler, ',', QuoteMode.NON_NUMERIC);
iterateOverZipFile(theDescriptors, LOINC_ANSWERLIST_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);
// Answer list links (connects loinc observation codes to answerlist codes)
handler = new LoincAnswerListLinkHandler(code2concept, valueSets);
iterateOverZipFile(theDescriptors, LOINC_ANSWERLIST_LINK_FILE, handler, ',', QuoteMode.NON_NUMERIC);
iterateOverZipFile(theDescriptors, LOINC_ANSWERLIST_LINK_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);
// RSNA Playbook file
// Note that this should come before the "Part Related Code Mapping"
// file because there are some duplicate mappings between these
// two files, and the RSNA Playbook file has more metadata
handler = new LoincRsnaPlaybookHandler(code2concept, valueSets, conceptMaps, uploadProperties);
iterateOverZipFile(theDescriptors, LOINC_RSNA_PLAYBOOK_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);
// Part link file
handler = new LoincPartLinkHandler(codeSystemVersion, code2concept);
iterateOverZipFile(theDescriptors, LOINC_PART_LINK_FILE, handler, ',', QuoteMode.NON_NUMERIC);
iterateOverZipFile(theDescriptors, LOINC_PART_LINK_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);
// Part related code mapping
handler = new LoincPartRelatedCodeMappingHandler(codeSystemVersion, code2concept, valueSets, conceptMaps, uploadProperties);
iterateOverZipFile(theDescriptors, LOINC_PART_RELATED_CODE_MAPPING_FILE, handler, ',', QuoteMode.NON_NUMERIC);
handler = new LoincPartRelatedCodeMappingHandler(code2concept, valueSets, conceptMaps, uploadProperties);
iterateOverZipFile(theDescriptors, LOINC_PART_RELATED_CODE_MAPPING_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);
// Document Ontology File
handler = new LoincDocumentOntologyHandler(code2concept, propertyNamesToTypes, valueSets, conceptMaps, uploadProperties);
iterateOverZipFile(theDescriptors, LOINC_DOCUMENT_ONTOLOGY_FILE, handler, ',', QuoteMode.NON_NUMERIC);
// RSNA Playbook file
handler = new LoincRsnaPlaybookHandler(code2concept, valueSets, conceptMaps, uploadProperties);
iterateOverZipFile(theDescriptors, LOINC_RSNA_PLAYBOOK_FILE, handler, ',', QuoteMode.NON_NUMERIC);
iterateOverZipFile(theDescriptors, LOINC_DOCUMENT_ONTOLOGY_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);
// Top 2000 Codes - US
handler = new LoincTop2000LabResultsUsHandler(code2concept, valueSets, conceptMaps, uploadProperties);
iterateOverZipFile(theDescriptors, LOINC_TOP2000_COMMON_LAB_RESULTS_US_FILE, handler, ',', QuoteMode.NON_NUMERIC);
iterateOverZipFile(theDescriptors, LOINC_TOP2000_COMMON_LAB_RESULTS_US_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);
// Top 2000 Codes - SI
handler = new LoincTop2000LabResultsSiHandler(code2concept, valueSets, conceptMaps, uploadProperties);
iterateOverZipFile(theDescriptors, LOINC_TOP2000_COMMON_LAB_RESULTS_SI_FILE, handler, ',', QuoteMode.NON_NUMERIC);
iterateOverZipFile(theDescriptors, LOINC_TOP2000_COMMON_LAB_RESULTS_SI_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);
// Universal Lab Order ValueSet
handler = new LoincUniversalOrderSetHandler(code2concept, valueSets, conceptMaps, uploadProperties);
iterateOverZipFile(theDescriptors, LOINC_UNIVERSAL_LAB_ORDER_VALUESET_FILE, handler, ',', QuoteMode.NON_NUMERIC);
iterateOverZipFile(theDescriptors, LOINC_UNIVERSAL_LAB_ORDER_VALUESET_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);
// IEEE Medical Device Codes
handler = new LoincIeeeMedicalDeviceCodeHandler(code2concept, valueSets, conceptMaps, uploadProperties);
iterateOverZipFile(theDescriptors, LOINC_IEEE_MEDICAL_DEVICE_CODE_MAPPING_TABLE_CSV, handler, ',', QuoteMode.NON_NUMERIC);
iterateOverZipFile(theDescriptors, LOINC_IEEE_MEDICAL_DEVICE_CODE_MAPPING_TABLE_CSV, handler, ',', QuoteMode.NON_NUMERIC, false);
// Imaging Document Codes
handler = new LoincImagingDocumentCodeHandler(code2concept, valueSets, conceptMaps, uploadProperties);
iterateOverZipFile(theDescriptors, LOINC_IMAGING_DOCUMENT_CODES_FILE, handler, ',', QuoteMode.NON_NUMERIC);
iterateOverZipFile(theDescriptors, LOINC_IMAGING_DOCUMENT_CODES_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);
// Group File
handler = new LoincGroupFileHandler(code2concept, valueSets, conceptMaps, uploadProperties);
iterateOverZipFile(theDescriptors, LOINC_GROUP_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);
// Group Terms File
handler = new LoincGroupTermsFileHandler(code2concept, valueSets, conceptMaps, uploadProperties);
iterateOverZipFile(theDescriptors, LOINC_GROUP_TERMS_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);
// Parent Group File
handler = new LoincParentGroupFileHandler(code2concept, valueSets, conceptMaps, uploadProperties);
iterateOverZipFile(theDescriptors, LOINC_PARENT_GROUP_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);
IOUtils.closeQuietly(theDescriptors);
@ -332,18 +368,18 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {
final Set<String> validConceptIds = new HashSet<>();
IRecordHandler handler = new SctHandlerConcept(validConceptIds);
iterateOverZipFile(theDescriptors, SCT_FILE_CONCEPT, handler, '\t', null);
iterateOverZipFile(theDescriptors, SCT_FILE_CONCEPT, handler, '\t', null, true);
ourLog.info("Have {} valid concept IDs", validConceptIds.size());
handler = new SctHandlerDescription(validConceptIds, code2concept, id2concept, codeSystemVersion);
iterateOverZipFile(theDescriptors, SCT_FILE_DESCRIPTION, handler, '\t', null);
iterateOverZipFile(theDescriptors, SCT_FILE_DESCRIPTION, handler, '\t', null, true);
ourLog.info("Got {} concepts, cloning map", code2concept.size());
final HashMap<String, TermConcept> rootConcepts = new HashMap<>(code2concept);
handler = new SctHandlerRelationship(codeSystemVersion, rootConcepts, code2concept);
iterateOverZipFile(theDescriptors, SCT_FILE_RELATIONSHIP, handler, '\t', null);
iterateOverZipFile(theDescriptors, SCT_FILE_RELATIONSHIP, handler, '\t', null, true);
IOUtils.closeQuietly(theDescriptors);

View File

@ -26,6 +26,8 @@ import org.hl7.fhir.r4.model.ConceptMap;
import org.hl7.fhir.r4.model.ContactPoint;
import org.hl7.fhir.r4.model.Enumerations;
import org.hl7.fhir.r4.model.ValueSet;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.HashMap;
import java.util.List;
@ -35,8 +37,9 @@ import java.util.Properties;
import static org.apache.commons.lang3.StringUtils.*;
public abstract class BaseLoincHandler implements IRecordHandler {
private static final Logger ourLog = LoggerFactory.getLogger(BaseLoincHandler.class);
public static final String LOINC_COPYRIGHT_STATEMENT = "This content from LOINC® is copyright © 1995 Regenstrief Institute, Inc. and the LOINC Committee, and available at no cost under the license at https://loinc.org/license/";
/**
* This is <b>NOT</b> the LOINC CodeSystem URI! It is just
* the website URL to LOINC.
@ -52,8 +55,10 @@ public abstract class BaseLoincHandler implements IRecordHandler {
BaseLoincHandler(Map<String, TermConcept> theCode2Concept, List<ValueSet> theValueSets, List<ConceptMap> theConceptMaps, Properties theUploadProperties) {
myValueSets = theValueSets;
myValueSets.forEach(t -> myIdToValueSet.put(t.getId(), t));
myCode2Concept = theCode2Concept;
myConceptMaps = theConceptMaps;
myConceptMaps.forEach(t -> myIdToConceptMaps.put(t.getId(), t));
myUploadProperties = theUploadProperties;
}
@ -80,10 +85,9 @@ public abstract class BaseLoincHandler implements IRecordHandler {
String displayName = theDisplayName;
if (isBlank(displayName)) {
for (TermConcept next : myCode2Concept.values()) {
if (next.getCode().equals(theCode)) {
displayName = next.getDisplay();
}
TermConcept concept = myCode2Concept.get(theCode);
if (concept != null) {
displayName = concept.getDisplay();
}
}
@ -176,6 +180,8 @@ public abstract class BaseLoincHandler implements IRecordHandler {
.setCode(theMapping.getTargetCode())
.setDisplay(theMapping.getTargetDisplay())
.setEquivalence(theMapping.getEquivalence());
} else {
ourLog.info("Not going to add a mapping from [{}/{}] to [{}/{}] because one already exists", theMapping.getSourceCodeSystem(), theMapping.getSourceCode(), theMapping.getTargetCodeSystem(), theMapping.getTargetCode());
}
}
@ -192,7 +198,6 @@ public abstract class BaseLoincHandler implements IRecordHandler {
vs.setUrl(theValueSetUri);
vs.setId(theValueSetId);
vs.setVersion(version);
vs.setName(theValueSetName);
vs.setStatus(Enumerations.PublicationStatus.ACTIVE);
vs.setPublisher(REGENSTRIEF_INSTITUTE_INC);
vs.addContact()
@ -206,6 +211,11 @@ public abstract class BaseLoincHandler implements IRecordHandler {
} else {
vs = myIdToValueSet.get(theValueSetId);
}
if (isBlank(vs.getName()) && isNotBlank(theValueSetName)) {
vs.setName(theValueSetName);
}
return vs;
}

View File

@ -0,0 +1,62 @@
package ca.uhn.fhir.jpa.term.loinc;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2018 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.jpa.entity.TermConcept;
import ca.uhn.fhir.jpa.term.IRecordHandler;
import org.apache.commons.csv.CSVRecord;
import org.hl7.fhir.r4.model.ConceptMap;
import org.hl7.fhir.r4.model.ValueSet;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import static org.apache.commons.lang3.StringUtils.trim;
public class LoincGroupFileHandler extends BaseLoincHandler implements IRecordHandler {
public static final String VS_URI_PREFIX = "http://loinc.org/vs/";
public LoincGroupFileHandler(Map<String, TermConcept> theCode2concept, List<ValueSet> theValueSets, List<ConceptMap> theConceptMaps, Properties theUploadProperties) {
super(theCode2concept, theValueSets, theConceptMaps, theUploadProperties);
}
@Override
public void accept(CSVRecord theRecord) {
//"ParentGroupId","GroupId","Group","Archetype","Status","VersionFirstReleased"
String parentGroupId = trim(theRecord.get("ParentGroupId"));
String groupId = trim(theRecord.get("GroupId"));
String groupName = trim(theRecord.get("Group"));
ValueSet parentValueSet = getValueSet(parentGroupId, VS_URI_PREFIX + parentGroupId, null, null);
parentValueSet
.getCompose()
.getIncludeFirstRep()
.addValueSet(VS_URI_PREFIX + groupId);
// Create group to set its name (terms are added in a different
// handler)
getValueSet(groupId, VS_URI_PREFIX + groupId, groupName, null);
}
}
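Group files translate into nested ValueSets: each group gets a ValueSet whose URI is its group id under http://loinc.org/vs/, and a parent group includes its child group by ValueSet URI rather than by enumerating codes. A small sketch of the resulting structure using the R4 model (the group ids are illustrative):

import org.hl7.fhir.r4.model.ValueSet;

public class GroupValueSetExample {
    public static void main(String[] args) {
        String prefix = "http://loinc.org/vs/";

        ValueSet parent = new ValueSet();
        parent.setUrl(prefix + "LG1695-8");
        parent.getCompose()
            .getIncludeFirstRep()
            .addValueSet(prefix + "LG100-4"); // child group referenced by URI

        System.out.println(parent.getCompose().getIncludeFirstRep().getValueSet().get(0).getValue());
    }
}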

View File

@ -0,0 +1,53 @@
package ca.uhn.fhir.jpa.term.loinc;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2018 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.jpa.entity.TermConcept;
import ca.uhn.fhir.jpa.term.IHapiTerminologyLoaderSvc;
import ca.uhn.fhir.jpa.term.IRecordHandler;
import org.apache.commons.csv.CSVRecord;
import org.hl7.fhir.r4.model.ConceptMap;
import org.hl7.fhir.r4.model.ValueSet;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import static org.apache.commons.lang3.StringUtils.trim;
public class LoincGroupTermsFileHandler extends BaseLoincHandler implements IRecordHandler {
public LoincGroupTermsFileHandler(Map<String, TermConcept> theCode2concept, List<ValueSet> theValueSets, List<ConceptMap> theConceptMaps, Properties theUploadProperties) {
super(theCode2concept, theValueSets, theConceptMaps, theUploadProperties);
}
@Override
public void accept(CSVRecord theRecord) {
//"Category","GroupId","Archetype","LoincNumber","LongCommonName"
String groupId = trim(theRecord.get("GroupId"));
String loincNumber = trim(theRecord.get("LoincNumber"));
ValueSet valueSet = getValueSet(groupId, LoincGroupFileHandler.VS_URI_PREFIX + groupId, null, null);
addCodeAsIncludeToValueSet(valueSet, IHapiTerminologyLoaderSvc.LOINC_URI, loincNumber, null);
}
}

View File

@ -39,6 +39,7 @@ import static org.apache.commons.lang3.StringUtils.trim;
public class LoincHandler implements IRecordHandler {
private static final Logger ourLog = LoggerFactory.getLogger(LoincHandler.class);
private final Map<String, TermConcept> myCode2Concept;
private final TermCodeSystemVersion myCodeSystemVersion;
private final Map<String, CodeSystem.PropertyType> myPropertyNames;
@ -86,7 +87,17 @@ public class LoincHandler implements IRecordHandler {
concept.addPropertyString(nextPropertyName, nextPropertyValue);
break;
case CODING:
PartTypeAndPartName key = new PartTypeAndPartName(nextPropertyName, nextPropertyValue);
// FIXME: handle "Ser/Plas^Donor"
String propertyValue = nextPropertyValue;
if (nextPropertyName.equals("COMPONENT")) {
if (propertyValue.contains("^")) {
propertyValue = propertyValue.substring(0, propertyValue.indexOf("^"));
} else if (propertyValue.contains("/")) {
propertyValue = propertyValue.substring(0, propertyValue.indexOf("/"));
}
}
PartTypeAndPartName key = new PartTypeAndPartName(nextPropertyName, propertyValue);
String partNumber = myPartTypeAndPartNameToPartNumber.get(key);
if (partNumber == null && nextPropertyName.equals("TIME_ASPCT")) {
@ -106,11 +117,12 @@ public class LoincHandler implements IRecordHandler {
continue;
}
// Validate.notBlank(partNumber, "Unknown part: " + key);
if (isNotBlank(partNumber)) {
concept.addPropertyCoding(nextPropertyName, IHapiTerminologyLoaderSvc.LOINC_URI, partNumber, nextPropertyValue);
} else {
ourLog.warn("Unable to find part code with TYPE[{}] and NAME[{}]", key.getPartType(), key.getPartName());
String msg = "Unable to find part code with TYPE[" + key.getPartType() + "] and NAME[" + nextPropertyValue + "] (using name " + propertyValue + ")";
ourLog.warn(msg);
// throw new InternalErrorException(msg);
}
break;
case DECIMAL:
@ -129,5 +141,4 @@ public class LoincHandler implements IRecordHandler {
myCode2Concept.put(code, concept);
}
}
private static final Logger ourLog = LoggerFactory.getLogger(LoincHandler.class);
}
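The COMPONENT special-case above trims the part name before the lookup: anything from the first ^ onward is dropped, and only if there is no ^ does it cut at the first /. The trimming rule in isolation (trimComponent is a hypothetical helper):

public class ComponentNameTrimmer {

    static String trimComponent(String theValue) {
        if (theValue.contains("^")) {
            return theValue.substring(0, theValue.indexOf("^"));
        }
        if (theValue.contains("/")) {
            return theValue.substring(0, theValue.indexOf("/"));
        }
        return theValue;
    }

    public static void main(String[] args) {
        System.out.println(trimComponent("Ser/Plas^Donor")); // Ser/Plas
        System.out.println(trimComponent("Ser/Plas"));       // Ser
        System.out.println(trimComponent("Glucose"));        // Glucose
    }
}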

View File

@ -37,7 +37,7 @@ import static org.apache.commons.lang3.StringUtils.trim;
public class LoincIeeeMedicalDeviceCodeHandler extends BaseLoincHandler implements IRecordHandler {
public static final String LOINC_IEEE_CM_ID = "LOINC-IEEE-MEDICAL-DEVICE-CM";
public static final String LOINC_IEEE_CM_URI = "http://loinc.org/fhir/loinc-ieee-device-code-mappings";
public static final String LOINC_IEEE_CM_URI = "http://loinc.org/cm/loinc-to-ieee-device-codes";
public static final String LOINC_IEEE_CM_NAME = "LOINC/IEEE Device Code Mappings";
private static final String CM_COPYRIGHT = "This content from LOINC® is copyright © 1995 Regenstrief Institute, Inc. and the LOINC Committee, and available at no cost under the license at https://loinc.org/license/. The LOINC/IEEE Medical Device Code Mapping Table contains content from IEEE (http://ieee.org), copyright © 2017 IEEE.";

View File

@ -36,7 +36,7 @@ import static org.apache.commons.lang3.StringUtils.trim;
public class LoincImagingDocumentCodeHandler extends BaseLoincHandler implements IRecordHandler {
public static final String VS_ID = "loinc-imaging-document-codes";
public static final String VS_URI = "http://loinc.org/fhir/loinc-imaging-document-codes";
public static final String VS_URI = "http://loinc.org/vs/loinc-imaging-document-codes";
public static final String VS_NAME = "LOINC Imaging Document Codes";
public LoincImagingDocumentCodeHandler(Map<String, TermConcept> theCode2concept, List<ValueSet> theValueSets, List<ConceptMap> theConceptMaps, Properties theUploadProperties) {

View File

@ -0,0 +1,51 @@
package ca.uhn.fhir.jpa.term.loinc;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2018 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.jpa.entity.TermConcept;
import ca.uhn.fhir.jpa.term.IRecordHandler;
import org.apache.commons.csv.CSVRecord;
import org.hl7.fhir.r4.model.ConceptMap;
import org.hl7.fhir.r4.model.ValueSet;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import static org.apache.commons.lang3.StringUtils.trim;
public class LoincParentGroupFileHandler extends BaseLoincHandler implements IRecordHandler {
public LoincParentGroupFileHandler(Map<String, TermConcept> theCode2concept, List<ValueSet> theValueSets, List<ConceptMap> theConceptMaps, Properties theUploadProperties) {
super(theCode2concept, theValueSets, theConceptMaps, theUploadProperties);
}
@Override
public void accept(CSVRecord theRecord) {
// "ParentGroupId","ParentGroup","Status"
String parentGroupId = trim(theRecord.get("ParentGroupId"));
String parentGroupName = trim(theRecord.get("ParentGroup"));
getValueSet(parentGroupId, LoincGroupFileHandler.VS_URI_PREFIX + parentGroupId, parentGroupName, null);
}
}

View File

@ -20,7 +20,6 @@ package ca.uhn.fhir.jpa.term.loinc;
* #L%
*/
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
import ca.uhn.fhir.jpa.entity.TermConcept;
import ca.uhn.fhir.jpa.term.IHapiTerminologyLoaderSvc;
import ca.uhn.fhir.jpa.term.IRecordHandler;
@ -41,23 +40,23 @@ public class LoincPartRelatedCodeMappingHandler extends BaseLoincHandler impleme
public static final String LOINC_SCT_PART_MAP_ID = "loinc-parts-to-snomed-ct";
public static final String LOINC_SCT_PART_MAP_URI = "http://loinc.org/cm/loinc-parts-to-snomed-ct";
public static final String LOINC_SCT_PART_MAP_NAME = "LOINC Part Map to SNOMED CT";
public static final String LOINC_RXNORM_PART_MAP_ID = "loinc-parts-to-rxnorm";
public static final String LOINC_RXNORM_PART_MAP_URI = "http://loinc.org/cm/loinc-parts-to-rxnorm";
public static final String LOINC_RXNORM_PART_MAP_NAME = "LOINC Part Map to RxNORM";
public static final String LOINC_RADLEX_PART_MAP_ID = "loinc-parts-to-radlex";
public static final String LOINC_RADLEX_PART_MAP_URI = "http://loinc.org/cm/loinc-parts-to-radlex";
public static final String LOINC_RADLEX_PART_MAP_NAME = "LOINC Part Map to RADLEX";
public static final String LOINC_TERM_TO_RPID_PART_MAP_ID = "loinc-term-to-rpids";
public static final String LOINC_TERM_TO_RPID_PART_MAP_URI = "http://loinc.org/cm/loinc-to-rpids";
public static final String LOINC_TERM_TO_RPID_PART_MAP_NAME = "LOINC Terms to RadLex RPIDs";
public static final String LOINC_PART_TO_RID_PART_MAP_ID = "loinc-part-to-rids";
public static final String LOINC_PART_TO_RID_PART_MAP_URI = "http://loinc.org/cm/loinc-to-rids";
public static final String LOINC_PART_TO_RID_PART_MAP_NAME = "LOINC Parts to RadLex RIDs";
private static final String LOINC_SCT_PART_MAP_NAME = "LOINC Part Map to SNOMED CT";
private static final String LOINC_RXNORM_PART_MAP_ID = "loinc-parts-to-rxnorm";
private static final String LOINC_RXNORM_PART_MAP_URI = "http://loinc.org/cm/loinc-parts-to-rxnorm";
private static final String LOINC_RXNORM_PART_MAP_NAME = "LOINC Part Map to RxNORM";
private static final String CM_COPYRIGHT = "This content from LOINC® is copyright © 1995 Regenstrief Institute, Inc. and the LOINC Committee, and available at no cost under the license at https://loinc.org/license/. The LOINC Part File, LOINC/SNOMED CT Expression Association and Map Sets File, RELMA database and associated search index files include SNOMED Clinical Terms (SNOMED CT®) which is used by permission of the International Health Terminology Standards Development Organisation (IHTSDO) under license. All rights are reserved. SNOMED CT® was originally created by The College of American Pathologists. “SNOMED” and “SNOMED CT” are registered trademarks of the IHTSDO. Use of SNOMED CT content is subject to the terms and conditions set forth in the SNOMED CT Affiliate License Agreement. It is the responsibility of those implementing this product to ensure they are appropriately licensed and for more information on the license, including how to register as an Affiliate Licensee, please refer to http://www.snomed.org/snomed-ct/get-snomed-ct or info@snomed.org. Under the terms of the Affiliate License, use of SNOMED CT in countries that are not IHTSDO Members is subject to reporting and fee payment obligations. However, IHTSDO agrees to waive the requirements to report and pay fees for use of SNOMED CT content included in the LOINC Part Mapping and LOINC Term Associations for purposes that support or enable more effective use of LOINC. This material includes content from the US Edition to SNOMED CT, which is developed and maintained by the U.S. National Library of Medicine and is available to authorized UMLS Metathesaurus Licensees from the UTS Downloads site at https://uts.nlm.nih.gov.";
private final Map<String, TermConcept> myCode2Concept;
private final TermCodeSystemVersion myCodeSystemVersion;
private final List<ConceptMap> myConceptMaps;
private static final String LOINC_PUBCHEM_PART_MAP_URI = "http://loinc.org/cm/loinc-parts-to-pubchem";
private static final String LOINC_PUBCHEM_PART_MAP_ID = "loinc-parts-to-pubchem";
private static final String LOINC_PUBCHEM_PART_MAP_NAME = "LOINC Part Map to PubChem";
public LoincPartRelatedCodeMappingHandler(TermCodeSystemVersion theCodeSystemVersion, Map<String, TermConcept> theCode2concept, List<ValueSet> theValueSets, List<ConceptMap> theConceptMaps, Properties theUploadProperties) {
public LoincPartRelatedCodeMappingHandler(Map<String, TermConcept> theCode2concept, List<ValueSet> theValueSets, List<ConceptMap> theConceptMaps, Properties theUploadProperties) {
super(theCode2concept, theValueSets, theConceptMaps, theUploadProperties);
myCodeSystemVersion = theCodeSystemVersion;
myCode2Concept = theCode2concept;
myConceptMaps = theConceptMaps;
}
@Override
@ -108,12 +107,20 @@ public class LoincPartRelatedCodeMappingHandler extends BaseLoincHandler impleme
loincPartMapName = LOINC_RXNORM_PART_MAP_NAME;
break;
case "http://www.radlex.org":
loincPartMapId = LOINC_RADLEX_PART_MAP_ID;
loincPartMapUri = LOINC_RADLEX_PART_MAP_URI;
loincPartMapName = LOINC_RADLEX_PART_MAP_NAME;
loincPartMapId = LOINC_PART_TO_RID_PART_MAP_ID;
loincPartMapUri = LOINC_PART_TO_RID_PART_MAP_URI;
loincPartMapName = LOINC_PART_TO_RID_PART_MAP_NAME;
break;
case "http://pubchem.ncbi.nlm.nih.gov":
loincPartMapId = LOINC_PUBCHEM_PART_MAP_ID;
loincPartMapUri = LOINC_PUBCHEM_PART_MAP_URI;
loincPartMapName = LOINC_PUBCHEM_PART_MAP_NAME;
break;
default:
throw new InternalErrorException("Don't know how to handle mapping to system: " + extCodeSystem);
loincPartMapId = extCodeSystem.replaceAll("[^a-zA-Z]", "");
loincPartMapUri = extCodeSystem;
loincPartMapName = "Unknown Mapping";
break;
}
addConceptMapEntry(

View File

@ -39,21 +39,16 @@ public class LoincRsnaPlaybookHandler extends BaseLoincHandler implements IRecor
public static final String RSNA_CODES_VS_ID = "loinc-rsna-radiology-playbook";
public static final String RSNA_CODES_VS_URI = "http://loinc.org/vs/loinc-rsna-radiology-playbook";
public static final String RSNA_CODES_VS_NAME = "LOINC/RSNA Radiology Playbook";
public static final String RID_MAPPING_CM_ID = "LOINC-TO-RID-CODES-CM";
public static final String RID_MAPPING_CM_URI = "http://loinc.org/rid-codes";
public static final String RID_MAPPING_CM_NAME = "RSNA Playbook RID Codes Mapping";
public static final String RID_CS_URI = "http://www.radlex.org";
public static final String RPID_MAPPING_CM_ID = "LOINC-TO-RPID-CODES-CM";
public static final String RPID_MAPPING_CM_URI = "http://loinc.org/rpid-codes";
public static final String RPID_MAPPING_CM_NAME = "RSNA Playbook RPID Codes Mapping";
/*
* About these being the same - Per Dan:
* About these being the same - Per Dan Vreeman:
* We had some discussion about this, and both
* RIDs (RadLex clinical terms) and RPIDs (Radlex Playbook Ids)
* belong to the same "code system" since they will never collide.
* The codesystem uri is "http://www.radlex.org". FYI, that's
* now listed on the FHIR page:
* https://www.hl7.org/fhir/terminologies-systems.html
* -ja
*/
public static final String RPID_CS_URI = RID_CS_URI;
private static final String CM_COPYRIGHT = "This content from LOINC® is copyright © 1995 Regenstrief Institute, Inc. and the LOINC Committee, and available at no cost under the license at https://loinc.org/license/. The LOINC/RSNA Radiology Playbook and the LOINC Part File contain content from RadLex® (http://rsna.org/RadLex.aspx), copyright © 2005-2017, The Radiological Society of North America, Inc., available at no cost under the license at http://www.rsna.org/uploadedFiles/RSNA/Content/Informatics/RadLex_License_Agreement_and_Terms_of_Use_V2_Final.pdf.";
@ -179,9 +174,9 @@ public class LoincRsnaPlaybookHandler extends BaseLoincHandler implements IRecor
if (isNotBlank(rid)) {
addConceptMapEntry(
new ConceptMapping()
.setConceptMapId(RID_MAPPING_CM_ID)
.setConceptMapUri(RID_MAPPING_CM_URI)
.setConceptMapName(RID_MAPPING_CM_NAME)
.setConceptMapId(LoincPartRelatedCodeMappingHandler.LOINC_PART_TO_RID_PART_MAP_ID)
.setConceptMapUri(LoincPartRelatedCodeMappingHandler.LOINC_PART_TO_RID_PART_MAP_URI)
.setConceptMapName(LoincPartRelatedCodeMappingHandler.LOINC_PART_TO_RID_PART_MAP_NAME)
.setSourceCodeSystem(IHapiTerminologyLoaderSvc.LOINC_URI)
.setSourceCode(partNumber)
.setSourceDisplay(partName)
@ -196,9 +191,9 @@ public class LoincRsnaPlaybookHandler extends BaseLoincHandler implements IRecor
if (isNotBlank(rpid)) {
addConceptMapEntry(
new ConceptMapping()
.setConceptMapId(RPID_MAPPING_CM_ID)
.setConceptMapUri(RPID_MAPPING_CM_URI)
.setConceptMapName(RPID_MAPPING_CM_NAME)
.setConceptMapId(LoincPartRelatedCodeMappingHandler.LOINC_TERM_TO_RPID_PART_MAP_ID)
.setConceptMapUri(LoincPartRelatedCodeMappingHandler.LOINC_TERM_TO_RPID_PART_MAP_URI)
.setConceptMapName(LoincPartRelatedCodeMappingHandler.LOINC_TERM_TO_RPID_PART_MAP_NAME)
.setSourceCodeSystem(IHapiTerminologyLoaderSvc.LOINC_URI)
.setSourceCode(loincNumber)
.setSourceDisplay(longCommonName)

View File

@ -34,7 +34,7 @@ import static org.apache.commons.lang3.StringUtils.trim;
public class LoincUniversalOrderSetHandler extends BaseLoincHandler implements IRecordHandler {
public static final String VS_ID = "loinc-universal-order-set-vs";
public static final String VS_URI = "http://loinc.org/fhir/loinc-universal-order-set";
public static final String VS_URI = "http://loinc.org/vs/loinc-universal-order-set";
public static final String VS_NAME = "LOINC Universal Order Set";
public LoincUniversalOrderSetHandler(Map<String, TermConcept> theCode2concept, List<ValueSet> theValueSets, List<ConceptMap> theConceptMaps, Properties theUploadProperties) {

View File

@ -150,6 +150,11 @@ public class JpaConstants {
*/
public static final String OPERATION_EVERYTHING = "$everything";
/**
* Operation name for the $process-message operation
*/
public static final String OPERATION_PROCESS_MESSAGE = "$process-message";
/**
* Operation name for the $meta-delete operation
*/

View File

@ -83,14 +83,19 @@ public class ReindexController implements IReindexController {
break;
}
}
} catch (Exception e) {
ourLog.error("Failure during reindex", e);
count = -1;
} finally {
myReindexingLock.release();
}
synchronized (this) {
if (count == null) {
ourLog.info("Reindex pass complete, no remaining resource to index");
myDontReindexUntil = System.currentTimeMillis() + DateUtils.MILLIS_PER_HOUR;
} else {
ourLog.info("Reindex pass complete, {} remaining resource to index", count);
myDontReindexUntil = null;
}
}
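The synchronized block implements an hour-long back-off: a pass that finds nothing to index (count == null) sets myDontReindexUntil, and any pass that finds work clears it. A minimal sketch of that suppression window, assuming a shouldRun() check consulted before each pass (the method names are hypothetical):

import org.apache.commons.lang3.time.DateUtils;

public class ReindexBackoff {
    private Long myDontReindexUntil;

    public synchronized boolean shouldRun() {
        // Suppressed while the back-off window from an empty pass is open
        return myDontReindexUntil == null || myDontReindexUntil <= System.currentTimeMillis();
    }

    public synchronized void recordPassResult(Integer theCount) {
        if (theCount == null) {
            // Nothing left to index: stay quiet for an hour
            myDontReindexUntil = System.currentTimeMillis() + DateUtils.MILLIS_PER_HOUR;
        } else {
            myDontReindexUntil = null;
        }
    }
}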

View File

@ -24,7 +24,7 @@ import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;
import org.hl7.fhir.r4.hapi.ctx.DefaultProfileValidationSupport;
import org.hl7.fhir.r4.hapi.ctx.ValidationSupportChain;
import org.hl7.fhir.r4.hapi.validation.ValidationSupportChain;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;

View File

@ -12,6 +12,7 @@ import org.slf4j.LoggerFactory;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Lazy;
import org.springframework.core.env.Environment;
import org.springframework.orm.jpa.JpaTransactionManager;
import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean;
import org.springframework.transaction.annotation.EnableTransactionManagement;
@ -43,11 +44,6 @@ public class TestDstu2Config extends BaseJavaConfigDstu2 {
private Exception myLastStackTrace;
private String myLastStackTraceThreadName;
@Bean(name="maxDatabaseThreadsForTest")
public Integer getMaxThread(){
return ourMaxThreads;
}
@Bean()
public DaoConfig daoConfig() {
return new DaoConfig();
@ -131,6 +127,11 @@ public class TestDstu2Config extends BaseJavaConfigDstu2 {
return retVal;
}
@Bean(name = "maxDatabaseThreadsForTest")
public Integer getMaxThread() {
return ourMaxThreads;
}
private Properties jpaProperties() {
Properties extraProperties = new Properties();
extraProperties.put("hibernate.format_sql", "true");
@ -165,4 +166,9 @@ public class TestDstu2Config extends BaseJavaConfigDstu2 {
return retVal;
}
@Bean
public UnregisterScheduledProcessor unregisterScheduledProcessor(Environment theEnv) {
return new UnregisterScheduledProcessor(theEnv);
}
}

View File

@ -6,14 +6,9 @@ import ca.uhn.fhir.jpa.subscription.email.IEmailSender;
import ca.uhn.fhir.jpa.subscription.email.JavaMailEmailSender;
import ca.uhn.fhir.rest.server.interceptor.RequestValidatingInterceptor;
import ca.uhn.fhir.validation.ResultSeverityEnum;
import net.ttddyy.dsproxy.listener.logging.SLF4JLogLevel;
import net.ttddyy.dsproxy.support.ProxyDataSourceBuilder;
import org.apache.commons.dbcp2.BasicDataSource;
import org.hibernate.jpa.HibernatePersistenceProvider;
import org.springframework.beans.BeansException;
import org.springframework.beans.factory.config.BeanFactoryPostProcessor;
import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
import org.springframework.beans.factory.support.DefaultListableBeanFactory;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Lazy;
@ -22,13 +17,11 @@ import org.springframework.context.support.PropertySourcesPlaceholderConfigurer;
import org.springframework.core.env.Environment;
import org.springframework.orm.jpa.JpaTransactionManager;
import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean;
import org.springframework.scheduling.annotation.ScheduledAnnotationBeanPostProcessor;
import org.springframework.transaction.annotation.EnableTransactionManagement;
import javax.persistence.EntityManagerFactory;
import javax.sql.DataSource;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.Properties;
import java.util.concurrent.TimeUnit;
@ -194,23 +187,4 @@ public class TestDstu3Config extends BaseJavaConfigDstu3 {
}
public class UnregisterScheduledProcessor implements BeanFactoryPostProcessor {
private final Environment myEnvironment;
public UnregisterScheduledProcessor(Environment theEnv) {
myEnvironment = theEnv;
}
@Override
public void postProcessBeanFactory(final ConfigurableListableBeanFactory beanFactory) throws BeansException {
String schedulingDisabled = myEnvironment.getProperty("scheduling_disabled");
if ("true".equals(schedulingDisabled)) {
for (String beanName : beanFactory.getBeanNamesForType(ScheduledAnnotationBeanPostProcessor.class)) {
((DefaultListableBeanFactory) beanFactory).removeBeanDefinition(beanName);
}
}
}
}
}

View File

@ -7,15 +7,13 @@ import net.ttddyy.dsproxy.listener.ThreadQueryCountHolder;
import net.ttddyy.dsproxy.listener.logging.SLF4JLogLevel;
import net.ttddyy.dsproxy.support.ProxyDataSourceBuilder;
import org.apache.commons.dbcp2.BasicDataSource;
import org.hibernate.jpa.HibernatePersistenceProvider;
import org.hibernate.query.criteria.LiteralHandlingMode;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Lazy;
import org.springframework.orm.hibernate5.HibernateExceptionTranslator;
import org.springframework.core.env.Environment;
import org.springframework.orm.jpa.JpaTransactionManager;
import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean;
import org.springframework.orm.jpa.vendor.HibernateJpaDialect;
import org.springframework.transaction.annotation.EnableTransactionManagement;
import javax.persistence.EntityManagerFactory;
@ -107,7 +105,7 @@ public class TestR4Config extends BaseJavaConfigR4 {
DataSource dataSource = ProxyDataSourceBuilder
.create(retVal)
// .logQueryBySlf4j(SLF4JLogLevel.INFO, "SQL")
.logQueryBySlf4j(SLF4JLogLevel.INFO, "SQL")
.logSlowQueryBySlf4j(10, TimeUnit.SECONDS)
.countQuery(new ThreadQueryCountHolder())
.build();
@ -163,6 +161,11 @@ public class TestR4Config extends BaseJavaConfigR4 {
return retVal;
}
@Bean
public UnregisterScheduledProcessor unregisterScheduledProcessor(Environment theEnv) {
return new UnregisterScheduledProcessor(theEnv);
}
public static int getMaxThreads() {
return ourMaxThreads;
}

View File

@ -0,0 +1,39 @@
package ca.uhn.fhir.jpa.config;
import org.springframework.beans.BeansException;
import org.springframework.beans.factory.config.BeanFactoryPostProcessor;
import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
import org.springframework.beans.factory.support.DefaultListableBeanFactory;
import org.springframework.core.env.Environment;
import org.springframework.scheduling.annotation.ScheduledAnnotationBeanPostProcessor;
import org.springframework.scheduling.concurrent.ExecutorConfigurationSupport;
/**
* This bean postprocessor disables all scheduled tasks. It is intended
* only to be used in unit tests in circumstances where scheduled
* tasks cause issues.
*/
public class UnregisterScheduledProcessor implements BeanFactoryPostProcessor {
private final Environment myEnvironment;
public UnregisterScheduledProcessor(Environment theEnv) {
myEnvironment = theEnv;
}
@Override
public void postProcessBeanFactory(final ConfigurableListableBeanFactory beanFactory) throws BeansException {
String schedulingDisabled = myEnvironment.getProperty("scheduling_disabled");
if ("true".equals(schedulingDisabled)) {
for (String beanName : beanFactory.getBeanNamesForType(ScheduledAnnotationBeanPostProcessor.class)) {
((DefaultListableBeanFactory) beanFactory).removeBeanDefinition(beanName);
}
for (String beanName : beanFactory.getBeanNamesForType(ExecutorConfigurationSupport.class)) {
ExecutorConfigurationSupport executorConfigSupport = ((DefaultListableBeanFactory) beanFactory).getBean(beanName, ExecutorConfigurationSupport.class);
executorConfigSupport.shutdown();
}
}
}
}
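Because the post-processor only fires when the scheduling_disabled property resolves to "true", individual tests can opt in without touching the config classes. One way to supply the property in a Spring test (a sketch; assumes the usual spring-test context bootstrap around it):

import org.springframework.test.context.TestPropertySource;

// Used together with the normal @RunWith(SpringRunner.class) /
// @ContextConfiguration test setup, this feeds the Environment
// that UnregisterScheduledProcessor consults.
@TestPropertySource(properties = "scheduling_disabled=true")
public class SchedulerFreeTest {
    // ... test methods run against a context with no @Scheduled processing
    // and with ExecutorConfigurationSupport beans shut down eagerly
}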

View File

@ -331,7 +331,6 @@ public abstract class BaseJpaTest {
theSystemDao.expunge(new ExpungeOptions().setExpungeEverything(true));
theDaoConfig.setExpungeEnabled(expungeEnabled);
theSearchParamPresenceSvc.flushCachesForUnitTest();
theSearchParamRegistry.forceRefresh();
}

View File

@ -24,17 +24,33 @@ public class FhirResourceDaoDstu3ConceptMapTest extends BaseJpaDstu3Test {
private IIdType myConceptMapId;
@AfterClass
public static void afterClassClearContext() {
TestUtil.clearAllStaticFieldsForUnitTest();
}
@Before
@Transactional
public void before02() {
myConceptMapId = myConceptMapDao.create(createConceptMap(), mySrd).getId().toUnqualifiedVersionless();
}
@Test
public void testDeleteConceptMap() {
myConceptMapDao.delete(myConceptMapId);
new TransactionTemplate(myTxManager).execute(new TransactionCallbackWithoutResult() {
@Override
protected void doInTransactionWithoutResult(TransactionStatus theStatus) {
TranslationRequest translationRequest = new TranslationRequest();
translationRequest.getCodeableConcept().addCoding()
.setSystem(CS_URL)
.setCode("12345");
translationRequest.setTargetSystem(new UriType(CS_URL_3));
TranslationResult translationResult = myConceptMapDao.translate(translationRequest, null);
assertFalse(translationResult.getResult().booleanValue());
}
});
}
@Test
public void testTranslateByCodeSystemsAndSourceCodeOneToMany() {
ConceptMap conceptMap = myConceptMapDao.read(myConceptMapId);
@ -81,4 +97,9 @@ public class FhirResourceDaoDstu3ConceptMapTest extends BaseJpaDstu3Test {
}
});
}
@AfterClass
public static void afterClassClearContext() {
TestUtil.clearAllStaticFieldsForUnitTest();
}
}

View File

@ -64,7 +64,6 @@ public class FhirResourceDaoDstu3SearchCustomSearchParamTest extends BaseJpaDstu
}
}
@Test
public void testCreateInvalidParamNoPath() {
SearchParameter fooSp = new SearchParameter();
@ -858,6 +857,49 @@ public class FhirResourceDaoDstu3SearchCustomSearchParamTest extends BaseJpaDstu
}
@Test
public void testSearchParameterDescendsIntoContainedResource() {
SearchParameter sp = new SearchParameter();
sp.addBase("Observation");
sp.setCode("specimencollectedtime");
sp.setType(Enumerations.SearchParamType.DATE);
sp.setTitle("Observation Specimen Collected Time");
sp.setExpression("Observation.specimen.resolve().receivedTime");
sp.setXpathUsage(SearchParameter.XPathUsageType.NORMAL);
sp.setStatus(Enumerations.PublicationStatus.ACTIVE);
ourLog.info(myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(sp));
mySearchParameterDao.create(sp);
mySearchParamRegsitry.forceRefresh();
Specimen specimen = new Specimen();
specimen.setId("#FOO");
specimen.setReceivedTimeElement(new DateTimeType("2011-01-01"));
Observation o = new Observation();
o.setId("O1");
o.getContained().add(specimen);
o.setStatus(Observation.ObservationStatus.FINAL);
o.setSpecimen(new Reference("#FOO"));
myObservationDao.update(o);
specimen = new Specimen();
specimen.setId("#FOO");
specimen.setReceivedTimeElement(new DateTimeType("2011-01-03"));
o = new Observation();
o.setId("O2");
o.getContained().add(specimen);
o.setStatus(Observation.ObservationStatus.FINAL);
o.setSpecimen(new Reference("#FOO"));
myObservationDao.update(o);
SearchParameterMap params = new SearchParameterMap();
params.add("specimencollectedtime", new DateParam("2011-01-01"));
IBundleProvider outcome = myObservationDao.search(params);
List<String> ids = toUnqualifiedVersionlessIdValues(outcome);
ourLog.info("IDS: " + ids);
assertThat(ids, contains("Observation/O1"));
}
@Test
public void testSearchWithCustomParam() {

View File

@ -7,6 +7,7 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
import ca.uhn.fhir.jpa.dao.DaoConfig;
import ca.uhn.fhir.jpa.search.JpaRuntimeSearchParam;
import org.hl7.fhir.dstu3.hapi.validation.DefaultProfileValidationSupport;
import org.hl7.fhir.dstu3.hapi.ctx.IValidationSupport;
@ -81,7 +82,8 @@ public class SearchParamExtractorDstu3Test {
}
};
SearchParamExtractorDstu3 extractor = new SearchParamExtractorDstu3(ourCtx, ourValidationSupport, searchParamRegistry);
SearchParamExtractorDstu3 extractor = new SearchParamExtractorDstu3(new DaoConfig(), ourCtx, ourValidationSupport, searchParamRegistry);
extractor.start();
Set<BaseResourceIndexedSearchParam> tokens = extractor.extractSearchParamTokens(new ResourceTable(), obs);
assertEquals(1, tokens.size());
ResourceIndexedSearchParamToken token = (ResourceIndexedSearchParamToken) tokens.iterator().next();

Some files were not shown because too many files have changed in this diff.