Merge branch 'master' into 6469-reduce-memory-overhead-of-searches

commit dcf0c7158b
@@ -5,7 +5,7 @@
     <parent>
         <groupId>ca.uhn.hapi.fhir</groupId>
         <artifactId>hapi-fhir</artifactId>
-        <version>7.7.0-SNAPSHOT</version>
+        <version>7.7.7-SNAPSHOT</version>

         <relativePath>../pom.xml</relativePath>
     </parent>

@@ -5,7 +5,7 @@
     <parent>
         <groupId>ca.uhn.hapi.fhir</groupId>
         <artifactId>hapi-deployable-pom</artifactId>
-        <version>7.7.0-SNAPSHOT</version>
+        <version>7.7.7-SNAPSHOT</version>

         <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
     </parent>

@@ -5,7 +5,7 @@
     <parent>
         <groupId>ca.uhn.hapi.fhir</groupId>
         <artifactId>hapi-deployable-pom</artifactId>
-        <version>7.7.0-SNAPSHOT</version>
+        <version>7.7.7-SNAPSHOT</version>

         <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
     </parent>

@@ -60,6 +60,7 @@ public class RuntimeSearchParam {
     private final List<Component> myComponents;
     private final IIdType myIdUnqualifiedVersionless;
     private IPhoneticEncoder myPhoneticEncoder;
+    private boolean myEnabledForSearching = true;

     /**
      * Constructor
@@ -166,6 +167,24 @@ public class RuntimeSearchParam {
         }
     }

+    /**
+     * Is this search parameter actually enabled for being used in searches (as opposed to only being used for
+     * generating indexes, which might be desired while the search parameter is still being indexed). This
+     * setting defaults to {@literal true} if it isn't set otherwise.
+     */
+    public boolean isEnabledForSearching() {
+        return myEnabledForSearching;
+    }
+
+    /**
+     * Is this search parameter actually enabled for being used in searches (as opposed to only being used for
+     * generating indexes, which might be desired while the search parameter is still being indexed). This
+     * setting defaults to {@literal true} if it isn't set otherwise.
+     */
+    public void setEnabledForSearching(boolean theEnabledForSearching) {
+        myEnabledForSearching = theEnabledForSearching;
+    }
+
     public List<Component> getComponents() {
         return myComponents;
     }
@@ -361,13 +380,6 @@ public class RuntimeSearchParam {
         return !myUpliftRefchains.isEmpty();
     }

-    public enum RuntimeSearchParamStatusEnum {
-        ACTIVE,
-        DRAFT,
-        RETIRED,
-        UNKNOWN
-    }
-
     /**
      * This method tests whether a given FHIRPath expression <i>could</i>
      * possibly apply to the given resource type.
@@ -413,6 +425,13 @@ public class RuntimeSearchParam {
         return false;
     }

+    public enum RuntimeSearchParamStatusEnum {
+        ACTIVE,
+        DRAFT,
+        RETIRED,
+        UNKNOWN
+    }
+
     public static class Component {
         private final String myExpression;
         private final String myReference;

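As an editorial aside: a minimal sketch of how the new flag might be consumed by a caller. Only `RuntimeSearchParam#isEnabledForSearching()` comes from this commit; the helper class and filtering logic below are hypothetical.

```java
import ca.uhn.fhir.context.RuntimeSearchParam;

import java.util.Collection;
import java.util.List;
import java.util.stream.Collectors;

public class SearchParamFilterExample {

    // Hypothetical helper: keep only parameters currently usable in searches,
    // dropping parameters that are index-only while their reindex job runs
    static List<RuntimeSearchParam> searchableParams(Collection<RuntimeSearchParam> theParams) {
        return theParams.stream()
                .filter(RuntimeSearchParam::isEnabledForSearching)
                .collect(Collectors.toList());
    }
}
```
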
@@ -63,6 +63,17 @@ public interface IFhirVersion {

     IIdType newIdType();

+    /**
+     * Creates a new {@link IIdType} instance for the given version with the given value
+     *
+     * @since 8.0.0
+     */
+    default IIdType newIdType(String theValue) {
+        IIdType retVal = newIdType();
+        retVal.setValue(theValue);
+        return retVal;
+    }
+
     /**
      * Returns an instance of <code>IFhirVersionServer<code> for this version.
      * Note that this method may only be called if the <code>hapi-fhir-server</code>

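A short usage sketch for the new `newIdType(String)` default method (editorial, not part of the diff; assumes a standard R4 `FhirContext`):

```java
import ca.uhn.fhir.context.FhirContext;
import org.hl7.fhir.instance.model.api.IIdType;

public class NewIdTypeExample {
    public static void main(String[] args) {
        FhirContext ctx = FhirContext.forR4();
        // The default method creates a version-appropriate IIdType and sets its value
        IIdType id = ctx.getVersion().newIdType("Patient/123/_history/2");
        System.out.println(id.getResourceType()); // Patient
        System.out.println(id.getIdPart()); // 123
        System.out.println(id.getVersionIdPart()); // 2
    }
}
```
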
@@ -110,6 +110,12 @@ public class HapiExtensions {

     public static final String EXT_SP_UNIQUE = "http://hapifhir.io/fhir/StructureDefinition/sp-unique";

+    /**
+     * URL for extension on a Search Parameter which determines whether it should be enabled for searching for resources
+     */
+    public static final String EXT_SEARCHPARAM_ENABLED_FOR_SEARCHING =
+            "http://hapifhir.io/fhir/StructureDefinition/searchparameter-enabled-for-searching";
+
     /**
      * URL for extension on a Phonetic String SearchParameter indicating that text values should be phonetically indexed with the named encoder
      */

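For illustration, a hypothetical R4 sketch of attaching the new extension to a SearchParameter so that it is indexed but not yet searchable (the extension URL constant is from the diff; the parameter itself is an assumption):

```java
import ca.uhn.fhir.util.HapiExtensions;
import org.hl7.fhir.r4.model.BooleanType;
import org.hl7.fhir.r4.model.Enumerations;
import org.hl7.fhir.r4.model.SearchParameter;

public class IndexOnlySearchParameterExample {
    public static SearchParameter buildIndexOnlySearchParameter() {
        SearchParameter sp = new SearchParameter();
        sp.setCode("eyecolour"); // hypothetical custom parameter
        sp.addBase("Patient");
        sp.setType(Enumerations.SearchParamType.TOKEN);
        sp.setExpression("Patient.extension('http://example.org/eyecolour')");
        sp.setStatus(Enumerations.PublicationStatus.ACTIVE);
        // Index the parameter, but keep it disabled for searching until reindexing completes
        sp.addExtension()
                .setUrl(HapiExtensions.EXT_SEARCHPARAM_ENABLED_FOR_SEARCHING)
                .setValue(new BooleanType(false));
        return sp;
    }
}
```
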
@@ -40,13 +40,22 @@ import java.util.stream.Stream;
  */
 public class TaskChunker<T> {

-    public void chunk(Collection<T> theInput, int theChunkSize, Consumer<List<T>> theBatchConsumer) {
+    public static <T> void chunk(List<T> theInput, int theChunkSize, Consumer<List<T>> theBatchConsumer) {
+        if (theInput.size() <= theChunkSize) {
+            theBatchConsumer.accept(theInput);
+            return;
+        }
+        chunk((Collection<T>) theInput, theChunkSize, theBatchConsumer);
+    }
+
+    public static <T> void chunk(Collection<T> theInput, int theChunkSize, Consumer<List<T>> theBatchConsumer) {
         List<T> input;
         if (theInput instanceof List) {
             input = (List<T>) theInput;
         } else {
             input = new ArrayList<>(theInput);
         }

         for (int i = 0; i < input.size(); i += theChunkSize) {
             int to = i + theChunkSize;
             to = Math.min(to, input.size());
@@ -56,12 +65,11 @@ public class TaskChunker<T> {
     }

     @Nonnull
-    public <T> Stream<List<T>> chunk(Stream<T> theStream, int theChunkSize) {
+    public static <T> Stream<List<T>> chunk(Stream<T> theStream, int theChunkSize) {
         return StreamUtil.partition(theStream, theChunkSize);
     }

-    @Nonnull
-    public void chunk(Iterator<T> theIterator, int theChunkSize, Consumer<List<T>> theListConsumer) {
+    public static <T> void chunk(Iterator<T> theIterator, int theChunkSize, Consumer<List<T>> theListConsumer) {
         chunk(Streams.stream(theIterator), theChunkSize).forEach(theListConsumer);
     }
 }

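A usage sketch for the now-static API (editorial; the `TaskChunker` signatures are from the diff). The new `List` overload hands the input through unchanged when it already fits in one chunk, avoiding a copy:

```java
import ca.uhn.fhir.util.TaskChunker;

import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.IntStream;

public class TaskChunkerExample {
    public static void main(String[] args) {
        List<Integer> ids = IntStream.range(0, 35).boxed().collect(Collectors.toList());
        // No TaskChunker instance needed anymore; the consumer receives
        // four chunks of sizes 10, 10, 10 and 5
        TaskChunker.chunk(ids, 10, chunk -> System.out.println("chunk of " + chunk.size()));
    }
}
```
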
@@ -166,6 +166,7 @@ public enum VersionEnum {
     V7_4_2,
     V7_4_3,
     V7_4_4,
+    V7_4_5,

     V7_5_0,
     V7_6_0,

@@ -87,6 +87,7 @@ ca.uhn.fhir.jpa.config.HapiFhirHibernateJpaDialect.resourceVersionConstraintFail
 ca.uhn.fhir.jpa.config.HapiFhirHibernateJpaDialect.resourceIndexedCompositeStringUniqueConstraintFailure=The operation has failed with a unique index constraint failure. This probably means that the operation was trying to create/update a resource that would have resulted in a duplicate value for a unique index.
 ca.uhn.fhir.jpa.config.HapiFhirHibernateJpaDialect.forcedIdConstraintFailure=The operation has failed with a client-assigned ID constraint failure. This typically means that multiple client threads are trying to create a new resource with the same client-assigned ID at the same time, and this thread was chosen to be rejected. It can also happen when a request disables the Upsert Existence Check.
 ca.uhn.fhir.jpa.binary.interceptor.BinaryStorageInterceptor.externalizedBinaryStorageExtensionFoundInRequestBody=Illegal extension found in request payload - URL "{0}" and value "{1}"
+ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.cantUndeleteWithDeletesDisabled=Unable to restore previously deleted resource as deletes are disabled on this server.
 ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.incomingNoopInTransaction=Transaction contains resource with operation NOOP. This is only valid as a response operation, not in a request
 ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.invalidMatchUrlInvalidResourceType=Invalid match URL "{0}" - Unknown resource type: "{1}"
 ca.uhn.fhir.jpa.dao.BaseStorageDao.invalidMatchUrlNoMatches=Invalid match URL "{0}" - No resources match this search
@@ -133,6 +134,7 @@ ca.uhn.fhir.jpa.dao.BaseStorageDao.successfulTimingSuffix=Took {0}ms.
 ca.uhn.fhir.jpa.dao.BaseStorageDao.deleteResourceNotExisting=Not deleted, resource {0} does not exist.
 ca.uhn.fhir.jpa.dao.BaseStorageDao.deleteResourceAlreadyDeleted=Not deleted, resource {0} was already deleted.
 ca.uhn.fhir.jpa.dao.BaseStorageDao.invalidSearchParameter=Unknown search parameter "{0}" for resource type "{1}". Valid search parameters for this search are: {2}
+ca.uhn.fhir.jpa.dao.BaseStorageDao.invalidSearchParameterNotEnabledForSearch=Search parameter "{0}" for resource type "{1}" is not active for searching. Valid search parameters for this search are: {2}
 ca.uhn.fhir.jpa.dao.BaseStorageDao.invalidSortParameter=Unknown _sort parameter value "{0}" for resource type "{1}" (Note: sort parameters values must use a valid Search Parameter). Valid values for this search are: {2}
 ca.uhn.fhir.jpa.dao.BaseStorageDao.invalidSortParameterTooManyChains=Invalid _sort expression, can not chain more than once in a sort expression: {0}

@@ -199,6 +201,7 @@ ca.uhn.fhir.jpa.search.builder.predicate.ResourceLinkPredicateBuilder.invalidTar
 ca.uhn.fhir.jpa.search.SearchCoordinatorSvcImpl.invalidResourceType=Invalid/unsupported resource type: "{0}"

 ca.uhn.fhir.jpa.dao.index.IdHelperService.nonUniqueForcedId=Non-unique ID specified, can not process request
+ca.uhn.fhir.jpa.dao.index.IdHelperService.deletedId=Resource {0} has been deleted

 ca.uhn.fhir.jpa.partition.PartitionLookupSvcImpl.noIdSupplied=No Partition ID supplied
 ca.uhn.fhir.jpa.partition.PartitionLookupSvcImpl.missingPartitionIdOrName=Partition must have an ID and a Name

@@ -37,7 +37,7 @@ public class TaskChunkerTest {
     List<Integer> input = newIntRangeList(0, 35);

     // Execute
-    new TaskChunker<Integer>().chunk(input, 10, myConsumer);
+    TaskChunker.chunk(input, 10, myConsumer);

     // Verify
     verify(myConsumer, times(4)).accept(myConsumerCaptor.capture());

@@ -4,7 +4,7 @@
     <modelVersion>4.0.0</modelVersion>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-fhir-bom</artifactId>
-    <version>7.7.0-SNAPSHOT</version>
+    <version>7.7.7-SNAPSHOT</version>

     <packaging>pom</packaging>
     <name>HAPI FHIR BOM</name>
@@ -12,7 +12,7 @@
     <parent>
         <groupId>ca.uhn.hapi.fhir</groupId>
         <artifactId>hapi-deployable-pom</artifactId>
-        <version>7.7.0-SNAPSHOT</version>
+        <version>7.7.7-SNAPSHOT</version>

         <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
     </parent>

@@ -5,7 +5,7 @@
     <parent>
        <groupId>ca.uhn.hapi.fhir</groupId>
        <artifactId>hapi-fhir</artifactId>
-       <version>7.7.0-SNAPSHOT</version>
+       <version>7.7.7-SNAPSHOT</version>

        <relativePath>../pom.xml</relativePath>
     </parent>

@@ -4,7 +4,7 @@
     <parent>
        <groupId>ca.uhn.hapi.fhir</groupId>
        <artifactId>hapi-deployable-pom</artifactId>
-       <version>7.7.0-SNAPSHOT</version>
+       <version>7.7.7-SNAPSHOT</version>

        <relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
     </parent>

@@ -6,7 +6,7 @@
     <parent>
        <groupId>ca.uhn.hapi.fhir</groupId>
        <artifactId>hapi-fhir-cli</artifactId>
-       <version>7.7.0-SNAPSHOT</version>
+       <version>7.7.7-SNAPSHOT</version>

        <relativePath>../pom.xml</relativePath>
     </parent>

@@ -5,7 +5,7 @@
     <parent>
        <groupId>ca.uhn.hapi.fhir</groupId>
        <artifactId>hapi-fhir</artifactId>
-       <version>7.7.0-SNAPSHOT</version>
+       <version>7.7.7-SNAPSHOT</version>

        <relativePath>../pom.xml</relativePath>
     </parent>

@@ -4,7 +4,7 @@
     <parent>
        <groupId>ca.uhn.hapi.fhir</groupId>
        <artifactId>hapi-deployable-pom</artifactId>
-       <version>7.7.0-SNAPSHOT</version>
+       <version>7.7.7-SNAPSHOT</version>

        <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
     </parent>

@@ -1357,7 +1357,9 @@ public class GenericOkHttpClientDstu2Test {
         .returnBundle(Bundle.class)
         .execute();

-    assertEquals(ourServer.getBaseUrl() + "/fhir/Patient/_search?_elements=identifier%2Cname", MY_SERVLET.ourRequestUri);
+    assertThat(MY_SERVLET.ourRequestUri).isIn(
+        ourServer.getBaseUrl() + "/fhir/Patient/_search?_elements=identifier%2Cname",
+        ourServer.getBaseUrl() + "/fhir/Patient/_search?_elements=name%2Cidentifier");

     // assertThat(MY_SERVLET.ourRequestUri,
     // either(equalTo(ourServer.getBaseUrl() + "/fhir/Patient?name=james&_elements=name%2Cidentifier")).or(equalTo(ourServer.getBaseUrl() +
@@ -1391,7 +1393,10 @@ public class GenericOkHttpClientDstu2Test {
         .execute();

     assertThat(MY_SERVLET.ourRequestUri).contains(ourServer.getBaseUrl() + "/fhir/Patient/_search?");
-    assertThat(MY_SERVLET.ourRequestUri).contains("_elements=identifier%2Cname");
+    assertThat(MY_SERVLET.ourRequestUri).satisfiesAnyOf(
+        s -> assertThat(s).contains("_elements=identifier%2Cname"),
+        s -> assertThat(s).contains("_elements=name%2Cidentifier")
+    );
     assertThat(MY_SERVLET.ourRequestUri).doesNotContain("_format=json");

     // assertThat(MY_SERVLET.ourRequestUri,

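The pattern in this and the matching JAX-RS test changes below: the `_elements` values are built from an unordered collection, so the request URI can legitimately carry them in either order. A self-contained sketch of the order-insensitive assertion (AssertJ's `satisfiesAnyOf` passes when at least one consumer does not throw; the URI value is illustrative):

```java
import static org.assertj.core.api.Assertions.assertThat;

public class EitherOrderAssertionExample {
    public static void main(String[] args) {
        String uri = "http://localhost/fhir/Patient/_search?_elements=name%2Cidentifier";
        // Passes when either ordering of the _elements values is present
        assertThat(uri).satisfiesAnyOf(
                s -> assertThat(s).contains("_elements=identifier%2Cname"),
                s -> assertThat(s).contains("_elements=name%2Cidentifier"));
    }
}
```
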
@@ -4,7 +4,7 @@
     <parent>
        <groupId>ca.uhn.hapi.fhir</groupId>
        <artifactId>hapi-deployable-pom</artifactId>
-       <version>7.7.0-SNAPSHOT</version>
+       <version>7.7.7-SNAPSHOT</version>

        <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
     </parent>

@@ -5,7 +5,7 @@
     <parent>
        <groupId>ca.uhn.hapi.fhir</groupId>
        <artifactId>hapi-deployable-pom</artifactId>
-       <version>7.7.0-SNAPSHOT</version>
+       <version>7.7.7-SNAPSHOT</version>

        <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
     </parent>

@@ -5,7 +5,7 @@
     <parent>
        <groupId>ca.uhn.hapi.fhir</groupId>
        <artifactId>hapi-fhir</artifactId>
-       <version>7.7.0-SNAPSHOT</version>
+       <version>7.7.7-SNAPSHOT</version>

        <relativePath>../pom.xml</relativePath>
     </parent>

@@ -5,7 +5,7 @@
     <parent>
        <groupId>ca.uhn.hapi.fhir</groupId>
        <artifactId>hapi-deployable-pom</artifactId>
-       <version>7.7.0-SNAPSHOT</version>
+       <version>7.7.7-SNAPSHOT</version>

        <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
     </parent>

@@ -330,7 +330,7 @@ public class AuthorizationInterceptors {
     SearchNarrowingConsentService consentService =
             new SearchNarrowingConsentService(validationSupport, searchParamRegistry);

-    // Create a ConsentIntereptor to apply the ConsentService and register it with the server
+    // Create a ConsentInterceptor to apply the ConsentService and register it with the server
     ConsentInterceptor consentInterceptor = new ConsentInterceptor();
     consentInterceptor.registerConsentService(consentService);
     restfulServer.registerInterceptor(consentInterceptor);

@@ -7,7 +7,7 @@
   title: "Remove a dependency on a Java 1.7 class (ReflectiveOperationException) in several spots in the codebase. This dependency was accidentally introduced in 1.3, and animal-sniffer-plugin failed to detect it (sigh)."
 - item:
   type: "add"
-  title: "Add two new server interceptors: RequestValidatingInterceptor and ResponseValidatingInterceptor which can be used to validate incoming requests or outgoing responses using the standard FHIR validation tools. See the Server Validation Page for examples of how to use these interceptors. These intereptors have both been enabled on the <a href=\"http://fhirtest.uhn.ca\">public test page</a>."
+  title: "Add two new server interceptors: RequestValidatingInterceptor and ResponseValidatingInterceptor which can be used to validate incoming requests or outgoing responses using the standard FHIR validation tools. See the Server Validation Page for examples of how to use these interceptors. These interceptors have both been enabled on the <a href=\"http://fhirtest.uhn.ca\">public test page</a>."
 - item:
   issue: "259"
   type: "fix"

@@ -32,7 +32,7 @@
   title: "<b>New Feature</b>: The JPA server now supports the <code>_filter</code> search parameter when configured to do so. The <a href=\"http://hl7.org/fhir/search_filter.html\">filter search parameter</a> is an extremely flexible and powerful feature, allowing for advanced grouping and order of operations on searches. It can be dangerous however, as it potentially allows users to create queries for which no database indexes exist in the default configuration so it is disabled by default. Thanks to Anthony Sute for the pull request and all of his support in what turned out to be a lengthy merge!"
 - item:
   type: "add"
-  title: "<b>New Feature</b>: A new interceptor called CascadingDeleteInterceptor has been added to the JPA project. This interceptor allows deletes to cascade when a specific URL parameter or header is added to the request. Cascading deletes can also be controlled by a new flag in the AuthorizationIntereptor RuleBuilder, in order to ensure that cascading deletes are only available to users with sufficient permission."
+  title: "<b>New Feature</b>: A new interceptor called CascadingDeleteInterceptor has been added to the JPA project. This interceptor allows deletes to cascade when a specific URL parameter or header is added to the request. Cascading deletes can also be controlled by a new flag in the AuthorizationInterceptor RuleBuilder, in order to ensure that cascading deletes are only available to users with sufficient permission."
 - item:
   type: "add"
   title: "Several enhancements have been made to the <code>AuthorizationInterceptor</code> : <ul> <li>The interceptor now registers against the <code>STORAGE_PRESHOW_RESOURCES</code> interceptor hook, which allows it to successfully authorize JPA operations that don't actually return resource content, such as GraphQL responses, and resources that have been filtered using the <code>_elements</code> parameter.</li> <li> </li>The rule list is now cached on a per-request basis, which should improve performance</ul>"

@@ -1,3 +1,3 @@
 ---
-release-date: "2023-02-18"
+release-date: "2024-02-18"
 codename: "Apollo"

@@ -1,3 +1,3 @@
 ---
-release-date: "2023-05-18"
+release-date: "2024-05-18"
 codename: "Borealis"

@@ -0,0 +1,3 @@
+---
+release-date: "2024-10-21"
+codename: "Copernicus"

@@ -1,6 +1,8 @@
 ---
 type: fix
 issue: 6372
+jira: SMILE-9073
+backport: 7.4.5
 title: "Searches that combined full-text searching (i.e. `_text` or `_content`)
 with other search parameters could fail to return all results if we encountered
 1600 matches against the full-text index where none of them match the rest of the query.

@@ -0,0 +1,7 @@
+---
+type: add
+issue: 6107
+title: "A new extension has been created for use on SearchParameter resources in the JPA server. This extension causes
+a SearchParameter to be indexed, but to not be available for use in searches. This can be set when a new SP is created
+in order to prevent it from being used before an index has been completed. See
+[Introducing Search Parameters on Existing Data](https://smilecdr.com/docs/fhir_standard/fhir_search_custom_search_parameters.html) for more information."

@@ -0,0 +1,7 @@
+---
+type: perf
+issue: 6224
+title: "The JPA server will no longer use a separate thread and database connection
+to resolve tag definitions. This should improve performance in some cases, and
+resolves compatibility issues for some environments. Thanks to Ibrahim (Trifork A/S)
+for the pull request!"

@@ -0,0 +1,10 @@
+---
+type: fix
+issue: 6258
+title: "The AuthorizationInterceptor handling for operations has been improved
+so that operation rules now directly test the contents of response Bundle
+or Parameters objects returned by the operation when configured to require
+explicit response authorization. This fixes a regression in 7.4.0 where
+operation responses could sometimes be denied even if appropriate
+permissions were granted to view resources in a response bundle. Thanks to
+Gijsbert van den Brink for reporting the issue with a sample test!"

@@ -0,0 +1,7 @@
+---
+type: perf
+issue: 6395
+title: "A new configuration option has been added to `SubscriptionSubmitterConfig` which
+causes Subscription resources to be submitted to the processing queue synchronously
+instead of asynchronously as all other resources are. This is useful for cases where
+subscriptions need to be activated quickly. Thanks to Michal Sevcik for the contribution!"

@@ -0,0 +1,5 @@
+---
+type: add
+issue: 6398
+title: "The NPM package search module has been enhanced to support searching by
+the package author and the package version attributes."

@@ -0,0 +1,4 @@
+---
+type: fix
+issue: 6407
+title: "Corrected IHE BALP AuditEvent generation, so that it records one Audit Event per resource owner. Thanks to Jens Villadsen (@jkiddo) for the contribution!"

@@ -0,0 +1,5 @@
+---
+type: change
+issue: 6460
+title: "The HFJ_RES_LINK table will no longer store the `PARTITION_DATE` value for the indexed link target
+resource, as this was an unused feature which has been removed as a part of a larger performance optimization."

@@ -0,0 +1,9 @@
+---
+type: perf
+issue: 6460
+title: "The JPA server FHIR transaction processor will now pre-fetch the target
+resource state for references to resources that don't also appear in the
+transaction bundle. This means that if you process a large FHIR transaction containing
+many references to other resources in the repository that are not also being
+updated in the same transaction, you should see a very significant improvement
+in performance."

@@ -0,0 +1,7 @@
+---
+type: perf
+issue: 6460
+title: "The JPA server FHIR transaction processor will now more aggressively cache
+resource IDs for previously seen resources, reducing the number of database reads
+required when processing transactions. This should provide a noticeable improvement
+in performance when processing transactions which update pre-existing resources."

@@ -0,0 +1,5 @@
+---
+type: change
+issue: 6460
+title: "If deletes are disabled in the JPA server, it is no longer possible to un-delete
+a resource (i.e. update a previously deleted resource to make it non-deleted)."

@@ -0,0 +1,9 @@
+---
+type: change
+issue: 6460
+title: "When performing a FHIR Transaction which deletes and then updates (or otherwise
+un-deletes) the same resource within a single transaction, the delete was previously
+not stored as a distinct version (meaning that the resource version was only
+incremented once, and no delete was actually stored in the resource history). This
+has been changed so that deletes will always appear as a distinct entry in the
+resource history."

@@ -0,0 +1,8 @@
+---
+type: add
+issue: 6464
+title: "A new experimental interceptor called the MdmReadVirtualizationInterceptor
+has been added. This interceptor rewrites results when querying an MDM-enabled
+JPA server in order to always include linked resources and rewrites query results
+to link to the MDM golden resource. This interceptor is still being developed
+and should be used with caution."

@@ -0,0 +1,7 @@
+---
+type: fix
+issue: 6475
+jira: SMILE-8490
+title: "Previously, submitting a transaction bundle containing a conditional delete, a conditional create, and a
+resource which relied on this conditional create as a reference would lead to excessive Hibernate warnings in the logs.
+This has been fixed."

@@ -0,0 +1,8 @@
+---
+- item:
+  type: "add"
+  title: "The versions of a few dependencies have been bumped to more recent versions
+  (dependent HAPI modules listed in brackets):
+  <ul>
+  <li>org.hl7.fhir.core (Base): 6.3.25 -> 6.4.0</li>
+  </ul>"

@@ -51,6 +51,14 @@ HAPI is built primary using [Apache Maven](http://maven.apache.org/). Even if yo
 [INFO] Finished at: 2016-02-27T15:05:35+00:00
 ```

+# Rebuilding the Database Schema
+
+The database schema is built as part of your Maven build, but if you need to rebuild it later, you can use the command:
+
+```bash
+mvn hapi-tinder:generate-ddl
+```
+
 # Troubleshooting

 If the build fails to execute successfully, try the following:

@@ -238,3 +238,7 @@ In order to improve sorting performance when chained sorts are needed, an [Uplif
 # _include and _revinclude order

 By default, all _revincludes will be performed first and then all _includes are performed afterwards. However, if any _revinclude parameters are modified with :iterate (or :recurse for earlier versions of FHIR) then all _include parameters will be evaluated first.
+
+# Custom Search Parameters
+
+HAPI FHIR has the ability to index and use custom search parameters, including parameters which enforce uniqueness, parameters which index combinations of parameters, and parameters which are indexed but not used for searches until they are ready. See [Custom Search Parameters](https://smilecdr.com/docs/fhir_standard/fhir_search_custom_search_parameters.html) for more information.

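To make the evaluation-order rule concrete, a hedged fluent-client sketch (the endpoint is hypothetical; `Include` and the `search()` fluent API are standard HAPI client classes):

```java
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.model.api.Include;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import org.hl7.fhir.r4.model.Bundle;

public class RevIncludeOrderExample {
    public static void main(String[] args) {
        FhirContext ctx = FhirContext.forR4();
        IGenericClient client = ctx.newRestfulGenericClient("http://example.org/fhir"); // hypothetical server
        // Because a _revinclude carries :iterate here, the server evaluates the
        // _include parameters first, then the iterating _revinclude
        Bundle result = client.search()
                .forResource("MedicationRequest")
                .include(new Include("MedicationRequest:patient"))
                .revInclude(new Include("Provenance:target", true)) // true = :iterate
                .returnBundle(Bundle.class)
                .execute();
        System.out.println(result.getEntry().size());
    }
}
```
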
@@ -287,7 +287,7 @@ Dates are a special case, since it is a fairly common scenario to want to match

 # Resource Includes (_include)

-FHIR allows clients to request that specific linked resources be included as contained resources, which means that they will be "embedded" in a special container called "contained" within the parent resource.
+Using the `_include` FHIR parameter, clients can request that specific linked resources be embedded directly within search results. These included resources will have a search.mode of "include".

 HAPI allows you to add a parameter for accepting includes if you wish to support them for specific search methods.

@@ -11,7 +11,7 @@
     <parent>
        <groupId>ca.uhn.hapi.fhir</groupId>
        <artifactId>hapi-deployable-pom</artifactId>
-       <version>7.7.0-SNAPSHOT</version>
+       <version>7.7.7-SNAPSHOT</version>

        <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
     </parent>

@@ -4,7 +4,7 @@
     <parent>
        <groupId>ca.uhn.hapi.fhir</groupId>
        <artifactId>hapi-deployable-pom</artifactId>
-       <version>7.7.0-SNAPSHOT</version>
+       <version>7.7.7-SNAPSHOT</version>

        <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
     </parent>

@@ -1474,7 +1474,9 @@ public class GenericJaxRsClientDstu2Test {
         .execute();


-    assertEquals(ourServer.getBaseUrl() + "/fhir/Patient/_search?_elements=identifier%2Cname", CAPTURE_SERVLET.ourRequestUri);
+    assertThat(CAPTURE_SERVLET.ourRequestUri).isIn(
+        ourServer.getBaseUrl() + "/fhir/Patient/_search?_elements=identifier%2Cname",
+        ourServer.getBaseUrl() + "/fhir/Patient/_search?_elements=name%2Cidentifier");

     // assertThat(MY_SERVLET.ourRequestUri,
     // either(equalTo(ourServer.getBaseUrl() + "/fhir/Patient?name=james&_elements=name%2Cidentifier")).or(equalTo(ourServer.getBaseUrl() + "/fhir/Patient?name=james&_elements=identifier%2Cname")));
@@ -1511,7 +1513,10 @@ public class GenericJaxRsClientDstu2Test {


     assertThat(CAPTURE_SERVLET.ourRequestUri).contains(ourServer.getBaseUrl() + "/fhir/Patient/_search?");
-    assertThat(CAPTURE_SERVLET.ourRequestUri).contains("_elements=identifier%2Cname");
+    assertThat(CAPTURE_SERVLET.ourRequestUri).satisfiesAnyOf(
+        s -> assertThat(s).contains("_elements=identifier%2Cname"),
+        s -> assertThat(s).contains("_elements=name%2Cidentifier")
+    );

     // assertThat(MY_SERVLET.ourRequestUri,
     // either(equalTo(ourServer.getBaseUrl() + "/fhir/Patient?name=james&_elements=name%2Cidentifier")).or(equalTo(ourServer.getBaseUrl() + "/fhir/Patient?name=james&_elements=identifier%2Cname")));

@@ -1546,7 +1546,9 @@ public class GenericJaxRsClientDstu3Test {
         .execute();
     //@formatter:on

-    assertEquals(ourServer.getBaseUrl() + "/fhir/Patient/_search?_elements=identifier%2Cname", MY_SERVLET.ourRequestUri);
+    assertThat(MY_SERVLET.ourRequestUri).isIn(
+        ourServer.getBaseUrl() + "/fhir/Patient/_search?_elements=identifier%2Cname",
+        ourServer.getBaseUrl() + "/fhir/Patient/_search?_elements=name%2Cidentifier");

     // assertThat(MY_SERVLET.ourRequestUri,
     // either(equalTo(ourServer.getBaseUrl() + "/fhir/Patient?name=james&_elements=name%2Cidentifier")).or(equalTo(ourServer.getBaseUrl() + "/fhir/Patient?name=james&_elements=identifier%2Cname")));
@@ -1583,7 +1585,10 @@ public class GenericJaxRsClientDstu3Test {
     //@formatter:on

     assertThat(MY_SERVLET.ourRequestUri).contains(ourServer.getBaseUrl() + "/fhir/Patient/_search?");
-    assertThat(MY_SERVLET.ourRequestUri).contains("_elements=identifier%2Cname");
+    assertThat(MY_SERVLET.ourRequestUri).satisfiesAnyOf(
+        s -> assertThat(s).contains("_elements=identifier%2Cname"),
+        s -> assertThat(s).contains("_elements=name%2Cidentifier")
+    );

     // assertThat(MY_SERVLET.ourRequestUri,
     // either(equalTo(ourServer.getBaseUrl() + "/fhir/Patient?name=james&_elements=name%2Cidentifier")).or(equalTo(ourServer.getBaseUrl() + "/fhir/Patient?name=james&_elements=identifier%2Cname")));

@@ -5,7 +5,7 @@
     <parent>
        <groupId>ca.uhn.hapi.fhir</groupId>
        <artifactId>hapi-deployable-pom</artifactId>
-       <version>7.7.0-SNAPSHOT</version>
+       <version>7.7.7-SNAPSHOT</version>

        <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
     </parent>

@@ -20,7 +20,11 @@
 package ca.uhn.fhir.jpa.config;

 import com.google.common.base.Strings;
-import org.hibernate.cfg.AvailableSettings;
+import jakarta.annotation.Nonnull;
+import org.hibernate.cfg.BatchSettings;
+import org.hibernate.cfg.JdbcSettings;
+import org.hibernate.cfg.ManagedBeanSettings;
+import org.hibernate.cfg.QuerySettings;
 import org.hibernate.query.criteria.ValueHandlingMode;
 import org.hibernate.resource.jdbc.spi.PhysicalConnectionHandlingMode;
 import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
@@ -46,18 +50,19 @@ public class HapiFhirLocalContainerEntityManagerFactoryBean extends LocalContain
     myConfigurableListableBeanFactory = theConfigurableListableBeanFactory;
 }

+@Nonnull
 @Override
 public Map<String, Object> getJpaPropertyMap() {
     Map<String, Object> retVal = super.getJpaPropertyMap();

     // SOMEDAY these defaults can be set in the constructor.  setJpaProperties does a merge.
-    if (!retVal.containsKey(AvailableSettings.CRITERIA_VALUE_HANDLING_MODE)) {
-        retVal.put(AvailableSettings.CRITERIA_VALUE_HANDLING_MODE, ValueHandlingMode.BIND);
+    if (!retVal.containsKey(QuerySettings.CRITERIA_VALUE_HANDLING_MODE)) {
+        retVal.put(QuerySettings.CRITERIA_VALUE_HANDLING_MODE, ValueHandlingMode.BIND);
     }

-    if (!retVal.containsKey(AvailableSettings.CONNECTION_HANDLING)) {
+    if (!retVal.containsKey(JdbcSettings.CONNECTION_HANDLING)) {
         retVal.put(
-                AvailableSettings.CONNECTION_HANDLING,
+                JdbcSettings.CONNECTION_HANDLING,
                 PhysicalConnectionHandlingMode.DELAYED_ACQUISITION_AND_RELEASE_AFTER_TRANSACTION);
     }

@@ -65,26 +70,25 @@ public class HapiFhirLocalContainerEntityManagerFactoryBean extends LocalContain
      * Set some performance options
      */

-    if (!retVal.containsKey(AvailableSettings.STATEMENT_BATCH_SIZE)) {
-        retVal.put(AvailableSettings.STATEMENT_BATCH_SIZE, "30");
+    if (!retVal.containsKey(BatchSettings.STATEMENT_BATCH_SIZE)) {
+        retVal.put(BatchSettings.STATEMENT_BATCH_SIZE, "30");
     }

-    if (!retVal.containsKey(AvailableSettings.ORDER_INSERTS)) {
-        retVal.put(AvailableSettings.ORDER_INSERTS, "true");
+    if (!retVal.containsKey(BatchSettings.ORDER_INSERTS)) {
+        retVal.put(BatchSettings.ORDER_INSERTS, "true");
     }

-    if (!retVal.containsKey(AvailableSettings.ORDER_UPDATES)) {
-        retVal.put(AvailableSettings.ORDER_UPDATES, "true");
+    if (!retVal.containsKey(BatchSettings.ORDER_UPDATES)) {
+        retVal.put(BatchSettings.ORDER_UPDATES, "true");
     }

-    if (!retVal.containsKey(AvailableSettings.BATCH_VERSIONED_DATA)) {
-        retVal.put(AvailableSettings.BATCH_VERSIONED_DATA, "true");
+    if (!retVal.containsKey(BatchSettings.BATCH_VERSIONED_DATA)) {
+        retVal.put(BatchSettings.BATCH_VERSIONED_DATA, "true");
     }
     // Why is this here, you ask? LocalContainerEntityManagerFactoryBean actually clobbers the setting hibernate
-    // needs
-    // in order to be able to resolve beans, so we add it back in manually here
-    if (!retVal.containsKey(AvailableSettings.BEAN_CONTAINER)) {
-        retVal.put(AvailableSettings.BEAN_CONTAINER, new SpringBeanContainer(myConfigurableListableBeanFactory));
+    // needs in order to be able to resolve beans, so we add it back in manually here
+    if (!retVal.containsKey(ManagedBeanSettings.BEAN_CONTAINER)) {
+        retVal.put(ManagedBeanSettings.BEAN_CONTAINER, new SpringBeanContainer(myConfigurableListableBeanFactory));
     }

     return retVal;

@@ -5,7 +5,7 @@
     <parent>
        <groupId>ca.uhn.hapi.fhir</groupId>
        <artifactId>hapi-deployable-pom</artifactId>
-       <version>7.7.0-SNAPSHOT</version>
+       <version>7.7.7-SNAPSHOT</version>

        <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
     </parent>
@@ -414,7 +414,6 @@
     <plugin>
        <groupId>ca.uhn.hapi.fhir</groupId>
        <artifactId>hapi-tinder-plugin</artifactId>
-       <version>${project.version}</version>
        <executions>
           <execution>
              <id>build_dstu2</id>
@@ -525,7 +524,6 @@
     <plugin>
        <groupId>ca.uhn.hapi.fhir</groupId>
        <artifactId>hapi-tinder-plugin</artifactId>
-       <version>${project.version}</version>
        <executions>
           <execution>
              <goals>
@@ -534,6 +532,7 @@
           </execution>
        </executions>
        <configuration>
+          <skip>false</skip>
           <packageNames>
              <packageName>ca.uhn.fhir.jpa.entity</packageName>
              <packageName>ca.uhn.fhir.jpa.model.entity</packageName>

@@ -42,7 +42,6 @@ import ca.uhn.fhir.jpa.model.dao.JpaPid;
 import ca.uhn.fhir.jpa.model.search.SearchBuilderLoadIncludesParameters;
 import ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails;
 import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
-import ca.uhn.fhir.jpa.util.QueryChunker;
 import ca.uhn.fhir.mdm.api.MdmMatchResultEnum;
 import ca.uhn.fhir.mdm.dao.IMdmLinkDao;
 import ca.uhn.fhir.mdm.model.MdmPidTuple;
@@ -59,6 +58,7 @@ import ca.uhn.fhir.util.ExtensionUtil;
 import ca.uhn.fhir.util.HapiExtensions;
 import ca.uhn.fhir.util.Logs;
 import ca.uhn.fhir.util.SearchParameterUtil;
+import ca.uhn.fhir.util.TaskChunker;
 import jakarta.annotation.Nonnull;
 import jakarta.persistence.EntityManager;
 import org.apache.commons.lang3.StringUtils;
@@ -315,8 +315,7 @@ public class JpaBulkExportProcessor implements IBulkExportProcessor<JpaPid> {
     // for each patient pid ->
     //   search for the target resources, with their correct patient references, chunked.
     // The results will be jammed into myReadPids
-    QueryChunker<JpaPid> queryChunker = new QueryChunker<>();
-    queryChunker.chunk(expandedMemberResourceIds, QUERY_CHUNK_SIZE, (idChunk) -> {
+    TaskChunker.chunk(expandedMemberResourceIds, QUERY_CHUNK_SIZE, (idChunk) -> {
         try {
             queryResourceTypeWithReferencesToPatients(pids, idChunk, theParams, theDef);
         } catch (IOException ex) {
@@ -625,7 +624,8 @@ public class JpaBulkExportProcessor implements IBulkExportProcessor<JpaPid> {
         resourceToCheck = "Patient";
         activeSearchParamName = "organization";
     }
-    return mySearchParamRegistry.getActiveSearchParam(resourceToCheck, activeSearchParamName);
+    return mySearchParamRegistry.getActiveSearchParam(
+            resourceToCheck, activeSearchParamName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);
 }

 /**

@@ -43,6 +43,7 @@ import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc;
 import ca.uhn.fhir.jpa.bulk.imprt.svc.BulkDataImportSvcImpl;
 import ca.uhn.fhir.jpa.cache.IResourceVersionSvc;
 import ca.uhn.fhir.jpa.cache.ResourceVersionSvcDaoImpl;
+import ca.uhn.fhir.jpa.dao.CacheTagDefinitionDao;
 import ca.uhn.fhir.jpa.dao.DaoSearchParamProvider;
 import ca.uhn.fhir.jpa.dao.HistoryBuilder;
 import ca.uhn.fhir.jpa.dao.HistoryBuilderFactory;
@@ -56,6 +57,7 @@ import ca.uhn.fhir.jpa.dao.SearchBuilderFactory;
 import ca.uhn.fhir.jpa.dao.TransactionProcessor;
 import ca.uhn.fhir.jpa.dao.data.IResourceModifiedDao;
 import ca.uhn.fhir.jpa.dao.data.IResourceSearchUrlDao;
+import ca.uhn.fhir.jpa.dao.data.ITagDefinitionDao;
 import ca.uhn.fhir.jpa.dao.expunge.ExpungeEverythingService;
 import ca.uhn.fhir.jpa.dao.expunge.ExpungeOperation;
 import ca.uhn.fhir.jpa.dao.expunge.ExpungeService;
@@ -377,17 +379,17 @@ public class JpaConfig {

     @Bean
     public TaskScheduler taskScheduler() {
-        ConcurrentTaskScheduler retVal = new ConcurrentTaskScheduler();
-        retVal.setConcurrentExecutor(scheduledExecutorService().getObject());
-        retVal.setScheduledExecutor(scheduledExecutorService().getObject());
+        ConcurrentTaskScheduler retVal = new ConcurrentTaskScheduler(
+                scheduledExecutorService().getObject(),
+                scheduledExecutorService().getObject());
         return retVal;
     }

     @Bean(name = TASK_EXECUTOR_NAME)
     public AsyncTaskExecutor taskExecutor() {
-        ConcurrentTaskScheduler retVal = new ConcurrentTaskScheduler();
-        retVal.setConcurrentExecutor(scheduledExecutorService().getObject());
-        retVal.setScheduledExecutor(scheduledExecutorService().getObject());
+        ConcurrentTaskScheduler retVal = new ConcurrentTaskScheduler(
+                scheduledExecutorService().getObject(),
+                scheduledExecutorService().getObject());
         return retVal;
     }

@@ -893,4 +895,10 @@ public class JpaConfig {
             FhirContext theFhirContext, HibernatePropertiesProvider theHibernatePropertiesProvider) {
         return new ResourceHistoryCalculator(theFhirContext, theHibernatePropertiesProvider.isOracleDialect());
     }
+
+    @Bean
+    public CacheTagDefinitionDao tagDefinitionDao(
+            ITagDefinitionDao tagDefinitionDao, MemoryCacheService memoryCacheService) {
+        return new CacheTagDefinitionDao(tagDefinitionDao, memoryCacheService);
+    }
 }

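Context for the scheduler change above: the no-arg `ConcurrentTaskScheduler` constructor plus setters was replaced with the two-argument constructor (to the best of my knowledge the setter-based variants are deprecated in recent Spring Framework releases). A standalone sketch of the same wiring, under that assumption:

```java
import java.time.Instant;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;

import org.springframework.scheduling.concurrent.ConcurrentTaskScheduler;

public class SchedulerWiringExample {
    public static void main(String[] args) {
        ScheduledExecutorService pool = Executors.newScheduledThreadPool(4);
        // As in the JpaConfig beans: one executor serves as both the plain
        // Executor and the ScheduledExecutorService
        ConcurrentTaskScheduler scheduler = new ConcurrentTaskScheduler(pool, pool);
        scheduler.schedule(() -> System.out.println("tick"), Instant.now().plusSeconds(1));
    }
}
```
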
@@ -29,6 +29,7 @@ import ca.uhn.fhir.jpa.model.dao.JpaPid;
 import ca.uhn.fhir.mdm.dao.IMdmLinkDao;
 import ca.uhn.fhir.mdm.dao.IMdmLinkImplFactory;
 import ca.uhn.fhir.mdm.svc.MdmLinkExpandSvc;
+import ca.uhn.fhir.mdm.svc.MdmSearchExpansionSvc;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;

@@ -40,6 +41,11 @@ public class MdmJpaConfig {
     return new MdmLinkExpandSvc();
 }

+@Bean
+public MdmSearchExpansionSvc mdmSearchExpansionSvc() {
+    return new MdmSearchExpansionSvc();
+}
+
 @Bean
 public IMdmLinkDao<JpaPid, MdmLink> mdmLinkDao() {
     return new MdmLinkDaoJpaImpl();

@@ -75,7 +75,6 @@ import ca.uhn.fhir.jpa.searchparam.matcher.InMemoryResourceMatcher;
 import ca.uhn.fhir.jpa.sp.ISearchParamPresenceSvc;
 import ca.uhn.fhir.jpa.term.api.ITermReadSvc;
 import ca.uhn.fhir.jpa.util.AddRemoveCount;
 import ca.uhn.fhir.jpa.util.MemoryCacheService;
-import ca.uhn.fhir.jpa.util.QueryChunker;
 import ca.uhn.fhir.model.api.IResource;
 import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum;
@@ -89,7 +88,6 @@ import ca.uhn.fhir.rest.api.InterceptorInvocationTimingEnum;
 import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
 import ca.uhn.fhir.rest.api.server.RequestDetails;
 import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
-import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
 import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
 import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
 import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
@@ -107,14 +105,8 @@ import jakarta.annotation.Nonnull;
 import jakarta.annotation.Nullable;
 import jakarta.annotation.PostConstruct;
 import jakarta.persistence.EntityManager;
-import jakarta.persistence.NoResultException;
 import jakarta.persistence.PersistenceContext;
 import jakarta.persistence.PersistenceContextType;
-import jakarta.persistence.TypedQuery;
-import jakarta.persistence.criteria.CriteriaBuilder;
-import jakarta.persistence.criteria.CriteriaQuery;
-import jakarta.persistence.criteria.Predicate;
-import jakarta.persistence.criteria.Root;
 import org.apache.commons.lang3.NotImplementedException;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.lang3.Validate;
@@ -136,19 +128,11 @@ import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.context.ApplicationContext;
 import org.springframework.context.ApplicationContextAware;
 import org.springframework.stereotype.Repository;
-import org.springframework.transaction.PlatformTransactionManager;
-import org.springframework.transaction.TransactionDefinition;
-import org.springframework.transaction.TransactionStatus;
-import org.springframework.transaction.support.TransactionCallback;
-import org.springframework.transaction.support.TransactionSynchronization;
-import org.springframework.transaction.support.TransactionSynchronizationManager;
-import org.springframework.transaction.support.TransactionTemplate;

 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.Date;
-import java.util.HashMap;
 import java.util.HashSet;
 import java.util.IdentityHashMap;
 import java.util.List;
@@ -158,7 +142,6 @@ import java.util.stream.Collectors;
 import javax.xml.stream.events.Characters;
 import javax.xml.stream.events.XMLEvent;

-import static java.util.Objects.isNull;
 import static java.util.Objects.nonNull;
 import static org.apache.commons.collections4.CollectionUtils.isEqualCollection;
 import static org.apache.commons.lang3.StringUtils.isBlank;

@@ -182,8 +165,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
     public static final long INDEX_STATUS_INDEXED = 1L;
     public static final long INDEX_STATUS_INDEXING_FAILED = 2L;
     public static final String NS_JPA_PROFILE = "https://github.com/hapifhir/hapi-fhir/ns/jpa/profile";
-    // total attempts to do a tag transaction
-    private static final int TOTAL_TAG_READ_ATTEMPTS = 10;
     private static final Logger ourLog = LoggerFactory.getLogger(BaseHapiFhirDao.class);
     private static boolean ourValidationDisabledForUnitTest;
     private static boolean ourDisableIncrementOnUpdateForUnitTest = false;
@@ -248,17 +229,14 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
     @Autowired
     private IPartitionLookupSvc myPartitionLookupSvc;

-    @Autowired
-    private MemoryCacheService myMemoryCacheService;
-
     @Autowired(required = false)
     private IFulltextSearchSvc myFulltextSearchSvc;

     @Autowired
-    private PlatformTransactionManager myTransactionManager;
+    protected ResourceHistoryCalculator myResourceHistoryCalculator;

     @Autowired
-    protected ResourceHistoryCalculator myResourceHistoryCalculator;
+    protected CacheTagDefinitionDao cacheTagDefinitionDao;

     protected final CodingSpy myCodingSpy = new CodingSpy();

@@ -307,7 +285,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
     TagList tagList = ResourceMetadataKeyEnum.TAG_LIST.get(theResource);
     if (tagList != null) {
         for (Tag next : tagList) {
-            TagDefinition def = getTagOrNull(
+            TagDefinition def = cacheTagDefinitionDao.getTagOrNull(
                     theTransactionDetails,
                     TagTypeEnum.TAG,
                     next.getScheme(),
@@ -326,7 +304,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
     List<BaseCodingDt> securityLabels = ResourceMetadataKeyEnum.SECURITY_LABELS.get(theResource);
     if (securityLabels != null) {
         for (BaseCodingDt next : securityLabels) {
-            TagDefinition def = getTagOrNull(
+            TagDefinition def = cacheTagDefinitionDao.getTagOrNull(
                     theTransactionDetails,
                     TagTypeEnum.SECURITY_LABEL,
                     next.getSystemElement().getValue(),
@@ -345,7 +323,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
     List<IdDt> profiles = ResourceMetadataKeyEnum.PROFILES.get(theResource);
     if (profiles != null) {
         for (IIdType next : profiles) {
-            TagDefinition def = getTagOrNull(
+            TagDefinition def = cacheTagDefinitionDao.getTagOrNull(
                     theTransactionDetails, TagTypeEnum.PROFILE, NS_JPA_PROFILE, next.getValue(), null, null, null);
             if (def != null) {
                 ResourceTag tag = theEntity.addTag(def);
@@ -364,7 +342,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
     List<? extends IBaseCoding> tagList = theResource.getMeta().getTag();
     if (tagList != null) {
         for (IBaseCoding next : tagList) {
-            TagDefinition def = getTagOrNull(
+            TagDefinition def = cacheTagDefinitionDao.getTagOrNull(
                     theTransactionDetails,
                     TagTypeEnum.TAG,
                     next.getSystem(),
@@ -383,7 +361,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
     List<? extends IBaseCoding> securityLabels = theResource.getMeta().getSecurity();
     if (securityLabels != null) {
         for (IBaseCoding next : securityLabels) {
-            TagDefinition def = getTagOrNull(
+            TagDefinition def = cacheTagDefinitionDao.getTagOrNull(
                     theTransactionDetails,
                     TagTypeEnum.SECURITY_LABEL,
                     next.getSystem(),
@@ -402,7 +380,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
     List<? extends IPrimitiveType<String>> profiles = theResource.getMeta().getProfile();
     if (profiles != null) {
         for (IPrimitiveType<String> next : profiles) {
-            TagDefinition def = getTagOrNull(
+            TagDefinition def = cacheTagDefinitionDao.getTagOrNull(
                     theTransactionDetails, TagTypeEnum.PROFILE, NS_JPA_PROFILE, next.getValue(), null, null, null);
             if (def != null) {
                 ResourceTag tag = theEntity.addTag(def);
@@ -422,7 +400,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
     if (!def.isStandardType()) {
         String profile = def.getResourceProfile("");
         if (isNotBlank(profile)) {
-            TagDefinition profileDef = getTagOrNull(
+            TagDefinition profileDef = cacheTagDefinitionDao.getTagOrNull(
                     theTransactionDetails, TagTypeEnum.PROFILE, NS_JPA_PROFILE, profile, null, null, null);

             ResourceTag tag = theEntity.addTag(profileDef);

@@ -456,164 +434,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
     myContext = theContext;
 }

-/**
- * <code>null</code> will only be returned if the scheme and tag are both blank
- */
-protected TagDefinition getTagOrNull(
-        TransactionDetails theTransactionDetails,
-        TagTypeEnum theTagType,
-        String theScheme,
-        String theTerm,
-        String theLabel,
-        String theVersion,
-        Boolean theUserSelected) {
-    if (isBlank(theScheme) && isBlank(theTerm) && isBlank(theLabel)) {
-        return null;
-    }
-
-    MemoryCacheService.TagDefinitionCacheKey key =
-            toTagDefinitionMemoryCacheKey(theTagType, theScheme, theTerm, theVersion, theUserSelected);
-
-    TagDefinition retVal = myMemoryCacheService.getIfPresent(MemoryCacheService.CacheEnum.TAG_DEFINITION, key);
-    if (retVal == null) {
-        HashMap<MemoryCacheService.TagDefinitionCacheKey, TagDefinition> resolvedTagDefinitions =
-                theTransactionDetails.getOrCreateUserData(
-                        HapiTransactionService.XACT_USERDATA_KEY_RESOLVED_TAG_DEFINITIONS, HashMap::new);
-
-        retVal = resolvedTagDefinitions.get(key);
-
-        if (retVal == null) {
-            // actual DB hit(s) happen here
-            retVal = getOrCreateTag(theTagType, theScheme, theTerm, theLabel, theVersion, theUserSelected);
-
-            TransactionSynchronization sync = new AddTagDefinitionToCacheAfterCommitSynchronization(key, retVal);
-            TransactionSynchronizationManager.registerSynchronization(sync);
-
-            resolvedTagDefinitions.put(key, retVal);
-        }
-    }
-
-    return retVal;
-}
-
-/**
- * Gets the tag defined by the fed in values, or saves it if it does not
- * exist.
- * <p>
- * Can also throw an InternalErrorException if something bad happens.
- */
-private TagDefinition getOrCreateTag(
-        TagTypeEnum theTagType,
-        String theScheme,
-        String theTerm,
-        String theLabel,
-        String theVersion,
-        Boolean theUserSelected) {
-
-    TypedQuery<TagDefinition> q = buildTagQuery(theTagType, theScheme, theTerm, theVersion, theUserSelected);
-    q.setMaxResults(1);
-
-    TransactionTemplate template = new TransactionTemplate(myTransactionManager);
-    template.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);
-
-    // this transaction will attempt to get or create the tag,
-    // repeating (on any failure) 10 times.
-    // if it fails more than this, we will throw exceptions
-    TagDefinition retVal;
-    int count = 0;
-    HashSet<Throwable> throwables = new HashSet<>();
-    do {
-        try {
-            retVal = template.execute(new TransactionCallback<TagDefinition>() {
-
-                // do the actual DB call(s) to read and/or write the values
-                private TagDefinition readOrCreate() {
-                    TagDefinition val;
-                    try {
-                        val = q.getSingleResult();
-                    } catch (NoResultException e) {
-                        val = new TagDefinition(theTagType, theScheme, theTerm, theLabel);
-                        val.setVersion(theVersion);
-                        val.setUserSelected(theUserSelected);
-                        myEntityManager.persist(val);
-                    }
-                    return val;
-                }
-
-                @Override
-                public TagDefinition doInTransaction(TransactionStatus status) {
-                    TagDefinition tag = null;
-
-                    try {
-                        tag = readOrCreate();
-                    } catch (Exception ex) {
-                        // log any exceptions - just in case
-                        // they may be signs of things to come...
-                        ourLog.warn(
-                                "Tag read/write failed: "
-                                        + ex.getMessage() + ". "
-                                        + "This is not a failure on its own, "
-                                        + "but could be useful information in the result of an actual failure.",
-                                ex);
-                        throwables.add(ex);
-                    }
-
-                    return tag;
-                }
-            });
-        } catch (Exception ex) {
-            // transaction template can fail if connections to db are exhausted and/or timeout
-            ourLog.warn(
-                    "Transaction failed with: {}. Transaction will rollback and be reattempted.", ex.getMessage());
-            retVal = null;
-        }
-        count++;
-    } while (retVal == null && count < TOTAL_TAG_READ_ATTEMPTS);
-
-    if (retVal == null) {
-        // if tag is still null,
-        // something bad must be happening
-        // - throw
-        String msg = throwables.stream().map(Throwable::getMessage).collect(Collectors.joining(", "));
-        throw new InternalErrorException(Msg.code(2023)
-                + "Tag get/create failed after "
-                + TOTAL_TAG_READ_ATTEMPTS
-                + " attempts with error(s): "
-                + msg);
-    }
-
-    return retVal;
-}
-
-private TypedQuery<TagDefinition> buildTagQuery(
-        TagTypeEnum theTagType, String theScheme, String theTerm, String theVersion, Boolean theUserSelected) {
-    CriteriaBuilder builder = myEntityManager.getCriteriaBuilder();
-    CriteriaQuery<TagDefinition> cq = builder.createQuery(TagDefinition.class);
-    Root<TagDefinition> from = cq.from(TagDefinition.class);
-
-    List<Predicate> predicates = new ArrayList<>();
-    predicates.add(builder.and(
-            builder.equal(from.get("myTagType"), theTagType), builder.equal(from.get("myCode"), theTerm)));
-
-    predicates.add(
-            isBlank(theScheme)
-                    ? builder.isNull(from.get("mySystem"))
-                    : builder.equal(from.get("mySystem"), theScheme));
-
-    predicates.add(
-            isBlank(theVersion)
-                    ? builder.isNull(from.get("myVersion"))
-                    : builder.equal(from.get("myVersion"), theVersion));
-
-    predicates.add(
-            isNull(theUserSelected)
-                    ? builder.isNull(from.get("myUserSelected"))
-                    : builder.equal(from.get("myUserSelected"), theUserSelected));
-
-    cq.where(predicates.toArray(new Predicate[0]));
-    return myEntityManager.createQuery(cq);
-}
-
 void incrementId(T theResource, ResourceTable theSavedEntity, IIdType theResourceId) {
     if (theResourceId == null || theResourceId.getVersionIdPart() == null) {
         theSavedEntity.initializeVersion();

@ -933,7 +753,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
|
|||
@Override
|
||||
@CoverageIgnore
|
||||
public BaseHasResource readEntity(IIdType theValueId, RequestDetails theRequest) {
|
||||
throw new NotImplementedException(Msg.code(927) + "");
|
||||
throw new NotImplementedException(Msg.code(927));
|
||||
}
|
||||
|
||||
/**
|
||||
|
@@ -1200,6 +1020,10 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
        if (entity.getId() == null) {
            myEntityManager.persist(entity);

            if (entity.getFhirId() == null) {
                entity.setFhirId(Long.toString(entity.getResourceId()));
            }

            postPersist(entity, (T) theResource, theRequest);

        } else if (entity.getDeleted() != null) {

@@ -1499,7 +1323,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
     * the previous version entity.
     */
    if (historyEntry == null) {
        historyEntry = theEntity.toHistory(versionedTags);
        historyEntry = theEntity.toHistory(versionedTags && theEntity.getDeleted() == null);
    }

    historyEntry.setEncoding(theChanged.getEncoding());

@@ -1507,7 +1331,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
    historyEntry.setResourceTextVc(theChanged.getResourceText());

    ourLog.debug("Saving history entry ID[{}] for RES_ID[{}]", historyEntry.getId(), historyEntry.getResourceId());
    myResourceHistoryTableDao.save(historyEntry);
    myEntityManager.persist(historyEntry);
    theEntity.setCurrentVersionEntity(historyEntry);

    // Save resource source
@@ -1665,6 +1489,11 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
        wasDeleted = theOldResource.isDeleted();
    }

    if (wasDeleted && !myStorageSettings.isDeleteEnabled()) {
        String msg = myContext.getLocalizer().getMessage(BaseHapiFhirDao.class, "cantUndeleteWithDeletesDisabled");
        throw new InvalidRequestException(Msg.code(2573) + msg);
    }

    DaoMethodOutcome outcome = toMethodOutcome(
                    theRequestDetails, savedEntity, theResource, theMatchUrl, theOperationType)
            .setCreated(wasDeleted);

@@ -1839,9 +1668,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
    }

    @PostConstruct
    public void start() {
        // nothing yet
    }
    public void start() {}

    @VisibleForTesting
    public void setStorageSettingsForUnitTest(JpaStorageSettings theStorageSettings) {
@@ -1880,30 +1707,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
        myJpaStorageResourceParser = theJpaStorageResourceParser;
    }

    private class AddTagDefinitionToCacheAfterCommitSynchronization implements TransactionSynchronization {

        private final TagDefinition myTagDefinition;
        private final MemoryCacheService.TagDefinitionCacheKey myKey;

        public AddTagDefinitionToCacheAfterCommitSynchronization(
                MemoryCacheService.TagDefinitionCacheKey theKey, TagDefinition theTagDefinition) {
            myTagDefinition = theTagDefinition;
            myKey = theKey;
        }

        @Override
        public void afterCommit() {
            myMemoryCacheService.put(MemoryCacheService.CacheEnum.TAG_DEFINITION, myKey, myTagDefinition);
        }
    }

    @Nonnull
    public static MemoryCacheService.TagDefinitionCacheKey toTagDefinitionMemoryCacheKey(
            TagTypeEnum theTagType, String theScheme, String theTerm, String theVersion, Boolean theUserSelected) {
        return new MemoryCacheService.TagDefinitionCacheKey(
                theTagType, theScheme, theTerm, theVersion, theUserSelected);
    }

    @SuppressWarnings("unchecked")
    public static String parseContentTextIntoWords(FhirContext theContext, IBaseResource theResource) {

@@ -43,6 +43,7 @@ import ca.uhn.fhir.jpa.api.model.ExpungeOptions;
import ca.uhn.fhir.jpa.api.model.ExpungeOutcome;
import ca.uhn.fhir.jpa.api.model.LazyDaoMethodOutcome;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
import ca.uhn.fhir.jpa.api.svc.ResolveIdentityMode;
import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
import ca.uhn.fhir.jpa.delete.DeleteConflictUtil;
import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource;

@@ -567,8 +568,11 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B

        // Pre-cache the resource ID
        jpaPid.setAssociatedResourceId(entity.getIdType(myFhirContext));
        myIdHelperService.addResolvedPidToForcedId(
                jpaPid, theRequestPartitionId, getResourceName(), entity.getFhirId(), null);
        String fhirId = entity.getFhirId();
        if (fhirId == null) {
            fhirId = Long.toString(entity.getId());
        }
        myIdHelperService.addResolvedPidToFhirId(jpaPid, theRequestPartitionId, getResourceName(), fhirId, null);
        theTransactionDetails.addResolvedResourceId(jpaPid.getAssociatedResourceId(), jpaPid);
        theTransactionDetails.addResolvedResource(jpaPid.getAssociatedResourceId(), theResource);

@@ -1043,7 +1047,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
        if (!entityHasTag) {
            theEntity.setHasTags(true);

            TagDefinition def = getTagOrNull(
            TagDefinition def = cacheTagDefinitionDao.getTagOrNull(
                    theTransactionDetails,
                    nextDef.getTagType(),
                    nextDef.getSystem(),
@@ -1736,8 +1740,13 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
        validateResourceTypeAndThrowInvalidRequestException(theId);

        BaseHasResource entity;
        JpaPid pid = myIdHelperService.resolveResourcePersistentIds(
                requestPartitionId, getResourceName(), theId.getIdPart());
        JpaPid pid = myIdHelperService
                .resolveResourceIdentity(
                        requestPartitionId,
                        getResourceName(),
                        theId.getIdPart(),
                        ResolveIdentityMode.includeDeleted().cacheOk())
                .getPersistentId();
        Set<Integer> readPartitions = null;
        if (requestPartitionId.isAllPartitions()) {
            entity = myEntityManager.find(ResourceTable.class, pid.getId());
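ResolveIdentityMode is new in this change set. Judging from the call sites in this diff, it is a small fluent builder with two independent axes: whether deleted resources should resolve, and whether cached lookups may be trusted. The two combinations used in this commit:

    // Used when reading a specific resource by ID: deleted resources still
    // resolve (the caller decides how to report them), and the cache is fine.
    ResolveIdentityMode.includeDeleted().cacheOk();

    // Used when pre-resolving reference targets: deleted targets must not
    // resolve, so the cache is only trusted when deletes are disabled
    // (a cached entry can never be stale in that configuration).
    ResolveIdentityMode.excludeDeleted().noCacheUnlessDeletesDisabled();
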
@@ -1779,10 +1788,6 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
        }
    }

    if (entity == null) {
        throw new ResourceNotFoundException(Msg.code(1996) + "Resource " + theId + " is not known");
    }

    if (theId.hasVersionIdPart()) {
        if (!theId.isVersionIdPartValidLong()) {
            throw new ResourceNotFoundException(Msg.code(978)

@@ -1822,7 +1827,10 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
        }
    }

    Validate.notNull(entity);
    if (entity == null) {
        throw new ResourceNotFoundException(Msg.code(1996) + "Resource " + theId + " is not known");
    }

    validateResourceType(entity);

    if (theCheckForForcedId) {

@@ -1871,8 +1879,27 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
    }

    if (persistentId == null) {
        persistentId = myIdHelperService.resolveResourcePersistentIds(
                theRequestPartitionId, getResourceName(), theId.getIdPart());
        String resourceName = getResourceName();
        if (myStorageSettings.getResourceClientIdStrategy()
                == JpaStorageSettings.ClientIdStrategyEnum.ALPHANUMERIC) {
            if (theId.isIdPartValidLong()) {
                /*
                 * If it's a pure numeric ID and we are in ALPHANUMERIC mode, then the number
                 * corresponds to a DB PID. In this case we want to resolve it regardless of
                 * which type the client has supplied. This is because DB PIDs are unique across
                 * all resource types (unlike FHIR_IDs which are namespaced to the resource type).
                 * We want to load the resource with that PID regardless of type because if
                 * the user is trying to update it we want to fail if the type is wrong, as
                 * opposed to trying to create a new instance.
                 */
                resourceName = null;
            }
        }
        persistentId = myIdHelperService.resolveResourceIdentityPid(
                theRequestPartitionId,
                resourceName,
                theId.getIdPart(),
                ResolveIdentityMode.includeDeleted().cacheOk());
    }

    ResourceTable entity = myEntityManager.find(ResourceTable.class, persistentId.getId());

@@ -192,7 +192,7 @@ public abstract class BaseHapiFhirSystemDao<T extends IBaseBundle, MT> extends B
        HapiTransactionService.requireTransaction();
        List<Long> pids = theResolvedIds.stream().map(t -> ((JpaPid) t).getId()).collect(Collectors.toList());

        new QueryChunker<Long>().chunk(pids, idChunk -> {
        QueryChunker.chunk(pids, idChunk -> {

            /*
             * Pre-fetch the resources we're touching in this transaction in mass - this reduced the

@@ -0,0 +1,132 @@
/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2024 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */
package ca.uhn.fhir.jpa.dao;

import ca.uhn.fhir.jpa.dao.data.ITagDefinitionDao;
import ca.uhn.fhir.jpa.model.entity.TagDefinition;
import ca.uhn.fhir.jpa.model.entity.TagTypeEnum;
import ca.uhn.fhir.jpa.util.MemoryCacheService;
import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
import jakarta.annotation.Nonnull;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.data.domain.Pageable;
import org.springframework.stereotype.Repository;
import org.springframework.transaction.support.TransactionSynchronization;
import org.springframework.transaction.support.TransactionSynchronizationManager;

import java.util.HashMap;
import java.util.List;

import static org.apache.commons.lang3.StringUtils.isBlank;

@Repository
public class CacheTagDefinitionDao {
    private static final Logger ourLog = LoggerFactory.getLogger(CacheTagDefinitionDao.class);

    private final ITagDefinitionDao tagDefinitionDao;
    private final MemoryCacheService memoryCacheService;

    public CacheTagDefinitionDao(ITagDefinitionDao tagDefinitionDao, MemoryCacheService memoryCacheService) {
        this.tagDefinitionDao = tagDefinitionDao;
        this.memoryCacheService = memoryCacheService;
    }

    /**
     * Returns a TagDefinition or null if the scheme, term, and label are all blank.
     */
    protected TagDefinition getTagOrNull(
            TransactionDetails transactionDetails,
            TagTypeEnum tagType,
            String scheme,
            String term,
            String label,
            String version,
            Boolean userSelected) {

        if (isBlank(scheme) && isBlank(term) && isBlank(label)) {
            return null;
        }

        MemoryCacheService.TagDefinitionCacheKey key =
                toTagDefinitionMemoryCacheKey(tagType, scheme, term, version, userSelected);
        TagDefinition tagDefinition = memoryCacheService.getIfPresent(MemoryCacheService.CacheEnum.TAG_DEFINITION, key);

        if (tagDefinition == null) {
            HashMap<MemoryCacheService.TagDefinitionCacheKey, TagDefinition> resolvedTagDefinitions =
                    transactionDetails.getOrCreateUserData("resolvedTagDefinitions", HashMap::new);

            tagDefinition = resolvedTagDefinitions.get(key);

            if (tagDefinition == null) {
                tagDefinition = getOrCreateTag(tagType, scheme, term, label, version, userSelected);

                TransactionSynchronization sync =
                        new AddTagDefinitionToCacheAfterCommitSynchronization(key, tagDefinition);
                TransactionSynchronizationManager.registerSynchronization(sync);

                resolvedTagDefinitions.put(key, tagDefinition);
            }
        }

        return tagDefinition;
    }

    /**
     * Gets or creates a TagDefinition entity.
     */
    private TagDefinition getOrCreateTag(
            TagTypeEnum tagType, String scheme, String term, String label, String version, Boolean userSelected) {
        List<TagDefinition> result = tagDefinitionDao.findByTagTypeAndSchemeAndTermAndVersionAndUserSelected(
                tagType, scheme, term, version, userSelected, Pageable.ofSize(1));

        if (!result.isEmpty()) {
            return result.get(0);
        } else {
            // Create a new TagDefinition if no result is found
            TagDefinition newTag = new TagDefinition(tagType, scheme, term, label);
            newTag.setVersion(version);
            newTag.setUserSelected(userSelected);
            return tagDefinitionDao.save(newTag);
        }
    }

    @Nonnull
    private static MemoryCacheService.TagDefinitionCacheKey toTagDefinitionMemoryCacheKey(
            TagTypeEnum tagType, String scheme, String term, String version, Boolean userSelected) {
        return new MemoryCacheService.TagDefinitionCacheKey(tagType, scheme, term, version, userSelected);
    }

    private class AddTagDefinitionToCacheAfterCommitSynchronization implements TransactionSynchronization {
        private final TagDefinition tagDefinition;
        private final MemoryCacheService.TagDefinitionCacheKey key;

        public AddTagDefinitionToCacheAfterCommitSynchronization(
                MemoryCacheService.TagDefinitionCacheKey key, TagDefinition tagDefinition) {
            this.tagDefinition = tagDefinition;
            this.key = key;
        }

        @Override
        public void afterCommit() {
            memoryCacheService.put(MemoryCacheService.CacheEnum.TAG_DEFINITION, key, tagDefinition);
        }
    }
}

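A design note on the new class above: the tag definition is only published to the shared MemoryCacheService once the transaction commits, via the registered TransactionSynchronization; within the transaction, the per-transaction map held in TransactionDetails user data serves as the cache. Publishing to the shared cache any earlier could leak a row that later rolls back. The Spring pattern in isolation (cache, key and value are placeholders for this illustration):

    // Sketch: defer a shared-cache write until the surrounding transaction commits.
    TransactionSynchronizationManager.registerSynchronization(new TransactionSynchronization() {
        @Override
        public void afterCommit() {
            // The row is durable and visible to other transactions now, so it
            // is safe for other threads to observe it via the cache.
            cache.put(key, value);
        }
    });
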
@@ -137,7 +137,8 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc {
    public ExtendedHSearchIndexData extractLuceneIndexData(
            IBaseResource theResource, ResourceIndexedSearchParams theNewParams) {
        String resourceType = myFhirContext.getResourceType(theResource);
        ResourceSearchParams activeSearchParams = mySearchParamRegistry.getActiveSearchParams(resourceType);
        ResourceSearchParams activeSearchParams = mySearchParamRegistry.getActiveSearchParams(
                resourceType, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);
        ExtendedHSearchIndexExtractor extractor = new ExtendedHSearchIndexExtractor(
                myStorageSettings, myFhirContext, activeSearchParams, mySearchParamExtractor);
        return extractor.extract(theResource, theNewParams);

@@ -81,7 +81,7 @@ public class HistoryBuilder {
    private FhirContext myCtx;

    @Autowired
    private IIdHelperService myIdHelperService;
    private IIdHelperService<JpaPid> myIdHelperService;

    /**
     * Constructor

@@ -150,13 +150,13 @@ public class HistoryBuilder {
        query.setMaxResults(theToIndex - theFromIndex);

        List<ResourceHistoryTable> tables = query.getResultList();
        if (tables.size() > 0) {
        if (!tables.isEmpty()) {
            ImmutableListMultimap<Long, ResourceHistoryTable> resourceIdToHistoryEntries =
                    Multimaps.index(tables, ResourceHistoryTable::getResourceId);
            Set<JpaPid> pids = resourceIdToHistoryEntries.keySet().stream()
                    .map(JpaPid::fromId)
                    .collect(Collectors.toSet());
            PersistentIdToForcedIdMap pidToForcedId = myIdHelperService.translatePidsToForcedIds(pids);
            PersistentIdToForcedIdMap<JpaPid> pidToForcedId = myIdHelperService.translatePidsToForcedIds(pids);
            ourLog.trace("Translated IDs: {}", pidToForcedId.getResourcePersistentIdOptionalMap());

            for (Long nextResourceId : resourceIdToHistoryEntries.keySet()) {

@@ -172,8 +172,9 @@ public class HistoryBuilder {
                // For that reason, strip the prefix before setting the transientForcedId below.
                // If not stripped this messes up the id of the resource as the resourceType would be repeated
                // twice like Patient/Patient/1234 in the resource constructed
                if (resourceId.startsWith(myResourceType + "/")) {
                    resourceId = resourceId.substring(myResourceType.length() + 1);
                int slashIdx = resourceId.indexOf('/');
                if (slashIdx != -1) {
                    resourceId = resourceId.substring(slashIdx + 1);
                }
            } else {
                resourceId = nextResourceId.toString();

@@ -242,13 +242,15 @@ public class JpaResourceDaoCodeSystem<T extends IBaseResource> extends BaseHapiF
                theTransactionDetails,
                theForceUpdate,
                theCreateNewHistoryEntry);
        if (!retVal.isUnchangedInCurrentOperation()) {
        if (thePerformIndexing) {
            if (!retVal.isUnchangedInCurrentOperation()) {

            org.hl7.fhir.r4.model.CodeSystem cs = myVersionCanonicalizer.codeSystemToCanonical(theResource);
            addPidToResource(theEntity, cs);
                org.hl7.fhir.r4.model.CodeSystem cs = myVersionCanonicalizer.codeSystemToCanonical(theResource);
                addPidToResource(theEntity, cs);

            myTerminologyCodeSystemStorageSvc.storeNewCodeSystemVersionIfNeeded(
                    cs, (ResourceTable) theEntity, theRequest);
                myTerminologyCodeSystemStorageSvc.storeNewCodeSystemVersionIfNeeded(
                        cs, (ResourceTable) theEntity, theRequest);
            }
        }

        return retVal;

@@ -22,6 +22,8 @@ package ca.uhn.fhir.jpa.dao;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoObservation;
import ca.uhn.fhir.jpa.api.svc.ResolveIdentityMode;
import ca.uhn.fhir.jpa.model.cross.IResourceLookup;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;

@@ -39,12 +41,15 @@ import jakarta.persistence.PersistenceContext;
import jakarta.persistence.PersistenceContextType;
import jakarta.servlet.http.HttpServletResponse;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.Observation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.transaction.support.TransactionTemplate;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;

public class JpaResourceDaoObservation<T extends IBaseResource> extends BaseHapiFhirResourceDao<T>

@@ -138,13 +143,14 @@ public class JpaResourceDaoObservation<T extends IBaseResource> extends BaseHapi
            patientParams.addAll(theSearchParameterMap.get(getSubjectParamName()));
        }

        Map<IIdType, ReferenceParam> ids = new HashMap<>();
        for (List<? extends IQueryParameterType> nextPatientList : patientParams) {
            for (IQueryParameterType nextOr : nextPatientList) {
                if (nextOr instanceof ReferenceParam) {
                    ReferenceParam ref = (ReferenceParam) nextOr;
                    JpaPid pid = myIdHelperService.resolveResourcePersistentIds(
                            requestPartitionId, ref.getResourceType(), ref.getIdPart());
                    orderedSubjectReferenceMap.put(pid.getId(), nextOr);
                    IIdType id = myFhirContext.getVersion().newIdType();
                    id.setParts(null, ref.getResourceType(), ref.getIdPart(), null);
                    ids.put(id, ref);
                } else {
                    throw new IllegalArgumentException(
                            Msg.code(942) + "Invalid token type (expecting ReferenceParam): " + nextOr.getClass());
@@ -152,6 +158,15 @@ public class JpaResourceDaoObservation<T extends IBaseResource> extends BaseHapi
            }
        }

        Map<IIdType, IResourceLookup<JpaPid>> resolvedIds = myIdHelperService.resolveResourceIdentities(
                requestPartitionId,
                ids.keySet(),
                ResolveIdentityMode.includeDeleted().cacheOk());
        for (Map.Entry<IIdType, ReferenceParam> entry : ids.entrySet()) {
            IResourceLookup<JpaPid> lookup = resolvedIds.get(entry.getKey());
            orderedSubjectReferenceMap.put(lookup.getPersistentId().getId(), entry.getValue());
        }

        theSearchParameterMap.remove(getSubjectParamName());
        theSearchParameterMap.remove(getPatientParamName());

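The net effect of the two hunks above is that resolution moves out of the loop: instead of one resolveResourcePersistentIds round-trip per subject reference, the references are collected into a map and resolved with a single resolveResourceIdentities call. Schematically (collectReferenceIds is a hypothetical helper standing in for the loop above):

    // Before (sketch): N references -> N lookups
    // for (ReferenceParam ref : refs) { resolveOne(ref); }

    // After (sketch): N references -> 1 batched lookup
    Map<IIdType, ReferenceParam> ids = collectReferenceIds(refs); // hypothetical helper
    Map<IIdType, IResourceLookup<JpaPid>> resolved = myIdHelperService.resolveResourceIdentities(
            requestPartitionId, ids.keySet(), ResolveIdentityMode.includeDeleted().cacheOk());
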
@@ -303,12 +303,14 @@ public class JpaResourceDaoValueSet<T extends IBaseResource> extends BaseHapiFhi
                theForceUpdate,
                theCreateNewHistoryEntry);

        if (getStorageSettings().isPreExpandValueSets() && !retVal.isUnchangedInCurrentOperation()) {
            if (retVal.getDeleted() == null) {
                ValueSet valueSet = myVersionCanonicalizer.valueSetToCanonical(theResource);
                myTerminologySvc.storeTermValueSet(retVal, valueSet);
            } else {
                myTerminologySvc.deleteValueSetAndChildren(retVal);
        if (thePerformIndexing) {
            if (getStorageSettings().isPreExpandValueSets() && !retVal.isUnchangedInCurrentOperation()) {
                if (retVal.getDeleted() == null) {
                    ValueSet valueSet = myVersionCanonicalizer.valueSetToCanonical(theResource);
                    myTerminologySvc.storeTermValueSet(retVal, valueSet);
                } else {
                    myTerminologySvc.deleteValueSetAndChildren(retVal);
                }
            }
        }

@@ -26,25 +26,29 @@ import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
import ca.uhn.fhir.jpa.api.svc.ResolveIdentityMode;
import ca.uhn.fhir.jpa.config.HapiFhirHibernateJpaDialect;
import ca.uhn.fhir.jpa.model.cross.IResourceLookup;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamToken;
import ca.uhn.fhir.jpa.model.entity.StorageSettings;
import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.util.QueryChunker;
import ca.uhn.fhir.model.api.IQueryParameterType;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.util.FhirTerser;
import ca.uhn.fhir.util.ResourceReferenceInfo;
import ca.uhn.fhir.util.StopWatch;
import ca.uhn.fhir.util.TaskChunker;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ListMultimap;
import jakarta.annotation.Nullable;
import jakarta.persistence.EntityManager;
import jakarta.persistence.FlushModeType;
import jakarta.persistence.PersistenceContext;
import jakarta.persistence.PersistenceContextType;
import jakarta.persistence.PersistenceException;

@@ -67,6 +71,7 @@ import org.springframework.context.ApplicationContext;

import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.IdentityHashMap;
import java.util.List;

@@ -83,6 +88,7 @@ public class TransactionProcessor extends BaseTransactionProcessor {

    public static final Pattern SINGLE_PARAMETER_MATCH_URL_PATTERN = Pattern.compile("^[^?]+[?][a-z0-9-]+=[^&,]+$");
    private static final Logger ourLog = LoggerFactory.getLogger(TransactionProcessor.class);
    public static final int CONDITIONAL_URL_FETCH_CHUNK_SIZE = 100;

    @Autowired
    private ApplicationContext myApplicationContext;

@@ -146,25 +152,51 @@ public class TransactionProcessor extends BaseTransactionProcessor {
            List<IBase> theEntries,
            StopWatch theTransactionStopWatch) {

        ITransactionProcessorVersionAdapter<?, ?> versionAdapter = getVersionAdapter();
        RequestPartitionId requestPartitionId =
                super.determineRequestPartitionIdForWriteEntries(theRequest, theEntries);
        /*
         * We temporarily set the flush mode for the duration of the DB transaction
         * from the default of AUTO to the temporary value of COMMIT here. We do this
         * because in AUTO mode, if any SQL SELECTs are required during the
         * processing of an individual transaction entry, the server will flush the
         * pending INSERTs/UPDATEs to the database before executing the SELECT.
         * This hurts performance since we don't get the benefit of batching those
         * write operations as much as possible. The tradeoff here is that we
         * could theoretically have transaction operations which try to read
         * data previously written in the same transaction, and they won't see it.
         * This shouldn't actually be an issue anyhow - we pre-fetch conditional
         * URLs and reference targets at the start of the transaction. But this
         * tradeoff still feels worth it, since the most common use of transactions
         * is for fast writing of data.
         *
         * Note that it's probably not necessary to reset it back, it should
         * automatically go back to the default value after the transaction but
         * we reset it just to be safe.
         */
        FlushModeType initialFlushMode = myEntityManager.getFlushMode();
        try {
            myEntityManager.setFlushMode(FlushModeType.COMMIT);

            if (requestPartitionId != null) {
                preFetch(theTransactionDetails, theEntries, versionAdapter, requestPartitionId);
            ITransactionProcessorVersionAdapter<?, ?> versionAdapter = getVersionAdapter();
            RequestPartitionId requestPartitionId =
                    super.determineRequestPartitionIdForWriteEntries(theRequest, theEntries);

            if (requestPartitionId != null) {
                preFetch(theTransactionDetails, theEntries, versionAdapter, requestPartitionId);
            }

            return super.doTransactionWriteOperations(
                    theRequest,
                    theActionName,
                    theTransactionDetails,
                    theAllIds,
                    theIdSubstitutions,
                    theIdToPersistedOutcome,
                    theResponse,
                    theOriginalRequestOrder,
                    theEntries,
                    theTransactionStopWatch);
        } finally {
            myEntityManager.setFlushMode(initialFlushMode);
        }

        return super.doTransactionWriteOperations(
                theRequest,
                theActionName,
                theTransactionDetails,
                theAllIds,
                theIdSubstitutions,
                theIdToPersistedOutcome,
                theResponse,
                theOriginalRequestOrder,
                theEntries,
                theTransactionStopWatch);
    }

    private void preFetch(

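The try/finally shape is the important part of the hunk above: the flush mode is switched to COMMIT for the span of the bundle processing and restored afterwards even on failure. Stripped of the transaction plumbing, the pattern is:

    // Sketch: batch writes by deferring Hibernate's automatic flush-before-SELECT.
    FlushModeType initialFlushMode = entityManager.getFlushMode();
    try {
        entityManager.setFlushMode(FlushModeType.COMMIT);
        // ... process all bundle entries; SELECTs no longer force a flush,
        // so INSERTs/UPDATEs can be batched at commit time ...
    } finally {
        // Restore defensively, even though the mode should reset with the transaction.
        entityManager.setFlushMode(initialFlushMode);
    }
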
@@ -199,40 +231,100 @@ public class TransactionProcessor extends BaseTransactionProcessor {
            RequestPartitionId theRequestPartitionId,
            Set<String> foundIds,
            List<Long> idsToPreFetch) {
        List<IIdType> idsToPreResolve = new ArrayList<>();

        FhirTerser terser = myFhirContext.newTerser();

        // Key: The ID of the resource
        // Value: TRUE if we should prefetch the existing resource details and all stored indexes,
        // FALSE if we should prefetch only the identity (resource ID and deleted status)
        Map<IIdType, Boolean> idsToPreResolve = new HashMap<>(theEntries.size() * 3);

        for (IBase nextEntry : theEntries) {
            IBaseResource resource = theVersionAdapter.getResource(nextEntry);
            if (resource != null) {
                String verb = theVersionAdapter.getEntryRequestVerb(myFhirContext, nextEntry);

                /*
                 * Pre-fetch any resources that are potentially being directly updated by ID
                 */
                if ("PUT".equals(verb) || "PATCH".equals(verb)) {
                    String requestUrl = theVersionAdapter.getEntryRequestUrl(nextEntry);
                    if (countMatches(requestUrl, '/') == 1 && countMatches(requestUrl, '?') == 0) {
                    if (countMatches(requestUrl, '?') == 0) {
                        IIdType id = myFhirContext.getVersion().newIdType();
                        id.setValue(requestUrl);
                        idsToPreResolve.add(id);
                        IIdType unqualifiedVersionless = id.toUnqualifiedVersionless();
                        idsToPreResolve.put(unqualifiedVersionless, Boolean.TRUE);
                    }
                }

                /*
                 * Pre-fetch any resources that are referred to directly by ID (don't replace
                 * the TRUE flag with FALSE in case we're updating a resource but also
                 * pointing to that resource elsewhere in the bundle)
                 */
                if ("PUT".equals(verb) || "POST".equals(verb)) {
                    for (ResourceReferenceInfo referenceInfo : terser.getAllResourceReferences(resource)) {
                        IIdType reference = referenceInfo.getResourceReference().getReferenceElement();
                        if (reference != null
                                && !reference.isLocal()
                                && !reference.isUuid()
                                && reference.hasResourceType()
                                && reference.hasIdPart()
                                && !reference.getValue().contains("?")) {
                            idsToPreResolve.putIfAbsent(reference.toUnqualifiedVersionless(), Boolean.FALSE);
                        }
                    }
                }
            }
        }
        List<JpaPid> outcome =
                myIdHelperService.resolveResourcePersistentIdsWithCache(theRequestPartitionId, idsToPreResolve).stream()
                        .collect(Collectors.toList());
        for (JpaPid next : outcome) {
            foundIds.add(
                    next.getAssociatedResourceId().toUnqualifiedVersionless().getValue());
            theTransactionDetails.addResolvedResourceId(next.getAssociatedResourceId(), next);
            if (myStorageSettings.getResourceClientIdStrategy() != JpaStorageSettings.ClientIdStrategyEnum.ANY
                    || !next.getAssociatedResourceId().isIdPartValidLong()) {
                idsToPreFetch.add(next.getId());

        /*
         * If all the entries in the pre-fetch ID map have a value of TRUE, this
         * means we only have IDs associated with resources we're going to directly
         * update/patch within the transaction. In that case, it's fine to include
         * deleted resources, since updating them will bring them back to life.
         *
         * If we have any FALSE entries, we're also pre-fetching reference targets
         * which means we don't want deleted resources, because those are not OK
         * to reference.
         */
        boolean preFetchIncludesReferences = idsToPreResolve.values().stream().anyMatch(t -> !t);
        ResolveIdentityMode resolveMode = preFetchIncludesReferences
                ? ResolveIdentityMode.excludeDeleted().noCacheUnlessDeletesDisabled()
                : ResolveIdentityMode.includeDeleted().cacheOk();

        Map<IIdType, IResourceLookup<JpaPid>> outcomes = myIdHelperService.resolveResourceIdentities(
                theRequestPartitionId, idsToPreResolve.keySet(), resolveMode);
        for (Map.Entry<IIdType, IResourceLookup<JpaPid>> entry : outcomes.entrySet()) {
            JpaPid next = (JpaPid) entry.getValue().getPersistentId();
            IIdType unqualifiedVersionlessId = entry.getKey();
            foundIds.add(unqualifiedVersionlessId.getValue());
            theTransactionDetails.addResolvedResourceId(unqualifiedVersionlessId, next);
            if (idsToPreResolve.get(unqualifiedVersionlessId) == Boolean.TRUE) {
                if (myStorageSettings.getResourceClientIdStrategy() != JpaStorageSettings.ClientIdStrategyEnum.ANY
                        || (next.getAssociatedResourceId() != null
                                && !next.getAssociatedResourceId().isIdPartValidLong())) {
                    idsToPreFetch.add(next.getId());
                }
            }
        }
        for (IIdType next : idsToPreResolve) {
            if (!foundIds.contains(next.toUnqualifiedVersionless().getValue())) {

        // Any IDs that could not be resolved are presumably not there, so
        // cache that fact so we don't look again later
        for (IIdType next : idsToPreResolve.keySet()) {
            if (!foundIds.contains(next.getValue())) {
                theTransactionDetails.addResolvedResourceId(next.toUnqualifiedVersionless(), null);
            }
        }
    }

    @Override
    protected void handleVerbChangeInTransactionWriteOperations() {
        super.handleVerbChangeInTransactionWriteOperations();

        myEntityManager.flush();
    }

    private void preFetchConditionalUrls(
            TransactionDetails theTransactionDetails,
            List<IBase> theEntries,

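To restate the map semantics the comments above describe: the Boolean value records why an ID is being pre-resolved, and the two reasons have different tolerance for deleted resources. Roughly:

    // Key: resource ID found in the bundle; Value: why we care about it.
    //   TRUE  -> target of a PUT/PATCH in this bundle: fetch full details;
    //            deleted is fine, the update will resurrect it.
    //   FALSE -> only referenced by another resource: identity is enough;
    //            deleted is NOT fine, references to deleted resources are invalid.
    // putIfAbsent keeps TRUE winning when a resource is both updated and referenced.
    idsToPreResolve.putIfAbsent(reference.toUnqualifiedVersionless(), Boolean.FALSE);
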
@@ -274,12 +366,10 @@ public class TransactionProcessor extends BaseTransactionProcessor {
        }
    }

    new QueryChunker<MatchUrlToResolve>()
            .chunk(
                    searchParameterMapsToResolve,
                    100,
                    map -> preFetchSearchParameterMaps(
                            theTransactionDetails, theRequestPartitionId, map, idsToPreFetch));
    TaskChunker.chunk(
            searchParameterMapsToResolve,
            CONDITIONAL_URL_FETCH_CHUNK_SIZE,
            map -> preFetchSearchParameterMaps(theTransactionDetails, theRequestPartitionId, map, idsToPreFetch));
}

/**

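QueryChunker's instance-style chunk() calls are replaced throughout this commit with the static TaskChunker.chunk(), and the magic number 100 gains a name. The call shape, for reference (the comments are the only addition here):

    // Static utility style: partition a large work list and run the consumer per chunk,
    // keeping each generated SQL statement under the parameter-count limit.
    TaskChunker.chunk(
            searchParameterMapsToResolve,          // the full work list
            CONDITIONAL_URL_FETCH_CHUNK_SIZE,      // 100, per the constant added above
            map -> preFetchSearchParameterMaps(theTransactionDetails, theRequestPartitionId, map, idsToPreFetch));
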
@@ -29,6 +29,7 @@ import org.springframework.data.repository.history.RevisionRepository;
import org.springframework.data.repository.query.Param;
import org.springframework.stereotype.Repository;

import java.util.Collection;
import java.util.Date;
import java.util.List;
import java.util.Optional;

@@ -122,6 +123,20 @@ public interface IMdmLinkJpaRepository
    List<MdmPidTuple> expandPidsByGoldenResourcePidAndMatchResult(
            @Param("goldenPid") Long theSourcePid, @Param("matchResult") MdmMatchResultEnum theMdmMatchResultEnum);

    @Query(
            "SELECT lookup_link.myGoldenResourcePid as goldenPid, gld_rt.myPartitionIdValue as goldenPartitionId, lookup_link.mySourcePid as sourcePid, lookup_link.myPartitionIdValue as sourcePartitionId "
                    + "FROM MdmLink lookup_link "
                    + "INNER JOIN ResourceTable gld_rt "
                    + "on lookup_link.myGoldenResourcePid=gld_rt.myId "
                    + "WHERE "
                    + " (lookup_link.myGoldenResourcePid IN (:pids) "
                    + " OR"
                    + " lookup_link.mySourcePid IN (:pids))"
                    + "AND lookup_link.myMatchResult = :matchResult")
    List<MdmPidTuple> expandPidsByGoldenResourcePidsOrSourcePidsAndMatchResult(
            @Param("pids") Collection<Long> theSourcePid,
            @Param("matchResult") MdmMatchResultEnum theMdmMatchResultEnum);

    @Query(
            "SELECT ml.myId FROM MdmLink ml WHERE ml.myMdmSourceType = :resourceName AND ml.myCreated <= :highThreshold ORDER BY ml.myCreated DESC")
    List<Long> findPidByResourceNameAndThreshold(

@@ -20,8 +20,25 @@
package ca.uhn.fhir.jpa.dao.data;

import ca.uhn.fhir.jpa.model.entity.TagDefinition;
import ca.uhn.fhir.jpa.model.entity.TagTypeEnum;
import org.springframework.data.domain.Pageable;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;

import java.util.List;

public interface ITagDefinitionDao extends JpaRepository<TagDefinition, Long>, IHapiFhirJpaRepository {
    // nothing
    @Query("SELECT t FROM TagDefinition t WHERE " + "t.myTagType = :tagType AND "
            + "( :scheme IS NULL OR :scheme = '' OR t.mySystem = :scheme ) AND "
            + "t.myCode = :term AND "
            + "( :version IS NULL OR :version = '' OR t.myVersion = :version ) AND "
            + "( :userSelected IS NULL OR t.myUserSelected = :userSelected )")
    List<TagDefinition> findByTagTypeAndSchemeAndTermAndVersionAndUserSelected(
            @Param("tagType") TagTypeEnum tagType,
            @Param("scheme") String scheme,
            @Param("term") String term,
            @Param("version") String version,
            @Param("userSelected") Boolean userSelected,
            Pageable pageable);
}

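The query above folds the optional attributes into one statement using the ":param IS NULL OR column = :param" idiom, so a single declared query stands in for every combination of supplied and omitted filters. A minimal usage sketch (the argument values are illustrative; repository wiring assumed):

    // Null or empty scheme/version, or null userSelected, disables that clause,
    // so the query only filters on the attributes the caller actually supplied.
    List<TagDefinition> match = tagDefinitionDao.findByTagTypeAndSchemeAndTermAndVersionAndUserSelected(
            TagTypeEnum.TAG,
            "http://example.org/scheme", // pass null to skip the scheme filter
            "some-code",
            null,                        // version: not filtered here
            null,                        // userSelected: not filtered here
            Pageable.ofSize(1));         // at most one row, as in getOrCreateTag above
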
@@ -25,6 +25,7 @@ import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.model.PersistentIdToForcedIdMap;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
import ca.uhn.fhir.jpa.api.svc.ResolveIdentityMode;
import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.cross.IResourceLookup;

@@ -35,11 +36,12 @@ import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.search.builder.SearchBuilder;
import ca.uhn.fhir.jpa.util.MemoryCacheService;
import ca.uhn.fhir.jpa.util.QueryChunker;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.rest.api.server.storage.BaseResourcePersistentId;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException;
import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import ca.uhn.fhir.util.TaskChunker;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.MultimapBuilder;

@@ -60,11 +62,12 @@ import org.hl7.fhir.instance.model.api.IAnyResource;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.IdType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.support.TransactionSynchronizationManager;

import java.time.LocalDate;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;

@@ -79,6 +82,7 @@ import java.util.Set;
import java.util.stream.Collectors;

import static ca.uhn.fhir.jpa.search.builder.predicate.BaseJoiningPredicateBuilder.replaceDefaultPartitionIdIfNonNull;
import static ca.uhn.fhir.model.primitive.IdDt.isValidLong;
import static org.apache.commons.lang3.StringUtils.isNotBlank;

/**

@@ -102,6 +106,7 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank;
public class IdHelperService implements IIdHelperService<JpaPid> {
    public static final Predicate[] EMPTY_PREDICATE_ARRAY = new Predicate[0];
    public static final String RESOURCE_PID = "RESOURCE_PID";
    private static final Logger ourLog = LoggerFactory.getLogger(IdHelperService.class);

    @Autowired
    protected IResourceTableDao myResourceTableDao;

@@ -128,20 +133,6 @@ public class IdHelperService implements IIdHelperService<JpaPid> {
        myDontCheckActiveTransactionForUnitTest = theDontCheckActiveTransactionForUnitTest;
    }

    /**
     * Given a forced ID, convert it to its Long value. Since you are allowed to use string IDs for resources, we need to
     * convert those to the underlying Long values that are stored, for lookup and comparison purposes.
     *
     * @throws ResourceNotFoundException If the ID can not be found
     */
    @Override
    @Nonnull
    public IResourceLookup<JpaPid> resolveResourceIdentity(
            @Nonnull RequestPartitionId theRequestPartitionId, String theResourceType, String theResourceId)
            throws ResourceNotFoundException {
        return resolveResourceIdentity(theRequestPartitionId, theResourceType, theResourceId, false);
    }

    /**
     * Given a forced ID, convert it to its Long value. Since you are allowed to use string IDs for resources, we need to
     * convert those to the underlying Long values that are stored, for lookup and comparison purposes.

@@ -153,48 +144,236 @@ public class IdHelperService implements IIdHelperService<JpaPid> {
    @Nonnull
    public IResourceLookup<JpaPid> resolveResourceIdentity(
            @Nonnull RequestPartitionId theRequestPartitionId,
            String theResourceType,
            final String theResourceId,
            boolean theExcludeDeleted)
            @Nullable String theResourceType,
            @Nonnull final String theResourceId,
            @Nonnull ResolveIdentityMode theMode)
            throws ResourceNotFoundException {
        assert myDontCheckActiveTransactionForUnitTest || TransactionSynchronizationManager.isSynchronizationActive()
                : "no transaction active";

        String resourceIdToUse = theResourceId;
        if (resourceIdToUse.contains("/")) {
            resourceIdToUse = theResourceId.substring(resourceIdToUse.indexOf("/") + 1);
        IIdType id;
        if (theResourceType != null) {
            id = newIdType(theResourceType + "/" + theResourceId);
        } else {
            id = newIdType(theResourceId);
        }
        IdDt id = new IdDt(theResourceType, resourceIdToUse);
        Map<String, List<IResourceLookup<JpaPid>>> matches =
                translateForcedIdToPids(theRequestPartitionId, Collections.singletonList(id), theExcludeDeleted);
        List<IIdType> ids = List.of(id);
        Map<IIdType, IResourceLookup<JpaPid>> outcome = resolveResourceIdentities(theRequestPartitionId, ids, theMode);

        // We only pass 1 input in so only 0..1 will come back
        if (matches.isEmpty() || !matches.containsKey(resourceIdToUse)) {
        if (!outcome.containsKey(id)) {
            throw new ResourceNotFoundException(Msg.code(2001) + "Resource " + id + " is not known");
        }

        if (matches.size() > 1 || matches.get(resourceIdToUse).size() > 1) {
            /*
             * This means that:
             * 1. There are two resources with the exact same resource type and forced id
             * 2. The unique constraint on this column-pair has been dropped
             */
            String msg = myFhirCtx.getLocalizer().getMessage(IdHelperService.class, "nonUniqueForcedId");
            throw new PreconditionFailedException(Msg.code(1099) + msg);
        return outcome.get(id);
    }

    @Nonnull
    @Override
    public Map<IIdType, IResourceLookup<JpaPid>> resolveResourceIdentities(
            @Nonnull RequestPartitionId theRequestPartitionId,
            Collection<IIdType> theIds,
            ResolveIdentityMode theMode) {
        assert myDontCheckActiveTransactionForUnitTest || TransactionSynchronizationManager.isSynchronizationActive()
                : "no transaction active";

        if (theIds.isEmpty()) {
            return new HashMap<>();
        }

        return matches.get(resourceIdToUse).get(0);
        Collection<IIdType> ids = new ArrayList<>(theIds);
        ids.forEach(id -> Validate.isTrue(id.hasIdPart()));

        RequestPartitionId requestPartitionId = replaceDefault(theRequestPartitionId);
        ListMultimap<IIdType, IResourceLookup<JpaPid>> idToLookup =
                MultimapBuilder.hashKeys(theIds.size()).arrayListValues(1).build();

        // Do we have any FHIR ID lookups cached for any of the IDs
        if (theMode.isUseCache(myStorageSettings.isDeleteEnabled()) && !ids.isEmpty()) {
            resolveResourceIdentitiesForFhirIdsUsingCache(requestPartitionId, theMode, ids, idToLookup);
        }

        // We still haven't found IDs, let's look them up in the DB
        if (!ids.isEmpty()) {
            resolveResourceIdentitiesForFhirIdsUsingDatabase(requestPartitionId, ids, idToLookup);
        }

        // Convert the multimap into a simple map
        Map<IIdType, IResourceLookup<JpaPid>> retVal = new HashMap<>();
        for (Map.Entry<IIdType, IResourceLookup<JpaPid>> next : idToLookup.entries()) {
            if (next.getValue().getDeleted() != null) {
                if (theMode.isFailOnDeleted()) {
                    String msg = myFhirCtx
                            .getLocalizer()
                            .getMessageSanitized(
                                    IdHelperService.class,
                                    "deletedId",
                                    next.getKey().getValue());
                    throw new ResourceGoneException(Msg.code(2572) + msg);
                }
                if (!theMode.isIncludeDeleted()) {
                    continue;
                }
            }

            IResourceLookup previousValue = retVal.put(next.getKey(), next.getValue());
            if (previousValue != null) {
                /*
                 * This means that either:
                 * 1. There are two resources with the exact same resource type and forced
                 * id. The most likely reason for that is that someone is performing a
                 * multi-partition search and there are resources on each partition
                 * with the same ID.
                 * 2. The unique constraint on the FHIR_ID column has been dropped
                 */
                ourLog.warn(
                        "Resource ID[{}] corresponds to lookups: {} and {}",
                        next.getKey(),
                        previousValue,
                        next.getValue());
                String msg = myFhirCtx.getLocalizer().getMessage(IdHelperService.class, "nonUniqueForcedId");
                throw new PreconditionFailedException(Msg.code(1099) + msg);
            }
        }

        return retVal;
    }

    /**
     * Returns a mapping of Id -> IResourcePersistentId.
     * If any resource is not found, it will throw ResourceNotFound exception (and no map will be returned)
     * Fetch the resource identity ({@link IResourceLookup}) for a collection of
     * resource IDs from the internal memory cache if possible. Note that we only
     * use cached results if deletes are disabled on the server (since it is
     * therefore not possible that we have an entry in the cache that has since
     * been deleted but the cache doesn't know about the deletion), or if we
     * aren't excluding deleted results anyhow.
     *
     * @param theRequestPartitionId The partition(s) to search
     * @param theIdsToResolve       The IDs we should look up. Any IDs that are resolved
     *                              will be removed from this list. Any IDs remaining in
     *                              the list after calling this method still haven't
     *                              been attempted to be resolved.
     * @param theMapToPopulate      The results will be populated into this map
     */
    @Override
    @Nonnull
    public Map<String, JpaPid> resolveResourcePersistentIds(
            @Nonnull RequestPartitionId theRequestPartitionId, String theResourceType, List<String> theIds) {
        return resolveResourcePersistentIds(theRequestPartitionId, theResourceType, theIds, false);
    private void resolveResourceIdentitiesForFhirIdsUsingCache(
            @Nonnull RequestPartitionId theRequestPartitionId,
            ResolveIdentityMode theMode,
            Collection<IIdType> theIdsToResolve,
            ListMultimap<IIdType, IResourceLookup<JpaPid>> theMapToPopulate) {
        for (Iterator<IIdType> idIterator = theIdsToResolve.iterator(); idIterator.hasNext(); ) {
            IIdType nextForcedId = idIterator.next();
            MemoryCacheService.ForcedIdCacheKey nextKey = new MemoryCacheService.ForcedIdCacheKey(
                    nextForcedId.getResourceType(), nextForcedId.getIdPart(), theRequestPartitionId);
            if (theMode.isUseCache(myStorageSettings.isDeleteEnabled())) {
                List<IResourceLookup<JpaPid>> cachedLookups = myMemoryCacheService.getIfPresent(
                        MemoryCacheService.CacheEnum.RESOURCE_LOOKUP_BY_FORCED_ID, nextKey);
                if (cachedLookups != null && !cachedLookups.isEmpty()) {
                    idIterator.remove();
                    for (IResourceLookup<JpaPid> cachedLookup : cachedLookups) {
                        if (theMode.isIncludeDeleted() || cachedLookup.getDeleted() == null) {
                            theMapToPopulate.put(nextKey.toIdType(myFhirCtx), cachedLookup);
                        }
                    }
                }
            }
        }
    }

    /**
     * Fetch the resource identity ({@link IResourceLookup}) for a collection of
     * resource IDs from the database
     *
     * @param theRequestPartitionId The partition(s) to search
     * @param theIdsToResolve       The IDs we should look up
     * @param theMapToPopulate      The results will be populated into this map
     */
    private void resolveResourceIdentitiesForFhirIdsUsingDatabase(
            RequestPartitionId theRequestPartitionId,
            Collection<IIdType> theIdsToResolve,
            ListMultimap<IIdType, IResourceLookup<JpaPid>> theMapToPopulate) {

        /*
         * If we have more than a threshold of IDs, we need to chunk the execution to
         * avoid having too many parameters in one SQL statement
         */
        int maxPageSize = (SearchBuilder.getMaximumPageSize() / 2) - 10;
        if (theIdsToResolve.size() > maxPageSize) {
            TaskChunker.chunk(
                    theIdsToResolve,
                    maxPageSize,
                    chunk -> resolveResourceIdentitiesForFhirIdsUsingDatabase(
                            theRequestPartitionId, chunk, theMapToPopulate));
            return;
        }

        CriteriaBuilder cb = myEntityManager.getCriteriaBuilder();
        CriteriaQuery<Tuple> criteriaQuery = cb.createTupleQuery();
        Root<ResourceTable> from = criteriaQuery.from(ResourceTable.class);
        criteriaQuery.multiselect(
                from.get("myId"),
                from.get("myResourceType"),
                from.get("myFhirId"),
                from.get("myDeleted"),
                from.get("myPartitionIdValue"));

        List<Predicate> outerAndPredicates = new ArrayList<>(2);
        if (!theRequestPartitionId.isAllPartitions()) {
            getOptionalPartitionPredicate(theRequestPartitionId, cb, from).ifPresent(outerAndPredicates::add);
        }

        // Create one clause per id.
        List<Predicate> innerIdPredicates = new ArrayList<>(theIdsToResolve.size());
        boolean haveUntypedIds = false;
        for (IIdType next : theIdsToResolve) {
            if (!next.hasResourceType()) {
                haveUntypedIds = true;
            }

            List<Predicate> idPredicates = new ArrayList<>(2);

            if (myStorageSettings.getResourceClientIdStrategy() == JpaStorageSettings.ClientIdStrategyEnum.ALPHANUMERIC
                    && next.isIdPartValidLong()) {
                Predicate typeCriteria = cb.equal(from.get("myId"), next.getIdPartAsLong());
                idPredicates.add(typeCriteria);
            } else {
                if (isNotBlank(next.getResourceType())) {
                    Predicate typeCriteria = cb.equal(from.get("myResourceType"), next.getResourceType());
                    idPredicates.add(typeCriteria);
                }
                Predicate idCriteria = cb.equal(from.get("myFhirId"), next.getIdPart());
                idPredicates.add(idCriteria);
            }

            innerIdPredicates.add(cb.and(idPredicates.toArray(EMPTY_PREDICATE_ARRAY)));
        }
        outerAndPredicates.add(cb.or(innerIdPredicates.toArray(EMPTY_PREDICATE_ARRAY)));

        criteriaQuery.where(cb.and(outerAndPredicates.toArray(EMPTY_PREDICATE_ARRAY)));
        TypedQuery<Tuple> query = myEntityManager.createQuery(criteriaQuery);
        List<Tuple> results = query.getResultList();
        for (Tuple nextId : results) {
            // Check if the nextId has a resource ID. It may have a null resource ID if a commit is still pending.
            Long resourcePid = nextId.get(0, Long.class);
            String resourceType = nextId.get(1, String.class);
            String fhirId = nextId.get(2, String.class);
            Date deletedAd = nextId.get(3, Date.class);
            Integer partitionId = nextId.get(4, Integer.class);
            if (resourcePid != null) {
                JpaResourceLookup lookup = new JpaResourceLookup(
                        resourceType, resourcePid, deletedAd, PartitionablePartitionId.with(partitionId, null));

                MemoryCacheService.ForcedIdCacheKey nextKey =
                        new MemoryCacheService.ForcedIdCacheKey(resourceType, fhirId, theRequestPartitionId);
                IIdType id = nextKey.toIdType(myFhirCtx);
                theMapToPopulate.put(id, lookup);

                if (haveUntypedIds) {
                    id = nextKey.toIdTypeWithoutResourceType(myFhirCtx);
                    theMapToPopulate.put(id, lookup);
                }

                List<IResourceLookup<JpaPid>> valueToCache = theMapToPopulate.get(id);
                myMemoryCacheService.putAfterCommit(
                        MemoryCacheService.CacheEnum.RESOURCE_LOOKUP_BY_FORCED_ID, nextKey, valueToCache);
            }
        }
    }

    /**
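Taken together, the rewritten identity resolution is: validate, serve what the cache can, batch the rest into one (chunked) criteria query, then flatten the multimap while enforcing the deleted-resource policy and the uniqueness invariant. A caller's-eye sketch of the resulting API (the IDs are illustrative):

    // Resolve a mixed batch of IDs in one call; unresolvable IDs are simply absent
    // from the returned map rather than raising per-ID exceptions.
    Collection<IIdType> ids = List.of(new IdType("Patient/p1"), new IdType("Observation/o1"));
    Map<IIdType, IResourceLookup<JpaPid>> lookups = myIdHelperService.resolveResourceIdentities(
            requestPartitionId, ids, ResolveIdentityMode.includeDeleted().cacheOk());
    for (IIdType id : ids) {
        IResourceLookup<JpaPid> lookup = lookups.get(id);
        if (lookup == null) {
            // not found (or filtered out by the mode)
        }
    }
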
@@ -208,7 +387,7 @@ public class IdHelperService implements IIdHelperService<JpaPid> {
            @Nonnull RequestPartitionId theRequestPartitionId,
            String theResourceType,
            List<String> theIds,
            boolean theExcludeDeleted) {
            ResolveIdentityMode theMode) {
        assert myDontCheckActiveTransactionForUnitTest || TransactionSynchronizationManager.isSynchronizationActive();
        Validate.notNull(theIds, "theIds cannot be null");
        Validate.isTrue(!theIds.isEmpty(), "theIds must not be empty");

@@ -224,7 +403,7 @@ public class IdHelperService implements IIdHelperService<JpaPid> {
                // is a forced id
                // we must resolve!
                if (myStorageSettings.isDeleteEnabled()) {
                    retVal = resolveResourceIdentity(theRequestPartitionId, theResourceType, id, theExcludeDeleted)
                    retVal = resolveResourceIdentity(theRequestPartitionId, theResourceType, id, theMode)
                            .getPersistentId();
                    retVals.put(id, retVal);
                } else {

@@ -249,18 +428,6 @@ public class IdHelperService implements IIdHelperService<JpaPid> {
        return retVals;
    }

    /**
     * Given a resource type and ID, determines the internal persistent ID for the resource.
     *
     * @throws ResourceNotFoundException If the ID can not be found
     */
    @Override
    @Nonnull
    public JpaPid resolveResourcePersistentIds(
            @Nonnull RequestPartitionId theRequestPartitionId, String theResourceType, String theId) {
        return resolveResourcePersistentIds(theRequestPartitionId, theResourceType, theId, false);
    }

    /**
     * Given a resource type and ID, determines the internal persistent ID for the resource.
     * Optionally filters out deleted resources.

@@ -273,11 +440,11 @@ public class IdHelperService implements IIdHelperService<JpaPid> {
            @Nonnull RequestPartitionId theRequestPartitionId,
            String theResourceType,
            String theId,
            boolean theExcludeDeleted) {
            ResolveIdentityMode theMode) {
        Validate.notNull(theId, "theId must not be null");

        Map<String, JpaPid> retVal = resolveResourcePersistentIds(
                theRequestPartitionId, theResourceType, Collections.singletonList(theId), theExcludeDeleted);
                theRequestPartitionId, theResourceType, Collections.singletonList(theId), theMode);
        return retVal.get(theId); // should be only one
    }

@@ -359,11 +526,11 @@ public class IdHelperService implements IIdHelperService<JpaPid> {

            idsToCheck.add(nextId);
        }
        new QueryChunker<IIdType>()
                .chunk(
                        idsToCheck,
                        SearchBuilder.getMaximumPageSize() / 2,
                        ids -> doResolvePersistentIds(theRequestPartitionId, ids, retVal));
        new QueryChunker<IIdType>();
        TaskChunker.chunk(
                idsToCheck,
                SearchBuilder.getMaximumPageSize() / 2,
                ids -> doResolvePersistentIds(theRequestPartitionId, ids, retVal));
    }

    return retVal;

@@ -430,18 +597,30 @@ public class IdHelperService implements IIdHelperService<JpaPid> {
            RequestPartitionId theRequestPartitionId, CriteriaBuilder cb, Root<ResourceTable> from) {
        if (myPartitionSettings.isAllowUnqualifiedCrossPartitionReference()) {
            return Optional.empty();
        } else if (theRequestPartitionId.isDefaultPartition() && myPartitionSettings.getDefaultPartitionId() == null) {
            Predicate partitionIdCriteria = cb.isNull(from.get("myPartitionIdValue"));
            return Optional.of(partitionIdCriteria);
        } else if (!theRequestPartitionId.isAllPartitions()) {
        } else if (theRequestPartitionId.isAllPartitions()) {
            return Optional.empty();
        } else {
            List<Integer> partitionIds = theRequestPartitionId.getPartitionIds();
            partitionIds = replaceDefaultPartitionIdIfNonNull(myPartitionSettings, partitionIds);
            if (partitionIds.size() > 1) {
                Predicate partitionIdCriteria = from.get("myPartitionIdValue").in(partitionIds);
                return Optional.of(partitionIdCriteria);
            } else if (partitionIds.size() == 1) {
                Predicate partitionIdCriteria = cb.equal(from.get("myPartitionIdValue"), partitionIds.get(0));
                return Optional.of(partitionIdCriteria);
            if (partitionIds.contains(null)) {
                Predicate partitionIdNullCriteria =
                        from.get("myPartitionIdValue").isNull();
                if (partitionIds.size() == 1) {
                    return Optional.of(partitionIdNullCriteria);
                } else {
                    Predicate partitionIdCriteria = from.get("myPartitionIdValue")
                            .in(partitionIds.stream().filter(t -> t != null).collect(Collectors.toList()));
                    return Optional.of(cb.or(partitionIdCriteria, partitionIdNullCriteria));
                }
            } else {
                if (partitionIds.size() > 1) {
                    Predicate partitionIdCriteria =
                            from.get("myPartitionIdValue").in(partitionIds);
                    return Optional.of(partitionIdCriteria);
                } else if (partitionIds.size() == 1) {
                    Predicate partitionIdCriteria = cb.equal(from.get("myPartitionIdValue"), partitionIds.get(0));
                    return Optional.of(partitionIdCriteria);
                }
            }
        }
        return Optional.empty();

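The restructured method now handles the case where the requested partition list mixes the default (null) partition with named partitions, which the old branching could not express: a plain IN list never matches a NULL column. Summarizing the branches, with the mixed case spelled out (cb, from and partitionIds refer to the method above; Objects.nonNull stands in for the lambda as a sketch):

    // Sketch of the decision table implemented above:
    //   cross-partition references allowed        -> no predicate
    //   default partition only, default ID unset  -> myPartitionIdValue IS NULL
    //   all partitions                             -> no predicate
    //   list contains null plus named partitions   -> (IN non-null ids) OR (IS NULL)
    //   list contains only null                    -> IS NULL
    //   list of one / many non-null ids            -> equal / IN
    Predicate nullPart = from.get("myPartitionIdValue").isNull();
    Predicate inPart = from.get("myPartitionIdValue")
            .in(partitionIds.stream().filter(Objects::nonNull).collect(Collectors.toList()));
    Predicate combined = cb.or(inPart, nullPart); // the mixed case
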
@ -475,6 +654,7 @@ public class IdHelperService implements IIdHelperService<JpaPid> {
|
|||
return retVal;
|
||||
}
|
||||
|
||||
@SuppressWarnings("OptionalAssignedToNull")
|
||||
@Override
|
||||
public Optional<String> translatePidIdToForcedIdWithCache(JpaPid theId) {
|
||||
// do getIfPresent and then put to avoid doing I/O inside the cache.
|
||||
|
@@ -492,112 +672,6 @@ public class IdHelperService implements IIdHelperService<JpaPid> {
return forcedId;
}

private ListMultimap<String, String> organizeIdsByResourceType(Collection<IIdType> theIds) {
ListMultimap<String, String> typeToIds =
MultimapBuilder.hashKeys().arrayListValues().build();
for (IIdType nextId : theIds) {
if (myStorageSettings.getResourceClientIdStrategy() == JpaStorageSettings.ClientIdStrategyEnum.ANY
|| !isValidPid(nextId)) {
if (nextId.hasResourceType()) {
typeToIds.put(nextId.getResourceType(), nextId.getIdPart());
} else {
typeToIds.put("", nextId.getIdPart());
}
}
}
return typeToIds;
}

private Map<String, List<IResourceLookup<JpaPid>>> translateForcedIdToPids(
@Nonnull RequestPartitionId theRequestPartitionId, Collection<IIdType> theId, boolean theExcludeDeleted) {
theId.forEach(id -> Validate.isTrue(id.hasIdPart()));

if (theId.isEmpty()) {
return new HashMap<>();
}

Map<String, List<IResourceLookup<JpaPid>>> retVal = new HashMap<>();
RequestPartitionId requestPartitionId = replaceDefault(theRequestPartitionId);

if (myStorageSettings.getResourceClientIdStrategy() != JpaStorageSettings.ClientIdStrategyEnum.ANY) {
List<Long> pids = theId.stream()
.filter(t -> isValidPid(t))
.map(IIdType::getIdPartAsLong)
.collect(Collectors.toList());
if (!pids.isEmpty()) {
resolvePids(requestPartitionId, pids, retVal);
}
}

// returns a map of resourcetype->id
ListMultimap<String, String> typeToIds = organizeIdsByResourceType(theId);
for (Map.Entry<String, Collection<String>> nextEntry : typeToIds.asMap().entrySet()) {
String nextResourceType = nextEntry.getKey();
Collection<String> nextIds = nextEntry.getValue();

if (!myStorageSettings.isDeleteEnabled()) {
for (Iterator<String> forcedIdIterator = nextIds.iterator(); forcedIdIterator.hasNext(); ) {
String nextForcedId = forcedIdIterator.next();
String nextKey = nextResourceType + "/" + nextForcedId;
IResourceLookup<JpaPid> cachedLookup =
myMemoryCacheService.getIfPresent(MemoryCacheService.CacheEnum.RESOURCE_LOOKUP, nextKey);
if (cachedLookup != null) {
forcedIdIterator.remove();
retVal.computeIfAbsent(nextForcedId, id -> new ArrayList<>())
.add(cachedLookup);
}
}
}

if (!nextIds.isEmpty()) {
Collection<Object[]> views;
assert isNotBlank(nextResourceType);

if (requestPartitionId.isAllPartitions()) {
views = myResourceTableDao.findAndResolveByForcedIdWithNoType(
nextResourceType, nextIds, theExcludeDeleted);
} else {
if (requestPartitionId.isDefaultPartition()) {
views = myResourceTableDao.findAndResolveByForcedIdWithNoTypeInPartitionNull(
nextResourceType, nextIds, theExcludeDeleted);
} else if (requestPartitionId.hasDefaultPartitionId()) {
views = myResourceTableDao.findAndResolveByForcedIdWithNoTypeInPartitionIdOrNullPartitionId(
nextResourceType,
nextIds,
requestPartitionId.getPartitionIdsWithoutDefault(),
theExcludeDeleted);
} else {
views = myResourceTableDao.findAndResolveByForcedIdWithNoTypeInPartition(
nextResourceType, nextIds, requestPartitionId.getPartitionIds(), theExcludeDeleted);
}
}

for (Object[] next : views) {
String resourceType = (String) next[0];
Long resourcePid = (Long) next[1];
String forcedId = (String) next[2];
Date deletedAt = (Date) next[3];
Integer partitionId = (Integer) next[4];
LocalDate partitionDate = (LocalDate) next[5];

JpaResourceLookup lookup = new JpaResourceLookup(
resourceType,
resourcePid,
deletedAt,
PartitionablePartitionId.with(partitionId, partitionDate));
retVal.computeIfAbsent(forcedId, id -> new ArrayList<>()).add(lookup);

if (!myStorageSettings.isDeleteEnabled()) {
String key = resourceType + "/" + forcedId;
myMemoryCacheService.putAfterCommit(MemoryCacheService.CacheEnum.RESOURCE_LOOKUP, key, lookup);
}
}
}
}

return retVal;
}

public RequestPartitionId replaceDefault(RequestPartitionId theRequestPartitionId) {
if (myPartitionSettings.getDefaultPartitionId() != null) {
if (!theRequestPartitionId.isAllPartitions() && theRequestPartitionId.hasDefaultPartitionId()) {
@@ -610,59 +684,6 @@ public class IdHelperService implements IIdHelperService<JpaPid> {
return theRequestPartitionId;
}

private void resolvePids(
@Nonnull RequestPartitionId theRequestPartitionId,
List<Long> thePidsToResolve,
Map<String, List<IResourceLookup<JpaPid>>> theTargets) {
if (!myStorageSettings.isDeleteEnabled()) {
for (Iterator<Long> forcedIdIterator = thePidsToResolve.iterator(); forcedIdIterator.hasNext(); ) {
Long nextPid = forcedIdIterator.next();
String nextKey = Long.toString(nextPid);
IResourceLookup<JpaPid> cachedLookup =
myMemoryCacheService.getIfPresent(MemoryCacheService.CacheEnum.RESOURCE_LOOKUP, nextKey);
if (cachedLookup != null) {
forcedIdIterator.remove();
theTargets.computeIfAbsent(nextKey, id -> new ArrayList<>()).add(cachedLookup);
}
}
}

if (!thePidsToResolve.isEmpty()) {
Collection<Object[]> lookup;
if (theRequestPartitionId.isAllPartitions()) {
lookup = myResourceTableDao.findLookupFieldsByResourcePid(thePidsToResolve);
} else {
if (theRequestPartitionId.isDefaultPartition()) {
lookup = myResourceTableDao.findLookupFieldsByResourcePidInPartitionNull(thePidsToResolve);
} else if (theRequestPartitionId.hasDefaultPartitionId()) {
lookup = myResourceTableDao.findLookupFieldsByResourcePidInPartitionIdsOrNullPartition(
thePidsToResolve, theRequestPartitionId.getPartitionIdsWithoutDefault());
} else {
lookup = myResourceTableDao.findLookupFieldsByResourcePidInPartitionIds(
thePidsToResolve, theRequestPartitionId.getPartitionIds());
}
}
lookup.stream()
.map(t -> new JpaResourceLookup(
(String) t[0],
(Long) t[1],
(Date) t[2],
PartitionablePartitionId.with((Integer) t[3], (LocalDate) t[4])))
.forEach(t -> {
String id = t.getPersistentId().toString();
if (!theTargets.containsKey(id)) {
theTargets.put(id, new ArrayList<>());
}
theTargets.get(id).add(t);
if (!myStorageSettings.isDeleteEnabled()) {
String nextKey = t.getPersistentId().toString();
myMemoryCacheService.putAfterCommit(
MemoryCacheService.CacheEnum.RESOURCE_LOOKUP, nextKey, t);
}
});
}
}

@Override
public PersistentIdToForcedIdMap<JpaPid> translatePidsToForcedIds(Set<JpaPid> theResourceIds) {
assert myDontCheckActiveTransactionForUnitTest || TransactionSynchronizationManager.isSynchronizationActive();

@@ -673,7 +694,7 @@ public class IdHelperService implements IIdHelperService<JpaPid> {
List<Long> remainingPids =
thePids.stream().filter(t -> !retVal.containsKey(t)).collect(Collectors.toList());

new QueryChunker<Long>().chunk(remainingPids, t -> {
QueryChunker.chunk(remainingPids, t -> {
List<ResourceTable> resourceEntities = myResourceTableDao.findAllById(t);

for (ResourceTable nextResourceEntity : resourceEntities) {
@@ -701,33 +722,39 @@ public class IdHelperService implements IIdHelperService<JpaPid> {
* Pre-cache a PID-to-Resource-ID mapping for later retrieval by {@link #translatePidsToForcedIds(Set)} and related methods
*/
@Override
public void addResolvedPidToForcedId(
JpaPid theJpaPid,
public void addResolvedPidToFhirId(
@Nonnull JpaPid theJpaPid,
@Nonnull RequestPartitionId theRequestPartitionId,
String theResourceType,
@Nullable String theForcedId,
@Nonnull String theResourceType,
@Nonnull String theFhirId,
@Nullable Date theDeletedAt) {
if (theForcedId != null) {
if (theJpaPid.getAssociatedResourceId() == null) {
populateAssociatedResourceId(theResourceType, theForcedId, theJpaPid);
}

myMemoryCacheService.putAfterCommit(
MemoryCacheService.CacheEnum.PID_TO_FORCED_ID,
theJpaPid.getId(),
Optional.of(theResourceType + "/" + theForcedId));
String key = toForcedIdToPidKey(theRequestPartitionId, theResourceType, theForcedId);
myMemoryCacheService.putAfterCommit(MemoryCacheService.CacheEnum.FORCED_ID_TO_PID, key, theJpaPid);
} else {
myMemoryCacheService.putAfterCommit(
MemoryCacheService.CacheEnum.PID_TO_FORCED_ID, theJpaPid.getId(), Optional.empty());
if (theJpaPid.getAssociatedResourceId() == null) {
populateAssociatedResourceId(theResourceType, theFhirId, theJpaPid);
}

if (!myStorageSettings.isDeleteEnabled()) {
JpaResourceLookup lookup = new JpaResourceLookup(
theResourceType, theJpaPid.getId(), theDeletedAt, theJpaPid.getPartitionablePartitionId());
String nextKey = theJpaPid.toString();
myMemoryCacheService.putAfterCommit(MemoryCacheService.CacheEnum.RESOURCE_LOOKUP, nextKey, lookup);
myMemoryCacheService.putAfterCommit(
MemoryCacheService.CacheEnum.PID_TO_FORCED_ID,
theJpaPid.getId(),
Optional.of(theResourceType + "/" + theFhirId));
String key = toForcedIdToPidKey(theRequestPartitionId, theResourceType, theFhirId);
myMemoryCacheService.putAfterCommit(MemoryCacheService.CacheEnum.FORCED_ID_TO_PID, key, theJpaPid);

JpaResourceLookup lookup = new JpaResourceLookup(
theResourceType, theJpaPid.getId(), theDeletedAt, theJpaPid.getPartitionablePartitionId());

MemoryCacheService.ForcedIdCacheKey fhirIdKey =
new MemoryCacheService.ForcedIdCacheKey(theResourceType, theFhirId, theRequestPartitionId);
myMemoryCacheService.putAfterCommit(
MemoryCacheService.CacheEnum.RESOURCE_LOOKUP_BY_FORCED_ID, fhirIdKey, List.of(lookup));

// If it's a pure-numeric ID, store it in the cache without a type as well
// so that we can resolve it this way when loading entities for update
if (myStorageSettings.getResourceClientIdStrategy() == JpaStorageSettings.ClientIdStrategyEnum.ALPHANUMERIC
&& isValidLong(theFhirId)) {
MemoryCacheService.ForcedIdCacheKey fhirIdKeyWithoutType =
new MemoryCacheService.ForcedIdCacheKey(null, theFhirId, theRequestPartitionId);
myMemoryCacheService.putAfterCommit(
MemoryCacheService.CacheEnum.RESOURCE_LOOKUP_BY_FORCED_ID, fhirIdKeyWithoutType, List.of(lookup));
}
}
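A toy model of the cache writes performed by the method above (hypothetical helper, not the HAPI API): every resolved identity is cached under its typed key, and under the ALPHANUMERIC client-id strategy a pure-numeric FHIR id additionally gets an untyped key so that updates referencing the bare id can resolve through the same cache.

import java.util.ArrayList;
import java.util.List;

class ForcedIdCacheKeySketch {
	static List<String> cacheKeysFor(String resourceType, String fhirId, boolean alphanumericClientIds) {
		List<String> keys = new ArrayList<>();
		keys.add(resourceType + "/" + fhirId); // always cached with the type
		boolean isNumeric = !fhirId.isEmpty() && fhirId.chars().allMatch(Character::isDigit);
		if (alphanumericClientIds && isNumeric) {
			// pure-numeric ids are also cached without a type, mirroring the
			// ForcedIdCacheKey(null, theFhirId, ...) entry written above
			keys.add("(no-type)/" + fhirId);
		}
		return keys;
	}

	public static void main(String[] args) {
		System.out.println(cacheKeysFor("Patient", "ABC", true)); // [Patient/ABC]
		System.out.println(cacheKeysFor("Patient", "123", true)); // [Patient/123, (no-type)/123]
	}
}
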
@@ -736,19 +763,6 @@ public class IdHelperService implements IIdHelperService<JpaPid> {
myPartitionSettings = thePartitionSettings;
}

public static boolean isValidPid(IIdType theId) {
if (theId == null) {
return false;
}

String idPart = theId.getIdPart();
return isValidPid(idPart);
}

public static boolean isValidPid(String theIdPart) {
return StringUtils.isNumeric(theIdPart);
}

@Override
@Nonnull
public List<JpaPid> getPidsOrThrowException(

@@ -764,7 +778,11 @@ public class IdHelperService implements IIdHelperService<JpaPid> {
if (resourceId == null) {
IIdType id = theResource.getIdElement();
try {
retVal = resolveResourcePersistentIds(theRequestPartitionId, id.getResourceType(), id.getIdPart());
retVal = resolveResourceIdentityPid(
theRequestPartitionId,
id.getResourceType(),
id.getIdPart(),
ResolveIdentityMode.includeDeleted().cacheOk());
} catch (ResourceNotFoundException e) {
retVal = null;
}

@@ -836,4 +854,23 @@ public class IdHelperService implements IIdHelperService<JpaPid> {
public JpaPid newPidFromStringIdAndResourceName(String thePid, String theResourceName) {
return JpaPid.fromIdAndResourceType(Long.parseLong(thePid), theResourceName);
}

private IIdType newIdType(String theValue) {
IIdType retVal = myFhirCtx.getVersion().newIdType();
retVal.setValue(theValue);
return retVal;
}

public static boolean isValidPid(IIdType theId) {
if (theId == null) {
return false;
}

String idPart = theId.getIdPart();
return isValidPid(idPart);
}

public static boolean isValidPid(String theIdPart) {
return StringUtils.isNumeric(theIdPart);
}
}

@@ -69,6 +69,7 @@ import org.springframework.data.domain.Pageable;
import org.springframework.data.history.Revisions;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.List;

@@ -150,6 +151,17 @@ public class MdmLinkDaoJpaImpl implements IMdmLinkDao<JpaPid, MdmLink> {
.collect(Collectors.toList());
}

@Override
public Collection<MdmPidTuple<JpaPid>> resolveGoldenResources(List<JpaPid> theSourcePids) {
return myMdmLinkDao
.expandPidsByGoldenResourcePidsOrSourcePidsAndMatchResult(
JpaPid.toLongList(theSourcePids), MdmMatchResultEnum.MATCH)
.stream()
.map(this::daoTupleToMdmTuple)
.distinct()
.collect(Collectors.toList());
}

@Override
public List<JpaPid> findPidByResourceNameAndThreshold(
String theResourceName, Date theHighThreshold, Pageable thePageable) {

@@ -92,7 +92,8 @@ public class ExtendedHSearchSearchBuilder {
String theResourceType, SearchParameterMap myParams, ISearchParamRegistry theSearchParamRegistry) {
boolean canUseHibernate = false;

ResourceSearchParams resourceActiveSearchParams = theSearchParamRegistry.getActiveSearchParams(theResourceType);
ResourceSearchParams resourceActiveSearchParams = theSearchParamRegistry.getActiveSearchParams(
theResourceType, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);
for (String paramName : myParams.keySet()) {
// is this parameter supported?
if (illegalForHibernateSearch(paramName, resourceActiveSearchParams)) {

@@ -218,7 +219,8 @@ public class ExtendedHSearchSearchBuilder {

// copy the keys to avoid concurrent modification error
ArrayList<String> paramNames = compileParamNames(searchParameterMap);
ResourceSearchParams activeSearchParams = searchParamRegistry.getActiveSearchParams(resourceType);
ResourceSearchParams activeSearchParams = searchParamRegistry.getActiveSearchParams(
resourceType, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);
for (String nextParam : paramNames) {
if (illegalForHibernateSearch(nextParam, activeSearchParams)) {
// ignore magic params handled in JPA

@@ -151,7 +151,8 @@ public class HSearchSortHelperImpl implements IHSearchSortHelper {
*/
@VisibleForTesting
Optional<RestSearchParameterTypeEnum> getParamType(String theResourceTypeName, String theParamName) {
ResourceSearchParams activeSearchParams = mySearchParamRegistry.getActiveSearchParams(theResourceTypeName);
ResourceSearchParams activeSearchParams = mySearchParamRegistry.getActiveSearchParams(
theResourceTypeName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);
RuntimeSearchParam searchParam = activeSearchParams.get(theParamName);
if (searchParam == null) {
return Optional.empty();

@@ -267,13 +267,14 @@ public class TermValueSet implements Serializable {
return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE)
.append("id", myId)
.append("url", myUrl)
.append(myResource != null ? ("resource=" + myResource.toString()) : ("resource=(null)"))
.append("version", myVersion)
.append("resourcePid", myResourcePid)
.append("name", myName)
.append(myConcepts != null ? ("concepts - size=" + myConcepts.size()) : ("concepts=(null)"))
.append(myConcepts != null ? ("conceptCount=" + myConcepts.size()) : ("concepts=(null)"))
.append("totalConcepts", myTotalConcepts)
.append("totalConceptDesignations", myTotalConceptDesignations)
.append("expansionStatus", myExpansionStatus)
.append(myResource != null ? ("resId=" + myResource) : ("resource=(null)"))
.toString();
}
}

@@ -181,7 +181,8 @@ public class GraphQLProviderWithIntrospection extends GraphQLProvider {
for (String nextResourceType : theResourceTypes) {
StructureDefinition sd = fetchStructureDefinition(nextResourceType);
List<SearchParameter> parameters = toR5SearchParams(mySearchParamRegistry
.getActiveSearchParams(nextResourceType)
.getActiveSearchParams(
nextResourceType, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH)
.values());
myGenerator.generateResource(writer, sd, parameters, theOperations);
}

@@ -198,7 +199,8 @@ public class GraphQLProviderWithIntrospection extends GraphQLProvider {
}
if (theOperations.contains(GraphQLSchemaGenerator.FHIROperationType.SEARCH)) {
List<SearchParameter> parameters = toR5SearchParams(mySearchParamRegistry
.getActiveSearchParams(nextResourceType)
.getActiveSearchParams(
nextResourceType, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH)
.values());
myGenerator.generateListAccessQuery(writer, parameters, nextResourceType);
myGenerator.generateConnectionAccessQuery(writer, parameters, nextResourceType);

@@ -125,6 +125,33 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
init700();
init720();
init740();
init780();
}

protected void init780() {
final Builder version = forVersion(VersionEnum.V7_8_0);

version.onTable("NPM_PACKAGE_VER")
.addColumn("20241023.10", "PKG_AUTHOR")
.nullable()
.type(ColumnTypeEnum.STRING, 512);
version.onTable("NPM_PACKAGE_VER")
.addColumn("20241023.20", "AUTHOR_UPPER")
.nullable()
.type(ColumnTypeEnum.STRING, 512);
version.onTable("NPM_PACKAGE_VER")
.modifyColumn("20241023.30", "PKG_DESC")
.nullable()
.withType(ColumnTypeEnum.STRING, 512);
version.onTable("NPM_PACKAGE_VER")
.modifyColumn("20241023.40", "DESC_UPPER")
.nullable()
.withType(ColumnTypeEnum.STRING, 512);

version.onTable("NPM_PACKAGE")
.modifyColumn("20241023.50", "PACKAGE_DESC")
.nullable()
.withType(ColumnTypeEnum.STRING, 512);
}

protected void init740() {

@@ -21,6 +21,8 @@ package ca.uhn.fhir.jpa.model.cross;

import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.model.entity.PartitionablePartitionId;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;

import java.util.Date;

@@ -59,4 +61,14 @@ public class JpaResourceLookup implements IResourceLookup<JpaPid> {

return jpaPid;
}

@Override
public String toString() {
return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE)
.append("resType", myResourceType)
.append("resPid", myResourcePid)
.append("deletedAt", myDeletedAt)
.append("partId", myPartitionablePartitionId)
.toString();
}
}

@@ -301,15 +301,10 @@ public class JpaPackageCache extends BasePackageCacheManager implements IHapiPac

boolean currentVersion =
updateCurrentVersionFlagForAllPackagesBasedOnNewIncomingVersion(packageId, packageVersionId);
String packageDesc = null;
if (npmPackage.description() != null) {
if (npmPackage.description().length() > NpmPackageVersionEntity.PACKAGE_DESC_LENGTH) {
packageDesc = npmPackage.description().substring(0, NpmPackageVersionEntity.PACKAGE_DESC_LENGTH - 4)
+ "...";
} else {
packageDesc = npmPackage.description();
}
}

String packageDesc = truncateStorageString(npmPackage.description());
String packageAuthor = truncateStorageString(npmPackage.getNpm().asString("author"));

if (currentVersion) {
getProcessingMessages(npmPackage)
.add("Marking package " + packageId + "#" + initialPackageVersionId + " as current version");

@@ -327,6 +322,7 @@ public class JpaPackageCache extends BasePackageCacheManager implements IHapiPac
packageVersion.setPackage(pkg);
packageVersion.setPackageBinary(persistedPackage);
packageVersion.setSavedTime(new Date());
packageVersion.setAuthor(packageAuthor);
packageVersion.setDescription(packageDesc);
packageVersion.setFhirVersionId(npmPackage.fhirVersion());
packageVersion.setFhirVersion(fhirVersion);

@@ -625,6 +621,7 @@ public class JpaPackageCache extends BasePackageCacheManager implements IHapiPac

NpmPackageMetadataJson.Version version = new NpmPackageMetadataJson.Version();
version.setFhirVersion(next.getFhirVersionId());
version.setAuthor(next.getAuthor());
version.setDescription(next.getDescription());
version.setName(next.getPackageId());
version.setVersion(next.getVersionId());

@@ -682,7 +679,8 @@ public class JpaPackageCache extends BasePackageCacheManager implements IHapiPac
retVal.addObject()
.getPackage()
.setName(next.getPackageId())
.setDescription(next.getPackage().getDescription())
.setAuthor(next.getAuthor())
.setDescription(next.getDescription())
.setVersion(next.getVersionId())
.addFhirVersion(next.getFhirVersionId())
.setBytes(next.getPackageSizeBytes());

@@ -791,10 +789,21 @@ public class JpaPackageCache extends BasePackageCacheManager implements IHapiPac
predicates.add(theCb.equal(resources.get("myCanonicalUrl"), thePackageSearchSpec.getResourceUrl()));
}

if (isNotBlank(thePackageSearchSpec.getVersion())) {
String searchTerm = thePackageSearchSpec.getVersion() + "%";
predicates.add(theCb.like(theRoot.get("myVersionId"), searchTerm));
}

if (isNotBlank(thePackageSearchSpec.getDescription())) {
String searchTerm = "%" + thePackageSearchSpec.getDescription() + "%";
searchTerm = StringUtil.normalizeStringForSearchIndexing(searchTerm);
predicates.add(theCb.like(theRoot.get("myDescriptionUpper"), searchTerm));
predicates.add(theCb.like(theCb.upper(theRoot.get("myDescriptionUpper")), searchTerm));
}

if (isNotBlank(thePackageSearchSpec.getAuthor())) {
String searchTerm = "%" + thePackageSearchSpec.getAuthor() + "%";
searchTerm = StringUtil.normalizeStringForSearchIndexing(searchTerm);
predicates.add(theCb.like(theRoot.get("myAuthorUpper"), searchTerm));
}

if (isNotBlank(thePackageSearchSpec.getFhirVersion())) {

@@ -816,4 +825,21 @@ public class JpaPackageCache extends BasePackageCacheManager implements IHapiPac
return (List<String>)
thePackage.getUserData().computeIfAbsent("JpPackageCache_ProcessingMessages", t -> new ArrayList<>());
}

/**
* Truncates a string to {@link NpmPackageVersionEntity#PACKAGE_DESC_LENGTH} which is
* the maximum length used on several columns in {@link NpmPackageVersionEntity}. If the
* string is longer than the maximum allowed, it is truncated and "..." is appended so
* that the result still fits within the limit.
*/
private static String truncateStorageString(String theInput) {
String retVal = null;
if (theInput != null) {
if (theInput.length() > NpmPackageVersionEntity.PACKAGE_DESC_LENGTH) {
retVal = theInput.substring(0, NpmPackageVersionEntity.PACKAGE_DESC_LENGTH - 4) + "...";
} else {
retVal = theInput;
}
}
return retVal;
}
}
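Worked example of the truncation helper above, assuming PACKAGE_DESC_LENGTH is 512 as declared on NpmPackageVersionEntity: oversized input is cut to 508 characters plus "...", so the stored value is at most 511 characters and always fits the column.

class TruncateSketch {
	static final int PACKAGE_DESC_LENGTH = 512; // assumed to match NpmPackageVersionEntity

	static String truncateStorageString(String input) {
		if (input == null) {
			return null;
		}
		// same rule as the method above: substring to (limit - 4) and append the ellipsis
		return input.length() > PACKAGE_DESC_LENGTH
				? input.substring(0, PACKAGE_DESC_LENGTH - 4) + "..."
				: input;
	}

	public static void main(String[] args) {
		String longDesc = "x".repeat(600);
		System.out.println(truncateStorageString(longDesc).length()); // 511
		System.out.println(truncateStorageString("short"));           // short
	}
}
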
@@ -115,6 +115,9 @@ public class NpmPackageMetadataJson {
@JsonProperty("version")
private String myVersion;

@JsonProperty("author")
private String myAuthor;

@JsonProperty("description")
private String myDescription;

@@ -125,6 +128,14 @@ public class NpmPackageMetadataJson {
@JsonProperty("_bytes")
private long myBytes;

public String getAuthor() {
return myAuthor;
}

public void setAuthor(String theAuthor) {
myAuthor = theAuthor;
}

public String getName() {
return myName;
}

@@ -111,6 +111,9 @@ public class NpmPackageSearchResultJson {
@JsonProperty("version")
private String myVersion;

@JsonProperty("author")
private String myAuthor;

@JsonProperty("description")
private String myDescription;

@@ -171,5 +174,14 @@ public class NpmPackageSearchResultJson {
}
return this;
}

public String getAuthor() {
return myAuthor;
}

public Package setAuthor(String theAuthor) {
myAuthor = theAuthor;
return this;
}
}
}

@@ -27,6 +27,8 @@ public class PackageSearchSpec {
private String myResourceUrl;
private CharSequence myDescription;
private String myFhirVersion;
private String myVersion;
private String myAuthor;

public String getFhirVersion() {
return myFhirVersion;

@@ -69,4 +71,20 @@ public class PackageSearchSpec {
public void setDescription(CharSequence theDescription) {
myDescription = theDescription;
}

public String getVersion() {
return myVersion;
}

public void setVersion(String theVersion) {
myVersion = theVersion;
}

public void setAuthor(String theAuthor) {
myAuthor = theAuthor;
}

public String getAuthor() {
return myAuthor;
}
}

@@ -190,11 +190,12 @@ public class JpaConformanceProviderDstu3 extends org.hl7.fhir.dstu3.hapi.rest.se
* global params like _lastUpdated
*/
ResourceSearchParams searchParams;
ResourceSearchParams serverConfigurationActiveSearchParams =
myServerConfiguration.getActiveSearchParams(theResourceName);
ResourceSearchParams serverConfigurationActiveSearchParams = myServerConfiguration.getActiveSearchParams(
theResourceName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);
if (mySearchParamRegistry != null) {
searchParams =
mySearchParamRegistry.getActiveSearchParams(theResourceName).makeCopy();
searchParams = mySearchParamRegistry
.getActiveSearchParams(theResourceName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH)
.makeCopy();
if (searchParams == null) {
return ResourceSearchParams.empty(theResourceName);
}

@@ -229,8 +230,8 @@ public class JpaConformanceProviderDstu3 extends org.hl7.fhir.dstu3.hapi.rest.se
if (isBlank(otherResourceType)) {
continue;
}
ResourceSearchParams activeSearchParams =
mySearchParamRegistry.getActiveSearchParams(otherResourceType);
ResourceSearchParams activeSearchParams = mySearchParamRegistry.getActiveSearchParams(
otherResourceType, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);
activeSearchParams.values().stream()
.filter(t -> isNotBlank(t.getName()))
.filter(t -> t.getTargets().contains(resourcename))

@@ -489,8 +489,13 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc<JpaPid> {
}

if (!Constants.INCLUDE_STAR.equals(paramName)
&& mySearchParamRegistry.getActiveSearchParam(paramType, paramName) == null) {
List<String> validNames = mySearchParamRegistry.getActiveSearchParams(paramType).values().stream()
&& mySearchParamRegistry.getActiveSearchParam(
paramType, paramName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH)
== null) {
List<String> validNames = mySearchParamRegistry
.getActiveSearchParams(paramType, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH)
.values()
.stream()
.filter(t -> t.getParamType() == RestSearchParameterTypeEnum.REFERENCE)
.map(t -> UrlUtil.sanitizeUrlPart(t.getName()))
.sorted()

@@ -318,7 +318,8 @@ public class QueryStack {
}

String targetType = null;
RuntimeSearchParam param = mySearchParamRegistry.getActiveSearchParam(theResourceName, theParamName);
RuntimeSearchParam param = mySearchParamRegistry.getActiveSearchParam(
theResourceName, theParamName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);
if (theReferenceTargetType != null) {
targetType = theReferenceTargetType;
} else if (param.getTargets().size() > 1) {

@@ -334,17 +335,20 @@ public class QueryStack {
+ "' as this parameter does not define a target type. Please specify the target type.");
}

RuntimeSearchParam targetSearchParameter = mySearchParamRegistry.getActiveSearchParam(targetType, theChain);
RuntimeSearchParam targetSearchParameter = mySearchParamRegistry.getActiveSearchParam(
targetType, theChain, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);
if (targetSearchParameter == null) {
Collection<String> validSearchParameterNames =
mySearchParamRegistry.getActiveSearchParams(targetType).values().stream()
.filter(t -> t.getParamType() == RestSearchParameterTypeEnum.STRING
|| t.getParamType() == RestSearchParameterTypeEnum.TOKEN
|| t.getParamType() == RestSearchParameterTypeEnum.DATE)
.map(RuntimeSearchParam::getName)
.sorted()
.distinct()
.collect(Collectors.toList());
Collection<String> validSearchParameterNames = mySearchParamRegistry
.getActiveSearchParams(targetType, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH)
.values()
.stream()
.filter(t -> t.getParamType() == RestSearchParameterTypeEnum.STRING
|| t.getParamType() == RestSearchParameterTypeEnum.TOKEN
|| t.getParamType() == RestSearchParameterTypeEnum.DATE)
.map(RuntimeSearchParam::getName)
.sorted()
.distinct()
.collect(Collectors.toList());
String msg = myFhirContext
.getLocalizer()
.getMessageSanitized(

@@ -1007,10 +1011,11 @@ public class QueryStack {
return createPredicateSource(null, Collections.singletonList(param));
}
default:
RuntimeSearchParam searchParam = mySearchParamRegistry.getActiveSearchParam(theResourceName, paramName);
RuntimeSearchParam searchParam = mySearchParamRegistry.getActiveSearchParam(
theResourceName, paramName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);
if (searchParam == null) {
Collection<String> validNames =
mySearchParamRegistry.getValidSearchParameterNamesIncludingMeta(theResourceName);
Collection<String> validNames = mySearchParamRegistry.getValidSearchParameterNamesIncludingMeta(
theResourceName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);
String msg = myFhirContext
.getLocalizer()
.getMessageSanitized(

@@ -1168,13 +1173,14 @@ public class QueryStack {
// Ensure that the name of the search param
// (e.g. the `code` in Patient?_has:Observation:subject:code=sys|val)
// exists on the target resource type.
RuntimeSearchParam owningParameterDef =
mySearchParamRegistry.getRuntimeSearchParam(targetResourceType, paramName);
RuntimeSearchParam owningParameterDef = mySearchParamRegistry.getRuntimeSearchParam(
targetResourceType, paramName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);

// Ensure that the name of the back-referenced search param on the target (e.g. the `subject` in
// Patient?_has:Observation:subject:code=sys|val)
// exists on the target resource, or in the top-level Resource resource.
mySearchParamRegistry.getRuntimeSearchParam(targetResourceType, paramReference);
mySearchParamRegistry.getRuntimeSearchParam(
targetResourceType, paramReference, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);

IQueryParameterAnd<?> parsedParam = JpaParamUtil.parseQueryParams(
mySearchParamRegistry, myFhirContext, owningParameterDef, paramName, parameters);

@@ -1364,7 +1370,7 @@ public class QueryStack {
theRequestPartitionId));
} else {
List<QuantityParam> quantityParams =
theList.stream().map(t -> QuantityParam.toQuantityParam(t)).collect(Collectors.toList());
theList.stream().map(QuantityParam::toQuantityParam).collect(Collectors.toList());

BaseQuantityPredicateBuilder join = null;
boolean normalizedSearchEnabled = myStorageSettings

@@ -1372,8 +1378,8 @@ public class QueryStack {
.equals(NormalizedQuantitySearchLevel.NORMALIZED_QUANTITY_SEARCH_SUPPORTED);
if (normalizedSearchEnabled) {
List<QuantityParam> normalizedQuantityParams = quantityParams.stream()
.map(t -> UcumServiceUtil.toCanonicalQuantityOrNull(t))
.filter(t -> t != null)
.map(UcumServiceUtil::toCanonicalQuantityOrNull)
.filter(Objects::nonNull)
.collect(Collectors.toList());

if (normalizedQuantityParams.size() == quantityParams.size()) {

@@ -2494,7 +2500,8 @@ public class QueryStack {
RequestDetails theRequest,
RequestPartitionId theRequestPartitionId) {
List<Condition> andPredicates = new ArrayList<>();
RuntimeSearchParam nextParamDef = mySearchParamRegistry.getActiveSearchParam(theResourceName, theParamName);
RuntimeSearchParam nextParamDef = mySearchParamRegistry.getActiveSearchParam(
theResourceName, theParamName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);
if (nextParamDef != null) {

if (myPartitionSettings.isPartitioningEnabled() && myPartitionSettings.isIncludePartitionInSearchHashes()) {

@@ -2701,15 +2708,33 @@ public class QueryStack {
}

} else {
String msg = myFhirContext
.getLocalizer()
.getMessageSanitized(
BaseStorageDao.class,
"invalidSearchParameter",
theParamName,
theResourceName,
mySearchParamRegistry.getValidSearchParameterNamesIncludingMeta(theResourceName));
throw new InvalidRequestException(Msg.code(1223) + msg);
RuntimeSearchParam notEnabledForSearchParam = mySearchParamRegistry.getActiveSearchParam(
theResourceName, theParamName, ISearchParamRegistry.SearchParamLookupContextEnum.ALL);
if (notEnabledForSearchParam == null) {
String msg = myFhirContext
.getLocalizer()
.getMessageSanitized(
BaseStorageDao.class,
"invalidSearchParameter",
theParamName,
theResourceName,
mySearchParamRegistry.getValidSearchParameterNamesIncludingMeta(
theResourceName,
ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH));
throw new InvalidRequestException(Msg.code(1223) + msg);
} else {
String msg = myFhirContext
.getLocalizer()
.getMessageSanitized(
BaseStorageDao.class,
"invalidSearchParameterNotEnabledForSearch",
theParamName,
theResourceName,
mySearchParamRegistry.getValidSearchParameterNamesIncludingMeta(
theResourceName,
ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH));
throw new InvalidRequestException(Msg.code(2540) + msg);
}
}
}
}
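The replacement block above distinguishes two failure modes that previously shared one message: a parameter unknown in any context still fails with HAPI-1223, while a parameter that exists but is not enabled for searching (for example, while its index is still being built) now fails with the new HAPI-2540 message. A schematic sketch of the branch (hypothetical helper, not HAPI API):

class SearchParamErrorSketch {
	static String errorFor(boolean knownInAnyContext) {
		// mirrors the lookup above: ALL context finds the parameter even when
		// it is disabled for SEARCH, so a non-null result means "exists but disabled"
		return knownInAnyContext
				? "HAPI-2540: parameter exists but is not enabled for searching"
				: "HAPI-1223: invalid/unknown search parameter";
	}

	public static void main(String[] args) {
		System.out.println(errorFor(false)); // unknown parameter
		System.out.println(errorFor(true));  // known, but disabled for search
	}
}
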
@@ -2740,8 +2765,8 @@ public class QueryStack {
ReferenceParam param = (ReferenceParam) nextAnd.get(0);
if (isNotBlank(param.getChain())) {
String fullName = theParamName + "." + param.getChain();
RuntimeSearchParam fullChainParam =
mySearchParamRegistry.getActiveSearchParam(theResourceName, fullName);
RuntimeSearchParam fullChainParam = mySearchParamRegistry.getActiveSearchParam(
theResourceName, fullName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);
if (fullChainParam != null) {
List<IQueryParameterType> swappedParamTypes = nextAnd.stream()
.map(t -> newParameterInstance(fullChainParam, null, t.getValueAsQueryToken(myFhirContext)))

@@ -2808,8 +2833,10 @@ public class QueryStack {
if (indexOnContainedResources) {
return true;
}
RuntimeSearchParam param =
mySearchParamRegistry.getActiveSearchParam(theResourceType, theParameterName);
RuntimeSearchParam param = mySearchParamRegistry.getActiveSearchParam(
theResourceType,
theParameterName,
ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);
return param != null && param.hasUpliftRefchain(t);
});

@@ -3049,7 +3076,8 @@ public class QueryStack {
for (String nextTarget : thePreviousSearchParam.getTargets()) {
RuntimeSearchParam nextSearchParam = null;
if (isBlank(theResourceType) || theResourceType.equals(nextTarget)) {
nextSearchParam = mySearchParamRegistry.getActiveSearchParam(nextTarget, nextParamName);
nextSearchParam = mySearchParamRegistry.getActiveSearchParam(
nextTarget, nextParamName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);
}
if (nextSearchParam != null) {
searchParamFound = true;

@@ -36,6 +36,7 @@ import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IDao;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
import ca.uhn.fhir.jpa.api.svc.ResolveIdentityMode;
import ca.uhn.fhir.jpa.config.HapiFhirLocalContainerEntityManagerFactoryBean;
import ca.uhn.fhir.jpa.config.HibernatePropertiesProvider;
import ca.uhn.fhir.jpa.dao.BaseStorageDao;

@@ -49,6 +50,7 @@ import ca.uhn.fhir.jpa.dao.search.ResourceNotFoundInIndexException;
import ca.uhn.fhir.jpa.entity.ResourceSearchView;
import ca.uhn.fhir.jpa.interceptor.JpaPreResourceAccessDetails;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.cross.IResourceLookup;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.model.entity.BaseResourceIndexedSearchParam;
import ca.uhn.fhir.jpa.model.entity.IBaseResourceEntity;

@@ -84,6 +86,7 @@ import ca.uhn.fhir.rest.api.SortOrderEnum;
import ca.uhn.fhir.rest.api.SortSpec;
import ca.uhn.fhir.rest.api.server.IPreResourceAccessDetails;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId;
import ca.uhn.fhir.rest.param.BaseParamWithPrefix;
import ca.uhn.fhir.rest.param.DateParam;
import ca.uhn.fhir.rest.param.DateRangeParam;

@@ -117,6 +120,7 @@ import org.apache.commons.lang3.math.NumberUtils;
import org.apache.commons.lang3.tuple.Pair;
import org.hl7.fhir.instance.model.api.IAnyResource;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;

@@ -554,10 +558,8 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
throw new InvalidRequestException(Msg.code(2027)
+ "LastN operation is not enabled on this service, can not process this request");
}
return myFulltextSearchSvc.lastN(myParams, theMaximumResults).stream()
.map(lastNResourceId -> myIdHelperService.resolveResourcePersistentIds(
myRequestPartitionId, myResourceName, String.valueOf(lastNResourceId)))
.collect(Collectors.toList());
List<IResourcePersistentId> persistentIds = myFulltextSearchSvc.lastN(myParams, theMaximumResults);
return persistentIds.stream().map(t -> (JpaPid) t).collect(Collectors.toList());
} else {
throw new InvalidRequestException(
Msg.code(2033) + "LastN operation is not enabled on this service, can not process this request");

@@ -578,7 +580,13 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
idParamValue = idParm.getValue();
}

pid = myIdHelperService.resolveResourcePersistentIds(myRequestPartitionId, myResourceName, idParamValue);
pid = myIdHelperService
.resolveResourceIdentity(
myRequestPartitionId,
myResourceName,
idParamValue,
ResolveIdentityMode.includeDeleted().cacheOk())
.getPersistentId();
}
return myFulltextSearchSvc.everything(myResourceName, myParams, pid, theRequestDetails);
}

@@ -600,37 +608,46 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {

/**
* Combs through the params for any _id parameters and extracts the PIDs for them
*
* @param theTargetPids
*/
private void extractTargetPidsFromIdParams(Set<Long> theTargetPids) {
// get all the IQueryParameterType objects
// for _id -> these should all be StringParam values
HashSet<String> ids = new HashSet<>();
HashSet<IIdType> ids = new HashSet<>();
List<List<IQueryParameterType>> params = myParams.get(IAnyResource.SP_RES_ID);
for (List<IQueryParameterType> paramList : params) {
for (IQueryParameterType param : paramList) {
String id;
if (param instanceof StringParam) {
// we expect all _id values to be StringParams
ids.add(((StringParam) param).getValue());
id = ((StringParam) param).getValue();
} else if (param instanceof TokenParam) {
ids.add(((TokenParam) param).getValue());
id = ((TokenParam) param).getValue();
} else {
// we do not expect the _id parameter to be a non-string value
throw new IllegalArgumentException(
Msg.code(1193) + "_id parameter must be a StringParam or TokenParam");
}

IIdType idType = myContext.getVersion().newIdType();
if (id.contains("/")) {
idType.setValue(id);
} else {
idType.setValue(myResourceName + "/" + id);
}
ids.add(idType);
}
}

// fetch our target Pids
// this will throw if an id is not found
Map<String, JpaPid> idToPid = myIdHelperService.resolveResourcePersistentIds(
myRequestPartitionId, myResourceName, new ArrayList<>(ids));
Map<IIdType, IResourceLookup<JpaPid>> idToIdentity = myIdHelperService.resolveResourceIdentities(
myRequestPartitionId,
new ArrayList<>(ids),
ResolveIdentityMode.failOnDeleted().noCacheUnlessDeletesDisabled());

// add the pids to targetPids
for (JpaPid pid : idToPid.values()) {
theTargetPids.add(pid.getId());
for (IResourceLookup pid : idToIdentity.values()) {
theTargetPids.add((Long) pid.getPersistentId().getId());
}
}
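The rewritten loop above now carries fully-qualified IIdType values instead of raw strings. A small sketch of the qualification rule, using HAPI's R4 IdType for concreteness (illustrative; the production code builds version-appropriate ids through myContext.getVersion().newIdType()):

import org.hl7.fhir.r4.model.IdType;

class IdQualifySketch {
	static IdType qualify(String resourceName, String rawValue) {
		// values that already carry a type ("Group/9") pass through untouched;
		// bare ids are prefixed with the resource type being searched
		String value = rawValue.contains("/") ? rawValue : resourceName + "/" + rawValue;
		return new IdType(value);
	}

	public static void main(String[] args) {
		System.out.println(qualify("Patient", "123").getValue());     // Patient/123
		System.out.println(qualify("Patient", "Group/9").getValue()); // Group/9 (already qualified)
	}
}
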
@@ -676,8 +693,8 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
|| theParams.getSort() != null
|| theParams.keySet().contains(Constants.PARAM_HAS)
|| isPotentiallyContainedReferenceParameterExistsAtRoot(theParams)) {
List<RuntimeSearchParam> activeComboParams =
mySearchParamRegistry.getActiveComboSearchParams(myResourceName, theParams.keySet());
List<RuntimeSearchParam> activeComboParams = mySearchParamRegistry.getActiveComboSearchParams(
myResourceName, theParams.keySet(), ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);
if (activeComboParams.isEmpty()) {
sqlBuilder.setNeedResourceTableRoot(true);
}

@@ -952,8 +969,8 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
theQueryStack.addSortOnLastUpdated(ascending);

} else {
RuntimeSearchParam param =
mySearchParamRegistry.getActiveSearchParam(myResourceName, theSort.getParamName());
RuntimeSearchParam param = mySearchParamRegistry.getActiveSearchParam(
myResourceName, theSort.getParamName(), ISearchParamRegistry.SearchParamLookupContextEnum.SORT);

/*
* If we have a sort like _sort=subject.name and we have an

@@ -977,8 +994,8 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
referenceParamTargetType = referenceParam.substring(0, colonIdx);
referenceParam = referenceParam.substring(colonIdx + 1);
}
RuntimeSearchParam outerParam =
mySearchParamRegistry.getActiveSearchParam(myResourceName, referenceParam);
RuntimeSearchParam outerParam = mySearchParamRegistry.getActiveSearchParam(
myResourceName, referenceParam, ISearchParamRegistry.SearchParamLookupContextEnum.SORT);
if (outerParam == null) {
throwInvalidRequestExceptionForUnknownSortParameter(myResourceName, referenceParam);
} else if (outerParam.hasUpliftRefchain(targetParam)) {

@@ -986,8 +1003,10 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
if (referenceParamTargetType != null && !referenceParamTargetType.equals(nextTargetType)) {
continue;
}
RuntimeSearchParam innerParam =
mySearchParamRegistry.getActiveSearchParam(nextTargetType, targetParam);
RuntimeSearchParam innerParam = mySearchParamRegistry.getActiveSearchParam(
nextTargetType,
targetParam,
ISearchParamRegistry.SearchParamLookupContextEnum.SORT);
if (innerParam != null) {
param = innerParam;
break;

@@ -1021,7 +1040,8 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
}

if (param == null) {
param = mySearchParamRegistry.getActiveSearchParam(myResourceName, paramName);
param = mySearchParamRegistry.getActiveSearchParam(
myResourceName, paramName, ISearchParamRegistry.SearchParamLookupContextEnum.SORT);
}

if (param == null) {

@@ -1100,8 +1120,8 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
}

private void throwInvalidRequestExceptionForUnknownSortParameter(String theResourceName, String theParamName) {
Collection<String> validSearchParameterNames =
mySearchParamRegistry.getValidSearchParameterNamesIncludingMeta(theResourceName);
Collection<String> validSearchParameterNames = mySearchParamRegistry.getValidSearchParameterNamesIncludingMeta(
theResourceName, ISearchParamRegistry.SearchParamLookupContextEnum.SORT);
String msg = myContext
.getLocalizer()
.getMessageSanitized(

@@ -1564,7 +1584,8 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {

String paramName = nextInclude.getParamName();
if (isNotBlank(paramName)) {
param = mySearchParamRegistry.getActiveSearchParam(resType, paramName);
param = mySearchParamRegistry.getActiveSearchParam(
resType, paramName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);
} else {
param = null;
}

@@ -1903,7 +1924,10 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
// in this context, so let's just assume it could be anything.
targetResourceTypes = possibleTypes;
} else {
for (var next : mySearchParamRegistry.getActiveSearchParams(myResourceName).values().stream()
for (var next : mySearchParamRegistry
.getActiveSearchParams(myResourceName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH)
.values()
.stream()
.filter(t -> t.getParamType().equals(RestSearchParameterTypeEnum.REFERENCE))
.collect(Collectors.toList())) {

@@ -1986,16 +2010,16 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
QueryStack theQueryStack, @Nonnull SearchParameterMap theParams, RequestDetails theRequest) {
RuntimeSearchParam comboParam = null;
List<String> comboParamNames = null;
List<RuntimeSearchParam> exactMatchParams =
mySearchParamRegistry.getActiveComboSearchParams(myResourceName, theParams.keySet());
List<RuntimeSearchParam> exactMatchParams = mySearchParamRegistry.getActiveComboSearchParams(
myResourceName, theParams.keySet(), ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);
if (!exactMatchParams.isEmpty()) {
comboParam = exactMatchParams.get(0);
comboParamNames = new ArrayList<>(theParams.keySet());
}

if (comboParam == null) {
List<RuntimeSearchParam> candidateComboParams =
mySearchParamRegistry.getActiveComboSearchParams(myResourceName);
List<RuntimeSearchParam> candidateComboParams = mySearchParamRegistry.getActiveComboSearchParams(
myResourceName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);
for (RuntimeSearchParam nextCandidate : candidateComboParams) {
List<String> nextCandidateParamNames =
JpaParamUtil.resolveComponentParameters(mySearchParamRegistry, nextCandidate).stream()

@@ -2064,8 +2088,8 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
IQueryParameterType nextOr = nextPermutation.get(paramIndex);
String nextOrValue = nextOr.getValueAsQueryToken(myContext);

RuntimeSearchParam nextParamDef =
mySearchParamRegistry.getActiveSearchParam(myResourceName, nextParamName);
RuntimeSearchParam nextParamDef = mySearchParamRegistry.getActiveSearchParam(
myResourceName, nextParamName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);
if (theComboParam.getComboSearchParamType() == ComboSearchParamType.NON_UNIQUE) {
if (nextParamDef.getParamType() == RestSearchParameterTypeEnum.STRING) {
nextOrValue = StringUtil.normalizeStringForSearchIndexing(nextOrValue);

@@ -2180,7 +2204,8 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {

// Reference params are only eligible for using a composite index if they
// are qualified
RuntimeSearchParam nextParamDef = mySearchParamRegistry.getActiveSearchParam(myResourceName, nextParamName);
RuntimeSearchParam nextParamDef = mySearchParamRegistry.getActiveSearchParam(
myResourceName, nextParamName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);
if (nextParamDef.getParamType() == RestSearchParameterTypeEnum.REFERENCE) {
ReferenceParam param = (ReferenceParam) nextValues.get(0).get(0);
if (isBlank(param.getResourceType())) {

@@ -20,6 +20,7 @@
package ca.uhn.fhir.jpa.search.builder.predicate;

import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedComboStringUnique;
import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryBuilder;
import ca.uhn.fhir.jpa.util.QueryParameterUtils;
import com.healthmarketscience.sqlbuilder.Condition;

@@ -35,7 +36,9 @@ public class ComboUniqueSearchParameterPredicateBuilder extends BaseSearchParamP
* Constructor
*/
public ComboUniqueSearchParameterPredicateBuilder(SearchQueryBuilder theSearchSqlBuilder) {
super(theSearchSqlBuilder, theSearchSqlBuilder.addTable("HFJ_IDX_CMP_STRING_UNIQ"));
super(
theSearchSqlBuilder,
theSearchSqlBuilder.addTable(ResourceIndexedComboStringUnique.HFJ_IDX_CMP_STRING_UNIQ));

myColumnString = getTable().addColumn("IDX_STRING");
}

@ -21,24 +21,27 @@ package ca.uhn.fhir.jpa.search.builder.predicate;
|
|||
|
||||
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
|
||||
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
|
||||
import ca.uhn.fhir.jpa.api.svc.ResolveIdentityMode;
|
||||
import ca.uhn.fhir.jpa.dao.predicate.SearchFilterParser;
|
||||
import ca.uhn.fhir.jpa.model.cross.IResourceLookup;
|
||||
import ca.uhn.fhir.jpa.model.dao.JpaPid;
|
||||
import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryBuilder;
|
||||
import ca.uhn.fhir.jpa.util.QueryParameterUtils;
|
||||
import ca.uhn.fhir.model.api.IQueryParameterType;
|
||||
import ca.uhn.fhir.rest.param.TokenParam;
|
||||
import ca.uhn.fhir.rest.param.TokenParamModifier;
|
||||
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
|
||||
import com.healthmarketscience.sqlbuilder.Condition;
|
||||
import com.healthmarketscience.sqlbuilder.dbspec.basic.DbColumn;
|
||||
import jakarta.annotation.Nullable;
|
||||
import org.hl7.fhir.r4.model.IdType;
|
||||
import org.hl7.fhir.instance.model.api.IIdType;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
import java.util.HashSet;
|
||||
import java.util.LinkedHashSet;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
import static org.apache.commons.lang3.ObjectUtils.defaultIfNull;
|
||||
|
@ -68,9 +71,8 @@ public class ResourceIdPredicateBuilder extends BasePredicateBuilder {
|
|||
Set<JpaPid> allOrPids = null;
|
||||
SearchFilterParser.CompareOperation defaultOperation = SearchFilterParser.CompareOperation.eq;
|
||||
|
||||
boolean allIdsAreForcedIds = true;
|
||||
for (List<? extends IQueryParameterType> nextValue : theValues) {
|
||||
Set<JpaPid> orPids = new HashSet<>();
|
||||
Set<IIdType> ids = new LinkedHashSet<>();
|
||||
boolean haveValue = false;
|
||||
for (IQueryParameterType next : nextValue) {
|
||||
String value = next.getValueAsQueryToken(getFhirContext());
|
||||
|
@ -78,21 +80,14 @@ public class ResourceIdPredicateBuilder extends BasePredicateBuilder {
|
|||
value = value.substring(1);
|
||||
}
|
||||
|
||||
IdType valueAsId = new IdType(value);
|
||||
if (isNotBlank(value)) {
|
||||
if (!myIdHelperService.idRequiresForcedId(valueAsId.getIdPart()) && allIdsAreForcedIds) {
|
||||
allIdsAreForcedIds = false;
|
||||
}
|
||||
haveValue = true;
|
||||
try {
|
||||
boolean excludeDeleted = true;
|
||||
JpaPid pid = myIdHelperService.resolveResourcePersistentIds(
|
||||
theRequestPartitionId, theResourceName, valueAsId.getIdPart(), excludeDeleted);
|
||||
orPids.add(pid);
|
||||
} catch (ResourceNotFoundException e) {
|
||||
// This is not an error in a search, it just results in no matches
|
||||
ourLog.debug("Resource ID {} was requested but does not exist", valueAsId.getIdPart());
|
||||
if (!value.contains("/")) {
|
||||
value = theResourceName + "/" + value;
|
||||
}
|
||||
IIdType id = getFhirContext().getVersion().newIdType();
|
||||
id.setValue(value);
|
||||
ids.add(id);
|
||||
}
|
||||
|
||||
if (next instanceof TokenParam) {
|
||||
|
@ -101,6 +96,20 @@ public class ResourceIdPredicateBuilder extends BasePredicateBuilder {
}
}
}

Set<JpaPid> orPids = new HashSet<>();

// We're joining this to a query that will explicitly ask for non-deleted,
// so we really only want the PID and can safely cache (even if a previously
// deleted status was cached, since it might now be undeleted)
Map<IIdType, IResourceLookup<JpaPid>> resolvedPids = myIdHelperService.resolveResourceIdentities(
theRequestPartitionId,
ids,
ResolveIdentityMode.includeDeleted().cacheOk());
for (IResourceLookup<JpaPid> lookup : resolvedPids.values()) {
orPids.add(lookup.getPersistentId());
}

if (haveValue) {
if (allOrPids == null) {
allOrPids = orPids;
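The hunk above replaces per-ID resolution inside the loop with a collect-then-resolve pattern: candidate IDs are accumulated into a LinkedHashSet and then resolved in a single resolveResourceIdentities() call. A minimal caller sketch, using only the API surface visible in this diff (fhirContext, idHelperService, and requestPartitionId are illustrative local names, not fields from the class):

Set<IIdType> ids = new LinkedHashSet<>();
IIdType id = fhirContext.getVersion().newIdType();
id.setValue("Patient/123"); // unqualified values get the resource type prepended first
ids.add(id);

// One call resolves every collected ID; including deleted resources is safe
// here because the enclosing query filters out deleted rows itself.
Map<IIdType, IResourceLookup<JpaPid>> lookups = idHelperService.resolveResourceIdentities(
        requestPartitionId, ids, ResolveIdentityMode.includeDeleted().cacheOk());
Set<JpaPid> pids = new HashSet<>();
for (IResourceLookup<JpaPid> lookup : lookups.values()) {
    pids.add(lookup.getPersistentId());
}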
@ -122,17 +131,19 @@ public class ResourceIdPredicateBuilder extends BasePredicateBuilder {

List<Long> resourceIds = JpaPid.toLongList(allOrPids);
if (theSourceJoinColumn == null) {
BaseJoiningPredicateBuilder queryRootTable = super.getOrCreateQueryRootTable(!allIdsAreForcedIds);
BaseJoiningPredicateBuilder queryRootTable = super.getOrCreateQueryRootTable(true);
Condition predicate;
switch (operation) {
default:
case eq:
predicate = queryRootTable.createPredicateResourceIds(false, resourceIds);
return queryRootTable.combineWithRequestPartitionIdPredicate(theRequestPartitionId, predicate);
break;
case ne:
predicate = queryRootTable.createPredicateResourceIds(true, resourceIds);
return queryRootTable.combineWithRequestPartitionIdPredicate(theRequestPartitionId, predicate);
break;
}
predicate = queryRootTable.combineWithRequestPartitionIdPredicate(theRequestPartitionId, predicate);
return predicate;
} else {
DbColumn resIdColumn = getResourceIdColumn(theSourceJoinColumn);
return QueryParameterUtils.toEqualToOrInPredicate(
@ -497,7 +497,8 @@ public class ResourceLinkPredicateBuilder extends BaseJoiningPredicateBuilder im

RuntimeSearchParam param = null;
if (!isMeta) {
param = mySearchParamRegistry.getActiveSearchParam(nextType, chain);
param = mySearchParamRegistry.getActiveSearchParam(
nextType, chain, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);
if (param == null) {
ourLog.debug("Type {} doesn't have search param {}", nextType, chain);
continue;
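This and the following hunks thread a new SearchParamLookupContextEnum argument through every registry lookup, so the registry knows whether a parameter is being fetched for query planning (SEARCH) or for any use at all (ALL, as used by the reindex service further down). A hedged sketch of the calling pattern; the resource and parameter names are illustrative:

RuntimeSearchParam param = mySearchParamRegistry.getActiveSearchParam(
        "Observation", "code", ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);
if (param == null) {
    // Either no such parameter exists, or it is not active for search use,
    // so the query planner treats it as unknown.
}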
|
@ -586,8 +587,8 @@ public class ResourceLinkPredicateBuilder extends BaseJoiningPredicateBuilder im
|
|||
resourceTypes = determineResourceTypes(Collections.singleton(theResourceName), theParamName);
|
||||
|
||||
if (resourceTypes.isEmpty()) {
|
||||
RuntimeSearchParam searchParamByName =
|
||||
mySearchParamRegistry.getActiveSearchParam(theResourceName, theParamName);
|
||||
RuntimeSearchParam searchParamByName = mySearchParamRegistry.getActiveSearchParam(
|
||||
theResourceName, theParamName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);
|
||||
if (searchParamByName == null) {
|
||||
throw new InternalErrorException(Msg.code(1244) + "Could not find parameter " + theParamName);
|
||||
}
|
||||
|
@ -659,7 +660,8 @@ public class ResourceLinkPredicateBuilder extends BaseJoiningPredicateBuilder im
if (linkIndex == -1) {
Set<Class<? extends IBaseResource>> resourceTypes = new HashSet<>();
for (String resourceName : theResourceNames) {
RuntimeSearchParam param = mySearchParamRegistry.getActiveSearchParam(resourceName, theParamNameChain);
RuntimeSearchParam param = mySearchParamRegistry.getActiveSearchParam(
resourceName, theParamNameChain, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);

if (param != null && param.hasTargets()) {
Set<String> targetTypes = param.getTargets();
@ -675,7 +677,8 @@ public class ResourceLinkPredicateBuilder extends BaseJoiningPredicateBuilder im
String paramNameTail = theParamNameChain.substring(linkIndex + 1);
Set<String> targetResourceTypeNames = new HashSet<>();
for (String resourceName : theResourceNames) {
RuntimeSearchParam param = mySearchParamRegistry.getActiveSearchParam(resourceName, paramNameHead);
RuntimeSearchParam param = mySearchParamRegistry.getActiveSearchParam(
resourceName, paramNameHead, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);

if (param != null && param.hasTargets()) {
targetResourceTypeNames.addAll(param.getTargets());
@ -687,7 +690,8 @@ public class ResourceLinkPredicateBuilder extends BaseJoiningPredicateBuilder im

public List<String> createResourceLinkPaths(
String theResourceName, String theParamName, List<String> theParamQualifiers) {
RuntimeSearchParam param = mySearchParamRegistry.getActiveSearchParam(theResourceName, theParamName);
RuntimeSearchParam param = mySearchParamRegistry.getActiveSearchParam(
theResourceName, theParamName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);
if (param != null) {
List<String> path = param.getPathsSplit();
@ -718,7 +722,8 @@ public class ResourceLinkPredicateBuilder extends BaseJoiningPredicateBuilder im
? theParamQualifiers.subList(1, theParamQualifiers.size())
: List.of();

param = mySearchParamRegistry.getActiveSearchParam(theResourceName, paramNameHead);
param = mySearchParamRegistry.getActiveSearchParam(
theResourceName, paramNameHead, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);
if (param != null) {
Set<String> tailPaths = param.getTargets().stream()
.filter(t -> isBlank(qualifier) || qualifier.equals(t))
@ -832,8 +837,8 @@ public class ResourceLinkPredicateBuilder extends BaseJoiningPredicateBuilder im
subquery.addFromTable(getTable());

String resourceType = theParams.getResourceTablePredicateBuilder().getResourceType();
RuntimeSearchParam paramDefinition =
mySearchParamRegistry.getRuntimeSearchParam(resourceType, theParams.getParamName());
RuntimeSearchParam paramDefinition = mySearchParamRegistry.getRuntimeSearchParam(
resourceType, theParams.getParamName(), ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);
List<String> pathList = paramDefinition.getPathsSplitForResourceType(resourceType);

Condition subQueryCondition = ComboCondition.and(
@ -1,3 +1,22 @@
/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2024 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */
package ca.uhn.fhir.jpa.search.builder.sql;

import com.healthmarketscience.common.util.AppendableExt;
@ -158,7 +158,7 @@ public class InstanceReindexServiceImpl implements IInstanceReindexService {
return myVersionCanonicalizer.parametersFromCanonical(retValCanonical);
}

@SuppressWarnings({"unchecked", "rawtypes"})
@SuppressWarnings({"rawtypes"})
@Nonnull
private Parameters reindexInTransaction(RequestDetails theRequestDetails, IIdType theResourceId) {
StopWatch sw = new StopWatch();
@ -441,7 +441,8 @@ public class InstanceReindexServiceImpl implements IInstanceReindexService {
private void fillInParamNames(
ResourceTable theEntity, Collection<SearchParamPresentEntity> theTarget, String theResourceName) {
Map<Long, String> hashes = new HashMap<>();
ResourceSearchParams searchParams = mySearchParamRegistry.getActiveSearchParams(theResourceName);
ResourceSearchParams searchParams = mySearchParamRegistry.getActiveSearchParams(
theResourceName, ISearchParamRegistry.SearchParamLookupContextEnum.ALL);
for (RuntimeSearchParam next : searchParams.values()) {
hashes.put(
SearchParamPresentEntity.calculateHashPresence(
@ -25,6 +25,7 @@ import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
import ca.uhn.fhir.jpa.api.svc.ResolveIdentityMode;
import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao;
import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemDao;
@ -382,8 +383,11 @@ public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc {
// Note that this creates the TermCodeSystem and TermCodeSystemVersion entities if needed
IIdType csId = myTerminologyVersionAdapterSvc.createOrUpdateCodeSystem(theCodeSystemResource, theRequest);

JpaPid codeSystemResourcePid = myIdHelperService.resolveResourcePersistentIds(
RequestPartitionId.allPartitions(), csId.getResourceType(), csId.getIdPart());
JpaPid codeSystemResourcePid = myIdHelperService.resolveResourceIdentityPid(
RequestPartitionId.allPartitions(),
csId.getResourceType(),
csId.getIdPart(),
ResolveIdentityMode.includeDeleted().cacheOk());
ResourceTable resource = myResourceTableDao.getOne(codeSystemResourcePid.getId());

ourLog.info("CodeSystem resource has ID: {}", csId.getValue());
@ -34,6 +34,7 @@ import ca.uhn.fhir.jpa.api.dao.IDao;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoCodeSystem;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
import ca.uhn.fhir.jpa.api.svc.ResolveIdentityMode;
import ca.uhn.fhir.jpa.config.HibernatePropertiesProvider;
import ca.uhn.fhir.jpa.config.util.ConnectionPoolInfoProvider;
import ca.uhn.fhir.jpa.config.util.IConnectionPoolInfoProvider;
@ -470,7 +471,7 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
myCodeSystemCurrentVersionCache.invalidateAll();
}

public void deleteValueSetForResource(ResourceTable theResourceTable) {
public Optional<TermValueSet> deleteValueSetForResource(ResourceTable theResourceTable) {
// Get existing entity so it can be deleted.
Optional<TermValueSet> optionalExistingTermValueSetById =
myTermValueSetDao.findByResourcePid(theResourceTable.getId());
@ -481,8 +482,19 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
ourLog.info("Deleting existing TermValueSet[{}] and its children...", existingTermValueSet.getId());
deletePreCalculatedValueSetContents(existingTermValueSet);
myTermValueSetDao.deleteById(existingTermValueSet.getId());

/*
 * If we're updating an existing ValueSet within a transaction, we need to make
 * sure to manually flush now since otherwise we'll try to create a new
 * TermValueSet entity and fail with a constraint error on the URL, since
 * this one won't be deleted yet
 */
myTermValueSetDao.flush();

ourLog.info("Done deleting existing TermValueSet[{}] and its children.", existingTermValueSet.getId());
}

return optionalExistingTermValueSetById;
}

private void deletePreCalculatedValueSetContents(TermValueSet theValueSet) {
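The manual flush above is a general JPA concern rather than anything HAPI-specific: Hibernate queues pending statements and, at flush time, issues inserts before deletes, so a replacement row sharing a unique key with a logically removed row can collide. A sketch of the failure mode with a plain EntityManager (the entity names are illustrative):

// Without the intermediate flush, the later INSERT can be ordered before the
// DELETE and violate the unique constraint on the URL column.
em.remove(existingTermValueSet);      // DELETE is only queued at this point
em.flush();                           // force the DELETE to hit the database now
em.persist(replacementTermValueSet);  // safe: the unique URL is gone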
@ -2081,10 +2093,11 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
}

private JpaPid getValueSetResourcePersistentId(ValueSet theValueSet) {
return myIdHelperService.resolveResourcePersistentIds(
return myIdHelperService.resolveResourceIdentityPid(
RequestPartitionId.allPartitions(),
theValueSet.getIdElement().getResourceType(),
theValueSet.getIdElement().getIdPart());
theValueSet.getIdElement().getIdPart(),
ResolveIdentityMode.includeDeleted().cacheOk());
}

protected IValidationSupport.CodeValidationResult validateCodeIsInPreExpandedValueSet(
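Two ResolveIdentityMode combinations recur across these hunks. Judging only by the names and the call sites in this diff (the semantics below are inferred, not confirmed against the implementation):

ResolveIdentityMode.includeDeleted().cacheOk();
// deleted resources still resolve, and a previously cached identity may be reused

ResolveIdentityMode.includeDeleted().noCacheUnlessDeletesDisabled();
// bypasses the cache unless deletes are disabled, since with deletes enabled
// a cached entry could have gone stale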
@ -2527,6 +2540,13 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
@Override
@Transactional
public void storeTermValueSet(ResourceTable theResourceTable, ValueSet theValueSet) {
// If we're in a transaction, we need to flush now so that we can correctly detect
// duplicates if there are multiple ValueSets in the same TX with the same URL
// (which is an error, but we need to catch it). It'd be better to catch this by
// inspecting the URLs in the bundle or something, since flushing hurts performance,
// but loading valuesets is not expected to be a high-frequency operation,
// so it probably doesn't matter
myEntityManager.flush();

ValidateUtil.isTrueOrThrowInvalidRequest(theResourceTable != null, "No resource supplied");
if (isPlaceholder(theValueSet)) {
@ -2552,7 +2572,7 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
termValueSet.setName(theValueSet.hasName() ? theValueSet.getName() : null);

// Delete version being replaced
deleteValueSetForResource(theResourceTable);
Optional<TermValueSet> deletedTrmValueSet = deleteValueSetForResource(theResourceTable);

/*
 * Do the upload.
@ -2560,11 +2580,17 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
String url = termValueSet.getUrl();
String version = termValueSet.getVersion();
Optional<TermValueSet> optionalExistingTermValueSetByUrl;
if (version != null) {
optionalExistingTermValueSetByUrl = myTermValueSetDao.findTermValueSetByUrlAndVersion(url, version);

if (deletedTrmValueSet.isPresent()
&& Objects.equals(deletedTrmValueSet.get().getUrl(), url)
&& Objects.equals(deletedTrmValueSet.get().getVersion(), version)) {
// If we just deleted the valueset marker, we don't need to check if it exists
// in the database
optionalExistingTermValueSetByUrl = Optional.empty();
} else {
optionalExistingTermValueSetByUrl = myTermValueSetDao.findTermValueSetByUrlAndNullVersion(url);
optionalExistingTermValueSetByUrl = getTermValueSet(version, url);
}

if (optionalExistingTermValueSetByUrl.isEmpty()) {

myTermValueSetDao.save(termValueSet);
@ -2602,6 +2628,16 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
}
}

private Optional<TermValueSet> getTermValueSet(String version, String url) {
Optional<TermValueSet> optionalExistingTermValueSetByUrl;
if (version != null) {
optionalExistingTermValueSetByUrl = myTermValueSetDao.findTermValueSetByUrlAndVersion(url, version);
} else {
optionalExistingTermValueSetByUrl = myTermValueSetDao.findTermValueSetByUrlAndNullVersion(url);
}
return optionalExistingTermValueSetByUrl;
}

@Override
@Transactional
public IFhirResourceDaoCodeSystem.SubsumesResult subsumes(
@ -35,11 +35,11 @@ import java.util.stream.Stream;
 */
public class QueryChunker<T> extends TaskChunker<T> {

public void chunk(Collection<T> theInput, Consumer<List<T>> theBatchConsumer) {
public static <T> void chunk(Collection<T> theInput, Consumer<List<T>> theBatchConsumer) {
chunk(theInput, SearchBuilder.getMaximumPageSize(), theBatchConsumer);
}

public Stream<List<T>> chunk(Stream<T> theStream) {
public static <T> Stream<List<T>> chunk(Stream<T> theStream) {
return chunk(theStream, SearchBuilder.getMaximumPageSize());
}
}
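Making both chunk() overloads static (each with its own type parameter, shadowing the class-level one) means call sites no longer have to instantiate a QueryChunker. A sketch of a hypothetical caller; allResourcePids, fetchCandidatePids, and runQueryForChunk are placeholders:

// Partition a large PID list into page-sized chunks so that the generated
// "RES_ID IN (...)" clauses stay within database parameter limits.
List<Long> allResourcePids = fetchCandidatePids();
QueryChunker.chunk(allResourcePids, chunk -> runQueryForChunk(chunk));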
@ -423,7 +423,7 @@ public class JpaBulkExportProcessorTest {

// when
RuntimeSearchParam searchParam = new RuntimeSearchParam(new IdType("1"), "", "", "", "", RestSearchParameterTypeEnum.STRING, Collections.singleton(""), Collections.singleton(""), RuntimeSearchParam.RuntimeSearchParamStatusEnum.ACTIVE, Collections.singleton(""));
when(mySearchParamRegistry.getActiveSearchParam(any(), any())).thenReturn(searchParam);
when(mySearchParamRegistry.getActiveSearchParam(any(), any(), any())).thenReturn(searchParam);
// expandAllPatientPidsFromGroup
when(myDaoRegistry.getResourceDao(eq("Group")))
.thenReturn(groupDao);
@ -3,36 +3,39 @@ package ca.uhn.fhir.jpa.dao.index;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.svc.ResolveIdentityMode;
import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.cross.IResourceLookup;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.util.MemoryCacheService;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import jakarta.persistence.EntityManager;
import jakarta.persistence.Tuple;
import jakarta.persistence.criteria.Path;
import jakarta.persistence.criteria.Root;
import jakarta.persistence.TypedQuery;
import jakarta.persistence.criteria.CriteriaQuery;
import org.hibernate.sql.results.internal.TupleImpl;
import org.hl7.fhir.r4.model.Patient;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.ArgumentMatchers;
import org.mockito.Answers;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Spy;
import org.mockito.junit.jupiter.MockitoExtension;

import java.util.Date;
import java.util.List;
import java.util.Map;

import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.lenient;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;

@ -40,7 +43,6 @@ import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.mockito.Mockito.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

@ExtendWith(MockitoExtension.class)
public class IdHelperServiceTest {
@ -60,29 +62,33 @@ public class IdHelperServiceTest {
@Mock
private MemoryCacheService myMemoryCacheService;

@Mock
@Mock(answer = Answers.RETURNS_DEEP_STUBS)
private EntityManager myEntityManager;

@Mock
private PartitionSettings myPartitionSettings;

@BeforeEach
@Mock
private TypedQuery myTypedQuery;

@BeforeEach
void setUp() {
myHelperSvc.setDontCheckActiveTransactionForUnitTest(true);

// lenient because some tests require this setup, and others do not
lenient().doReturn(true).when(myStorageSettings).isDeleteEnabled();
lenient().doReturn(JpaStorageSettings.ClientIdStrategyEnum.ANY).when(myStorageSettings).getResourceClientIdStrategy();
}

@Test
public void testResolveResourcePersistentIds() {
lenient().doReturn(JpaStorageSettings.ClientIdStrategyEnum.ANY).when(myStorageSettings).getResourceClientIdStrategy();

//prepare params
RequestPartitionId requestPartitionId = RequestPartitionId.fromPartitionIdAndName(1, "Partition-A");
String resourceType = "Patient";
Long id = 123L;
List<String> ids = List.of(String.valueOf(id));
boolean theExcludeDeleted = false;
ResolveIdentityMode mode = ResolveIdentityMode.includeDeleted().noCacheUnlessDeletesDisabled();

//prepare results
Patient expectedPatient = new Patient();
@ -91,19 +97,21 @@ public class IdHelperServiceTest {
// configure mock behaviour
when(myStorageSettings.isDeleteEnabled()).thenReturn(true);

final ResourceNotFoundException resourceNotFoundException = assertThrows(ResourceNotFoundException.class, () -> myHelperSvc.resolveResourcePersistentIds(requestPartitionId, resourceType, ids, theExcludeDeleted));
final ResourceNotFoundException resourceNotFoundException = assertThrows(ResourceNotFoundException.class, () -> myHelperSvc.resolveResourcePersistentIds(requestPartitionId, resourceType, ids, mode));
assertEquals("HAPI-2001: Resource Patient/123 is not known", resourceNotFoundException.getMessage());
}

@Test
public void testResolveResourcePersistentIdsDeleteFalse() {
lenient().doReturn(JpaStorageSettings.ClientIdStrategyEnum.ANY).when(myStorageSettings).getResourceClientIdStrategy();

//prepare Params
RequestPartitionId requestPartitionId = RequestPartitionId.fromPartitionIdAndName(1, "Partition-A");
Long id = 123L;
String resourceType = "Patient";
List<String> ids = List.of(String.valueOf(id));
String forcedId = "(all)/" + resourceType + "/" + id;
boolean theExcludeDeleted = false;
ResolveIdentityMode mode = ResolveIdentityMode.includeDeleted().noCacheUnlessDeletesDisabled();

//prepare results
Patient expectedPatient = new Patient();
@ -112,7 +120,7 @@ public class IdHelperServiceTest {
// configure mock behaviour
when(myStorageSettings.isDeleteEnabled()).thenReturn(false);

Map<String, JpaPid> actualIds = myHelperSvc.resolveResourcePersistentIds(requestPartitionId, resourceType, ids, theExcludeDeleted);
Map<String, JpaPid> actualIds = myHelperSvc.resolveResourcePersistentIds(requestPartitionId, resourceType, ids, mode);

//verifyResult
assertFalse(actualIds.isEmpty());
@ -120,20 +128,35 @@ public class IdHelperServiceTest {
}

private Root<ResourceTable> getMockedFrom() {
@SuppressWarnings("unchecked")
Path<Object> path = mock(Path.class);
@SuppressWarnings("unchecked")
Root<ResourceTable> from = mock(Root.class);
when(from.get(ArgumentMatchers.<String>any())).thenReturn(path);
return from;
}

private List<Tuple> getMockedTupleList(Long idNumber, String resourceType, String id) {
Tuple tuple = mock(Tuple.class);
when(tuple.get(eq(0), eq(Long.class))).thenReturn(idNumber);
when(tuple.get(eq(1), eq(String.class))).thenReturn(resourceType);
when(tuple.get(eq(2), eq(String.class))).thenReturn(id);
return List.of(tuple);
}

@Test
public void testResolveResourceIdentity_defaultFunctionality() {
lenient().doReturn(JpaStorageSettings.ClientIdStrategyEnum.ALPHANUMERIC).when(myStorageSettings).getResourceClientIdStrategy();

RequestPartitionId partitionId = RequestPartitionId.fromPartitionIdAndName(1, "partition");
String resourceType = "Patient";
String resourceForcedId = "AAA";

Object[] tuple = new Object[] {
1L,
"Patient",
"AAA",
new Date(),
null
};

when(myEntityManager.createQuery(any(CriteriaQuery.class))).thenReturn(myTypedQuery);
when(myTypedQuery.getResultList()).thenReturn(List.of(
new TupleImpl(null, tuple)
));

IResourceLookup<JpaPid> result = myHelperSvc.resolveResourceIdentity(partitionId, resourceType, resourceForcedId, ResolveIdentityMode.includeDeleted().noCacheUnlessDeletesDisabled());
assertEquals(tuple[0], result.getPersistentId().getId());
assertEquals(tuple[1], result.getResourceType());
assertEquals(tuple[3], result.getDeleted());
}

}
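The switch to @Mock(answer = Answers.RETURNS_DEEP_STUBS) above is what lets the new test drive the criteria-query code path without stubbing every intermediate object: chained calls on the EntityManager mock return further mocks instead of null. A short illustration using standard JPA and Mockito behavior, not anything HAPI-specific:

// With a deep-stubbed EntityManager, each link in a call chain is answered
// with another mock automatically:
CriteriaBuilder cb = myEntityManager.getCriteriaBuilder(); // a mock, not null
CriteriaQuery<Tuple> cq = cb.createTupleQuery();           // also a mock

Only the terminal createQuery(...) call still needs explicit stubbing, which the test provides via myTypedQuery.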
@ -23,6 +23,8 @@ import java.util.Optional;

import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
@ -68,13 +70,13 @@ class HSearchSortHelperImplTest {
void testGetParamType() {
SortSpec sortSpec = new SortSpec();
sortSpec.setParamName("_tag");
when(mockSearchParamRegistry.getActiveSearchParams("Observation")).thenReturn(mockResourceSearchParams);
when(mockSearchParamRegistry.getActiveSearchParams(eq("Observation"), any())).thenReturn(mockResourceSearchParams);
when(mockResourceSearchParams.get("the-param-name")).thenReturn(mockRuntimeSearchParam);
when(mockRuntimeSearchParam.getParamType()).thenReturn(RestSearchParameterTypeEnum.TOKEN);

Optional<RestSearchParameterTypeEnum> paramType = tested.getParamType("Observation", "the-param-name");

verify(mockSearchParamRegistry, times(1)).getActiveSearchParams("Observation");
verify(mockSearchParamRegistry, times(1)).getActiveSearchParams(eq("Observation"), any());
verify(mockResourceSearchParams, times(1)).get("the-param-name");
assertFalse(paramType.isEmpty());
}
@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.7.0-SNAPSHOT</version>
<version>7.7.7-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
@ -72,7 +72,7 @@ abstract public class BaseR4SearchLastN extends BaseJpaTest {
protected static IIdType patient2Id = null;
// Using static variables including the flag below so that we can initialize the database and indexes once
// (all of the tests only read from the DB and indexes, so there is no need to re-initialize them for each test).
private static Calendar observationDate = new GregorianCalendar();
private static final Calendar observationDate = new GregorianCalendar();
protected final String observationCd0 = "code0";
protected final String observationCd1 = "code1";
protected final String observationCd2 = "code2";
@ -120,9 +120,10 @@ abstract public class BaseR4SearchLastN extends BaseJpaTest {
// enabled to also create extended lucene index during creation of test data
boolean hsearchSaved = myStorageSettings.isAdvancedHSearchIndexing();
myStorageSettings.setAdvancedHSearchIndexing(true);
myStorageSettings.setDeleteEnabled(false);

// Using a static flag to ensure that test data and elasticsearch index is only created once.
// Creating this data and the index is time consuming and as such want to avoid having to repeat for each test.
// Creating this data and the index is time-consuming and as such want to avoid having to repeat for each test.
// Normally would use a static @BeforeClass method for this purpose, but Autowired objects cannot be accessed in static methods.

Patient pt = new Patient();
@ -120,9 +120,10 @@ public class FhirResourceDaoR4SearchLastNAsyncIT extends BaseR4SearchLastN {

ourLog.info("Queries:\n * " + String.join("\n * ", queries));

// 1 query to resolve the subject PIDs
// 3 queries to actually perform the search
// 1 query to look up the Search from cache, and 2 chunked queries to retrieve resources by PID.
assertThat(queries).hasSize(6);
assertThat(queries).hasSize(7);

// The first chunked query should have a full complement of PIDs
StringBuilder firstQueryPattern = new StringBuilder(".*RES_ID in \\('[0-9]+'");

@ -130,7 +131,7 @@ public class FhirResourceDaoR4SearchLastNAsyncIT extends BaseR4SearchLastN {
firstQueryPattern.append(",'[0-9]+'");
}
firstQueryPattern.append("\\).*");
assertThat(queries.get(4)).matches(firstQueryPattern.toString());
assertThat(queries.get(5)).matches(firstQueryPattern.toString());

// the second chunked query should be padded with "-1".
StringBuilder secondQueryPattern = new StringBuilder(".*RES_ID in \\('[0-9]+'");

@ -141,7 +142,7 @@ public class FhirResourceDaoR4SearchLastNAsyncIT extends BaseR4SearchLastN {
secondQueryPattern.append(",'-1'");
}
secondQueryPattern.append("\\).*");
assertThat(queries.get(5)).matches(secondQueryPattern.toString());
assertThat(queries.get(6)).matches(secondQueryPattern.toString());

}
@ -79,6 +79,10 @@ public class FhirResourceDaoR4SearchLastNIT extends BaseR4SearchLastN {
// Set chunk size to 50
SearchBuilder.setMaxPageSizeForTest(50);

// Run once to fill caches
toUnqualifiedVersionlessIdValues(myObservationDao.observationsLastN(params, mockSrd(), null));

// Actually test
myCaptureQueriesListener.clear();
List<String> results = toUnqualifiedVersionlessIdValues(myObservationDao.observationsLastN(params, mockSrd(), null));
assertThat(results).hasSize(75);
@ -277,8 +277,7 @@ public class ValueSetExpansionR4ElasticsearchIT extends BaseJpaTest implements I
myTermCodeSystemStorageSvc.storeNewCodeSystemVersion(codeSystem, codeSystemVersion,
new SystemRequestDetails(), Collections.singletonList(valueSet), Collections.emptyList());

// myTerminologyDeferredStorageSvc.saveAllDeferred();
await().atMost(10, SECONDS).until(() -> {
await().atMost(20, SECONDS).until(() -> {
myTerminologyDeferredStorageSvc.saveDeferred();
return myTerminologyDeferredStorageSvc.isStorageQueueEmpty(true);
});