merge master in

commit d706caf949
@@ -4,7 +4,7 @@
   <parent>
      <groupId>ca.uhn.hapi.fhir</groupId>
      <artifactId>hapi-fhir</artifactId>
-     <version>5.6.0-PRE2-SNAPSHOT</version>
+     <version>5.6.0-PRE5-SNAPSHOT</version>
      <relativePath>../pom.xml</relativePath>
   </parent>
@@ -5,7 +5,7 @@
   <parent>
      <groupId>ca.uhn.hapi.fhir</groupId>
      <artifactId>hapi-fhir</artifactId>
-     <version>5.6.0-PRE2-SNAPSHOT</version>
+     <version>5.6.0-PRE5-SNAPSHOT</version>
      <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>
@@ -5,7 +5,7 @@
   <parent>
      <groupId>ca.uhn.hapi.fhir</groupId>
      <artifactId>hapi-deployable-pom</artifactId>
-     <version>5.6.0-PRE2-SNAPSHOT</version>
+     <version>5.6.0-PRE5-SNAPSHOT</version>
      <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>
@@ -225,6 +225,14 @@ public class FhirContext {
    }

+   /**
+    * @since 5.6.0
+    */
+   public static FhirContext forDstu2Cached() {
+      return forCached(FhirVersionEnum.DSTU2);
+   }
+
    /**
     * @since 5.5.0
     */
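As a minimal usage sketch of the new factory method (illustrative, not part of the commit): repeated calls return the same cached `FhirContext`, avoiding the cost of re-scanning the model classes.

```java
// Assumes HAPI FHIR 5.6.0+ on the classpath.
FhirContext ctx = FhirContext.forDstu2Cached();
// Subsequent calls return the same cached instance:
assert ctx == FhirContext.forDstu2Cached();
```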
@@ -73,7 +73,6 @@ import static org.apache.commons.lang3.StringUtils.isBlank;

 class ModelScanner {
    private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(ModelScanner.class);
-
    private Map<Class<? extends IBase>, BaseRuntimeElementDefinition<?>> myClassToElementDefinitions = new HashMap<>();
    private FhirContext myContext;
    private Map<String, RuntimeResourceDefinition> myIdToResourceDefinition = new HashMap<>();
@@ -90,6 +89,7 @@ class ModelScanner {
                @Nonnull Collection<Class<? extends IBase>> theResourceTypes) throws ConfigurationException {
      myContext = theContext;
      myVersion = theVersion;

+     Set<Class<? extends IBase>> toScan = new HashSet<>(theResourceTypes);
      init(theExistingDefinitions, toScan);
   }
@@ -405,8 +405,8 @@ class ModelScanner {
         List<RuntimeSearchParam.Component> components = null;
         if (paramType == RestSearchParameterTypeEnum.COMPOSITE) {
            components = new ArrayList<>();
-           for (String next : searchParam.compositeOf()) {
-              String ref = "http://hl7.org/fhir/SearchParameter/" + theResourceDef.getName().toLowerCase() + "-" + next;
+           for (String name : searchParam.compositeOf()) {
+              String ref = toCanonicalSearchParameterUri(theResourceDef, name);
               components.add(new RuntimeSearchParam.Component(null, ref));
            }
         }
@@ -414,7 +414,8 @@ class ModelScanner {
         Collection<String> base = Collections.singletonList(theResourceDef.getName());
         String url = null;
         if (theResourceDef.isStandardType()) {
-           url = "http://hl7.org/fhir/SearchParameter/" + theResourceDef.getName().toLowerCase() + "-" + searchParam.name();
+           String name = searchParam.name();
+           url = toCanonicalSearchParameterUri(theResourceDef, name);
         }
         RuntimeSearchParam param = new RuntimeSearchParam(null, url, searchParam.name(), searchParam.description(), searchParam.path(), paramType, providesMembershipInCompartments, toTargetList(searchParam.target()), RuntimeSearchParamStatusEnum.ACTIVE, null, components, base);
         theResourceDef.addSearchParam(param);
@@ -424,6 +425,10 @@

   }

+  private String toCanonicalSearchParameterUri(RuntimeResourceDefinition theResourceDef, String theName) {
+     return "http://hl7.org/fhir/SearchParameter/" + theResourceDef.getName() + "-" + theName;
+  }
+
   private Set<String> toTargetList(Class<? extends IBaseResource>[] theTarget) {
      HashSet<String> retVal = new HashSet<>();

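A brief note on the effect of this change (illustrative, not part of the commit): the canonical URI now preserves the resource name's original casing instead of lower-casing it, matching the URLs in the FHIR specification (see changelog entry 2790 below).

```java
// Sketch of the before/after output for the Patient "name" parameter:
// before: http://hl7.org/fhir/SearchParameter/patient-name
// after:  http://hl7.org/fhir/SearchParameter/Patient-name
String ref = "http://hl7.org/fhir/SearchParameter/" + "Patient" + "-" + "name";
```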
@@ -233,18 +233,7 @@ public class RuntimeSearchParam {
   }

   public List<String> getPathsSplit() {
-     String path = getPath();
-     if (path.indexOf('|') == -1) {
-        return Collections.singletonList(path);
-     }
-
-     List<String> retVal = new ArrayList<>();
-     StringTokenizer tok = new StringTokenizer(path, "|");
-     while (tok.hasMoreElements()) {
-        String nextPath = tok.nextToken().trim();
-        retVal.add(nextPath.trim());
-     }
-     return retVal;
+     return getPathsSplitForResourceType(null);
   }

   /**
@@ -266,6 +255,41 @@
      return myPhoneticEncoder.encode(theString);
   }

+  public List<String> getPathsSplitForResourceType(@Nullable String theResourceName) {
+     String path = getPath();
+     if (path.indexOf('|') == -1) {
+        if (theResourceName != null && !pathMatchesResourceType(theResourceName, path)) {
+           return Collections.emptyList();
+        }
+        return Collections.singletonList(path);
+     }
+
+     List<String> retVal = new ArrayList<>();
+     StringTokenizer tok = new StringTokenizer(path, "|");
+     while (tok.hasMoreElements()) {
+        String nextPath = tok.nextToken().trim();
+        if (theResourceName != null && !pathMatchesResourceType(theResourceName, nextPath)) {
+           continue;
+        }
+        retVal.add(nextPath.trim());
+     }
+     return retVal;
+  }
+
+  private boolean pathMatchesResourceType(String theResourceName, String thePath) {
+     if (thePath.startsWith(theResourceName + ".")) {
+        return true;
+     }
+     if (thePath.startsWith("Resource.") || thePath.startsWith("DomainResource.")) {
+        return true;
+     }
+     if (Character.isLowerCase(thePath.charAt(0))) {
+        return true;
+     }
+
+     return false;
+  }
+
   public enum RuntimeSearchParamStatusEnum {
      ACTIVE,
      DRAFT,
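As a sketch of the new filtering behaviour (illustrative values, not from the commit): given a composite path shared across resource types, only the paths applicable to the requested type are kept.

```java
import java.util.ArrayList;
import java.util.List;
import java.util.StringTokenizer;

public class PathFilterSketch {
   public static void main(String[] args) {
      String path = "AllergyIntolerance.patient | CarePlan.subject";
      List<String> forCarePlan = new ArrayList<>();
      StringTokenizer tok = new StringTokenizer(path, "|");
      while (tok.hasMoreElements()) {
         String next = tok.nextToken().trim();
         // Mirrors pathMatchesResourceType(): keep paths for the requested type,
         // Resource/DomainResource paths, and expressions starting lowercase.
         if (next.startsWith("CarePlan.") || next.startsWith("Resource.")
               || next.startsWith("DomainResource.") || Character.isLowerCase(next.charAt(0))) {
            forCarePlan.add(next);
         }
      }
      System.out.println(forCarePlan); // [CarePlan.subject]
   }
}
```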
@@ -1986,13 +1986,17 @@ public enum Pointcut implements IPointcut {
    * <ul>
    * <li>ca.uhn.fhir.rest.server.messaging.ResourceOperationMessage - This parameter should not be modified as processing is complete when this hook is invoked.</li>
    * <li>ca.uhn.fhir.rest.server.TransactionLogMessages - This parameter is for informational messages provided by the MDM module during MDM processing.</li>
+   * <li>ca.uhn.fhir.mdm.api.MdmLinkChangeEvent - Contains information about the change event, including target and golden resource IDs and the operation type.</li>
    * </ul>
    * </p>
    * <p>
    * Hooks should return <code>void</code>.
    * </p>
    */
-  MDM_AFTER_PERSISTED_RESOURCE_CHECKED(void.class, "ca.uhn.fhir.rest.server.messaging.ResourceOperationMessage", "ca.uhn.fhir.rest.server.TransactionLogMessages"),
+  MDM_AFTER_PERSISTED_RESOURCE_CHECKED(void.class,
+     "ca.uhn.fhir.rest.server.messaging.ResourceOperationMessage",
+     "ca.uhn.fhir.rest.server.TransactionLogMessages",
+     "ca.uhn.fhir.mdm.api.MdmLinkEvent"),

   /**
    * <b>Performance Tracing Hook:</b>
@@ -961,6 +961,7 @@ public class FhirTerser {
      for (BaseRuntimeChildDefinition nextChild : childDef.getChildrenAndExtension()) {

         List<?> values = nextChild.getAccessor().getValues(theElement);
+
         if (values != null) {
            for (Object nextValueObject : values) {
               IBase nextValue;
@@ -75,6 +75,7 @@ public enum VersionEnum {
   V5_4_1,
   V5_4_2,
   V5_5_0,
+  V5_5_1,
   V5_6_0

   ;
@@ -28,13 +28,6 @@ import ca.uhn.fhir.rest.gclient.TokenClientParam;
  */
 public interface IAnyResource extends IBaseResource {

-  /**
-   * Search parameter constant for <b>_language</b>
-   */
-  @SearchParamDefinition(name="_language", path="", description="The language of the resource", type="string" )
-  String SP_RES_LANGUAGE = "_language";
-
-
   /**
    * Search parameter constant for <b>_id</b>
    */
@@ -3,14 +3,14 @@
   <modelVersion>4.0.0</modelVersion>
   <groupId>ca.uhn.hapi.fhir</groupId>
   <artifactId>hapi-fhir-bom</artifactId>
-  <version>5.6.0-PRE2-SNAPSHOT</version>
+  <version>5.6.0-PRE5-SNAPSHOT</version>
   <packaging>pom</packaging>
   <name>HAPI FHIR BOM</name>

   <parent>
      <groupId>ca.uhn.hapi.fhir</groupId>
      <artifactId>hapi-deployable-pom</artifactId>
-     <version>5.6.0-PRE2-SNAPSHOT</version>
+     <version>5.6.0-PRE5-SNAPSHOT</version>
      <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>
@@ -4,7 +4,7 @@
   <parent>
      <groupId>ca.uhn.hapi.fhir</groupId>
      <artifactId>hapi-deployable-pom</artifactId>
-     <version>5.6.0-PRE2-SNAPSHOT</version>
+     <version>5.6.0-PRE5-SNAPSHOT</version>
      <relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
   </parent>
@@ -282,7 +282,7 @@ public abstract class BaseApp {
   }

   private Optional<BaseCommand> parseCommand(String[] theArgs) {
-     Optional<BaseCommand> commandOpt = getNextCommand(theArgs);
+     Optional<BaseCommand> commandOpt = getNextCommand(theArgs, 0);

      if (! commandOpt.isPresent()) {
         String message = "Unrecognized command: " + ansi().bold().fg(Ansi.Color.RED) + theArgs[0] + ansi().boldOff().fg(Ansi.Color.WHITE);
@@ -294,8 +294,8 @@
      return commandOpt;
   }

-  private Optional<BaseCommand> getNextCommand(String[] theArgs) {
-     return ourCommands.stream().filter(cmd -> cmd.getCommandName().equals(theArgs[0])).findFirst();
+  private Optional<BaseCommand> getNextCommand(String[] theArgs, int thePosition) {
+     return ourCommands.stream().filter(cmd -> cmd.getCommandName().equals(theArgs[thePosition])).findFirst();
   }

   private void processHelp(String[] theArgs) {
@@ -303,7 +303,7 @@
         logUsage();
         return;
      }
-     Optional<BaseCommand> commandOpt = getNextCommand(theArgs);
+     Optional<BaseCommand> commandOpt = getNextCommand(theArgs, 1);
      if (! commandOpt.isPresent()) {
         String message = "Unknown command: " + theArgs[1];
         System.err.println(message);
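For context (an illustrative sketch, not part of the commit): the new position parameter exists because the command token sits at a different index depending on the invocation.

```java
// smileutil create-package ...   -> args = {"create-package", ...}
//   parseCommand() resolves the command from args[0]
// smileutil help create-package  -> args = {"help", "create-package"}
//   processHelp() resolves the command of interest from args[1]
String[] direct = {"create-package"};
String[] viaHelp = {"help", "create-package"};
```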
@@ -0,0 +1,31 @@
+package ca.uhn.fhir.cli;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import java.io.ByteArrayOutputStream;
+import java.io.PrintStream;
+
+import static org.hamcrest.CoreMatchers.containsString;
+import static org.hamcrest.MatcherAssert.assertThat;
+
+public class BaseAppTest {
+
+   private final PrintStream standardOut = System.out;
+   private final ByteArrayOutputStream outputStreamCaptor = new ByteArrayOutputStream();
+
+   @BeforeEach
+   public void setUp() {
+      System.setOut(new PrintStream(outputStreamCaptor));
+   }
+
+   @AfterEach
+   public void tearDown() {
+      System.setOut(standardOut);
+   }
+
+   @Test
+   public void testHelpOption() {
+      App.main(new String[]{"help", "create-package"});
+      assertThat(outputStreamCaptor.toString().trim(), containsString("Usage"));
+   }
+}
@@ -6,7 +6,7 @@
   <parent>
      <groupId>ca.uhn.hapi.fhir</groupId>
      <artifactId>hapi-fhir-cli</artifactId>
-     <version>5.6.0-PRE2-SNAPSHOT</version>
+     <version>5.6.0-PRE5-SNAPSHOT</version>
      <relativePath>../pom.xml</relativePath>
   </parent>
@@ -6,7 +6,7 @@
   <parent>
      <groupId>ca.uhn.hapi.fhir</groupId>
      <artifactId>hapi-deployable-pom</artifactId>
-     <version>5.6.0-PRE2-SNAPSHOT</version>
+     <version>5.6.0-PRE5-SNAPSHOT</version>
      <relativePath>../../hapi-deployable-pom</relativePath>
   </parent>
@@ -30,6 +30,7 @@ import org.apache.commons.lang3.time.DateUtils;
 import org.springframework.beans.factory.annotation.Autowire;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Qualifier;
+import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.Import;
@@ -65,8 +66,8 @@ public class FhirServerConfig extends BaseJavaConfigDstu2 {

   @Override
   @Bean
-  public LocalContainerEntityManagerFactoryBean entityManagerFactory() {
-     LocalContainerEntityManagerFactoryBean retVal = super.entityManagerFactory();
+  public LocalContainerEntityManagerFactoryBean entityManagerFactory(ConfigurableListableBeanFactory theConfigurableListableBeanFactory) {
+     LocalContainerEntityManagerFactoryBean retVal = super.entityManagerFactory(theConfigurableListableBeanFactory);
      retVal.setPersistenceUnitName("HAPI_PU");
      retVal.setDataSource(myDataSource);
      retVal.setJpaProperties(myJpaProperties);
@@ -30,6 +30,7 @@ import ca.uhn.fhir.rest.server.interceptor.ResponseHighlighterInterceptor;
 import org.springframework.beans.factory.annotation.Autowire;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Qualifier;
+import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.Import;
@@ -62,8 +63,8 @@ public class FhirServerConfigDstu3 extends BaseJavaConfigDstu3 {

   @Override
   @Bean
-  public LocalContainerEntityManagerFactoryBean entityManagerFactory() {
-     LocalContainerEntityManagerFactoryBean retVal = super.entityManagerFactory();
+  public LocalContainerEntityManagerFactoryBean entityManagerFactory(ConfigurableListableBeanFactory theConfigurableListableBeanFactory) {
+     LocalContainerEntityManagerFactoryBean retVal = super.entityManagerFactory(theConfigurableListableBeanFactory);
      retVal.setPersistenceUnitName("HAPI_PU");
      retVal.setDataSource(myDataSource);
      retVal.setJpaProperties(myJpaProperties);
@@ -28,6 +28,7 @@ import ca.uhn.fhir.rest.server.interceptor.ResponseHighlighterInterceptor;
 import org.springframework.beans.factory.annotation.Autowire;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Qualifier;
+import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.Import;
@@ -60,8 +61,8 @@ public class FhirServerConfigR4 extends BaseJavaConfigR4 {

   @Override
   @Bean
-  public LocalContainerEntityManagerFactoryBean entityManagerFactory() {
-     LocalContainerEntityManagerFactoryBean retVal = super.entityManagerFactory();
+  public LocalContainerEntityManagerFactoryBean entityManagerFactory(ConfigurableListableBeanFactory theConfigurableListableBeanFactory) {
+     LocalContainerEntityManagerFactoryBean retVal = super.entityManagerFactory(theConfigurableListableBeanFactory);
      retVal.setPersistenceUnitName("HAPI_PU");
      retVal.setDataSource(myDataSource);
      retVal.setJpaProperties(myJpaProperties);
@@ -5,7 +5,7 @@
   <parent>
      <groupId>ca.uhn.hapi.fhir</groupId>
      <artifactId>hapi-fhir</artifactId>
-     <version>5.6.0-PRE2-SNAPSHOT</version>
+     <version>5.6.0-PRE5-SNAPSHOT</version>
      <relativePath>../pom.xml</relativePath>
   </parent>
@@ -4,7 +4,7 @@
   <parent>
      <groupId>ca.uhn.hapi.fhir</groupId>
      <artifactId>hapi-deployable-pom</artifactId>
-     <version>5.6.0-PRE2-SNAPSHOT</version>
+     <version>5.6.0-PRE5-SNAPSHOT</version>
      <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>
@@ -4,7 +4,7 @@
   <parent>
      <groupId>ca.uhn.hapi.fhir</groupId>
      <artifactId>hapi-deployable-pom</artifactId>
-     <version>5.6.0-PRE2-SNAPSHOT</version>
+     <version>5.6.0-PRE5-SNAPSHOT</version>
      <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>
@@ -5,7 +5,7 @@
   <parent>
      <groupId>ca.uhn.hapi.fhir</groupId>
      <artifactId>hapi-deployable-pom</artifactId>
-     <version>5.6.0-PRE2-SNAPSHOT</version>
+     <version>5.6.0-PRE5-SNAPSHOT</version>
      <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>
@@ -5,7 +5,7 @@
   <parent>
      <groupId>ca.uhn.hapi.fhir</groupId>
      <artifactId>hapi-fhir</artifactId>
-     <version>5.6.0-PRE2-SNAPSHOT</version>
+     <version>5.6.0-PRE5-SNAPSHOT</version>
      <relativePath>../pom.xml</relativePath>
   </parent>
@@ -5,7 +5,7 @@
   <parent>
      <groupId>ca.uhn.hapi.fhir</groupId>
      <artifactId>hapi-deployable-pom</artifactId>
-     <version>5.6.0-PRE2-SNAPSHOT</version>
+     <version>5.6.0-PRE5-SNAPSHOT</version>
      <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>
@@ -0,0 +1,5 @@
+---
+type: perf
+issue: 2457
+title: "A regression in HAPI FHIR 5.3.0 resulted in concurrent searches being executed in a sequential
+  (and not parallel) fashion in some circumstances."
@@ -0,0 +1,3 @@
+---
+release-date: "2021-08-30"
+codename: "Quasar"
@@ -0,0 +1,5 @@
+---
+type: fix
+issue: 2790
+title: "The SearchParameter canonical URLs exported by the JPA server have been adjusted to match the URLs
+  specified in the FHIR specification."
@@ -0,0 +1,7 @@
+---
+type: change
+issue: 2790
+title: "Support for the `_language` search parameter has been dropped from the JPA server. This search parameter
+  was specified in FHIR DSTU1 but was dropped in later versions. It is rarely used in practice and imposes
+  an indexing cost, so it has now been removed. A custom search parameter may be used in order to achieve
+  the same functionality if needed."
@@ -0,0 +1,4 @@
+---
+type: fix
+issue: 2793
+title: "Previously, when using the Expunge Everything operation, caches could retain old invalid values. This has been corrected. Thanks to [Ben Li-Sauerwine](https://github.com/theGOTOguy) for the fix!"
@@ -0,0 +1,4 @@
+---
+type: fix
+issue: 2837
+title: "Previously, the :not modifier did not work for observations with multiple codes in the search. This has been fixed."
@@ -0,0 +1,4 @@
+---
+type: add
+issue: 2850
+title: "Updated handling of the MDM_AFTER_PERSISTED_RESOURCE_CHECKED pointcut to include additional MDM-related info."
@@ -0,0 +1,5 @@
+---
+type: fix
+issue: 2901
+jira: SMILE-3004
+title: "Processing transactions with AutoversionAtPaths set should create those resources (if AutoCreatePlaceholders is set) and use the latest version as expected."
@@ -0,0 +1,3 @@
+---
+type: change
+title: "The $mdm-clear operation has been changed to use Spring Batch."
@@ -0,0 +1,5 @@
+---
+type: fix
+issue: 2923
+title: "The $lookup operation cache was keyed only on system and code, which became a defect
+  after displayLanguage support was added. The problem is now fixed."
@@ -1,6 +1,7 @@
 ---
 type: add
 issue: 2933
+backport: 5.5.1
 jira: SMILE-3056
 title: "Fixed a regression which caused transactions with multiple identical ifNoneExist clauses to create duplicate data."
@@ -0,0 +1,5 @@
+---
+type: fix
+issue: 2935
+jira: SMILE-3022
+title: "No resources were returned when searching with a percent sign. The problem is now fixed."
@@ -0,0 +1,6 @@
+---
+type: fix
+issue: 2958
+jira: SMILE-643
+title: "Fixed an issue where the processing of queries like Procedure?patient= before a cache search would cause the parameter key to be removed.
+  Additionally, ensured that requests like Procedure?patient= cause HTTP 400 Bad Request instead of HTTP 500 Internal Server Error."
@@ -0,0 +1,5 @@
+---
+type: fix
+issue: 2962
+jira: SMILE-720
+title: "Added a new DaoConfig setting called `setElasticSearchIndexPrefix(String prefix)` which will cause Hibernate Search to prefix all of its tables with the provided value."
@@ -0,0 +1,7 @@
+---
+type: fix
+issue: 2967
+jira: SMILE-2899
+title: "Previously, the system would only traverse references to discrete resources while performing a chained search.
+  This fix adds support for traversing references to contained resources as well, with the limitation that the reference
+  to the contained resource must be the last reference in the chain."
@@ -0,0 +1,4 @@
+---
+type: fix
+issue: 2973
+title: "The CLI command `smileutil help {command}` returned `Unknown command` instead of the usage for `command`. This has been corrected."
@@ -0,0 +1,5 @@
+---
+type: add
+issue: 2975
+title: "Two improvements have been made to the connection to Elasticsearch. First, null username and password values are now permitted. Second, multiple hosts are now permitted via the `setHosts()` method on the ElasticHibernatePropertiesBuilder, allowing you to
+  connect to multiple Elasticsearch clusters at once. Thanks to Dušan Marković for the contribution!"
@@ -0,0 +1,3 @@
+---
+type: fix
+title: "Fixed a bug where creating two identical tags in parallel entries of a batch would fail."
@@ -0,0 +1,5 @@
+---
+type: change
+jira: SMILE-2927
+title: "During transactions, any resources that were PUT or POSTed with a conditional URL now receive extra validation. There is now a final
+  storage step which ensures that the stored resource actually matches the conditional URL."
@@ -0,0 +1,6 @@
+---
+type: change
+issue: 2991
+title: "This PR eliminates the search coordinator threadpool, and executes searches synchronously on the HTTP client
+  thread. The idea of using a separate pool was supposed to help improve server scalability, but ultimately created
+  false bottlenecks and reduced the utility of monitoring infrastructure so it has been eliminated."
@@ -0,0 +1,5 @@
+---
+type: fix
+issue: 2995
+title: "CodeSystem version is copied to ValueSet.compose.include.version on LOINC terminology upload
+  to support versioned ValueSet expansion."
@@ -0,0 +1,5 @@
+---
+type: add
+issue: 3005
+jira: SMILE-723
+title: "Opened up the visibility of some methods in the generation of the OpenAPI definition files to allow extenders to add support for OIDC authorization."
@@ -38,7 +38,7 @@ section.server_plain.title=Plain Server
 page.server_plain.server_types=REST Server Types
 page.server_plain.introduction=Plain Server Introduction
 page.server_plain.get_started=Get Started ⚡
-page.server_plain.resource_providers=Resource Providers and Plan Providers
+page.server_plain.resource_providers=Resource Providers and Plain Providers
 page.server_plain.rest_operations=REST Operations: Overview
 page.server_plain.rest_operations_search=REST Operations: Search
 page.server_plain.rest_operations_operations=REST Operations: Extended Operations
@@ -31,12 +31,11 @@ In addition, the Elasticsearch client service, `ElasticsearchSvcImpl` will need
 ```java
 @Bean()
 public ElasticsearchSvcImpl elasticsearchSvc() {
-   String elasticsearchHost = "localhost";
-   String elasticsearchUserId = "elastic";
+   String elasticsearchHost = "localhost:9200";
+   String elasticsearchUsername = "elastic";
    String elasticsearchPassword = "changeme";
-   int elasticsearchPort = 9301;

-   return new ElasticsearchSvcImpl(elasticsearchHost, elasticsearchPort, elasticsearchUserId, elasticsearchPassword);
+   return new ElasticsearchSvcImpl(elasticsearchHost, elasticsearchUsername, elasticsearchPassword);
 }
 ```
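Per changelog entry 2975 (further down in this commit), null credentials are now permitted, so a variant of the bean above for an unsecured cluster might look like this (a sketch, assuming a local unsecured Elasticsearch instance):

```java
@Bean()
public ElasticsearchSvcImpl elasticsearchSvc() {
   // Assumption for illustration: an unsecured local cluster, so username and
   // password are passed as null (permitted as of this change).
   return new ElasticsearchSvcImpl("localhost:9200", null, null);
}
```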
@@ -302,6 +302,14 @@ If the server has been configured with a [Resource Server ID Strategy](/apidocs/
             Contains the specific version (starting with 1) of the resource that this row corresponds to.
          </td>
       </tr>
+      <tr>
+         <td>RESOURCE_TYPE</td>
+         <td></td>
+         <td>String</td>
+         <td>
+            Contains the string specifying the type of the resource (Patient, Observation, etc).
+         </td>
+      </tr>
    </tbody>
 </table>
@@ -476,7 +484,7 @@ The following columns are common to **all HFJ_SPIDX_xxx tables**.
       <tr>
          <td>RES_ID</td>
          <td>FK to <a href="#HFJ_RESOURCE">HFJ_RESOURCE</a></td>
-         <td>String</td>
+         <td>Long</td>
          <td></td>
          <td>
            Contains the PID of the resource being indexed.
@@ -567,11 +567,15 @@ Note that the request goes to the root of the FHIR server, and not the `Organization` endpoint.

 ## Clearing MDM Links

-The `$mdm-clear` operation is used to batch-delete MDM links and related Golden Resources from the database. This operation is meant to be used during the rules-tuning phase of the MDM implementation so that you can quickly test your ruleset. It permits the user to reset the state of their MDM system without manual deletion of all related links and Golden Resources.
+The `$mdm-clear` operation is used to batch-delete MDM links and related Golden Resources from the database. This
+operation is intended to be used during the rules-tuning phase of the MDM implementation so that you can quickly test
+your ruleset. It permits the user to reset the state of their MDM system without manual deletion of all related links
+and Golden Resources.

-After the operation is complete, all targeted MDM links are removed from the system, and their related Golden Resources are deleted and expunged from the server.
+After the operation is complete, all targeted MDM links are removed from the system, and their related Golden Resources
+are deleted and expunged from the server.

-This operation takes a single optional Parameter.
+This operation takes two optional Parameters.

 <table class="table table-striped table-condensed">
    <thead>
@@ -584,11 +588,21 @@ This operation takes a single optional Parameter.
    </thead>
    <tbody>
       <tr>
-         <td>sourceType</td>
+         <td>resourceType</td>
          <td>String</td>
          <td>0..*</td>
          <td>
-            The Source Resource type you would like to clear. If omitted, will operate over all links.
+            The Source resource types you would like to clear. If omitted, all resource types will be cleared.
+         </td>
+      </tr>
+      <tr>
+         <td>batchSize</td>
+         <td>Integer</td>
+         <td>0..1</td>
+         <td>
+            The number of links that should be deleted at a time. If omitted, the batch size will be determined by the value
+            of the [Expunge Batch Size](/apidocs/hapi-fhir-jpaserver-api/ca/uhn/fhir/jpa/api/config/DaoConfig.html#getExpungeBatchSize())
+            property.
          </td>
       </tr>
    </tbody>
@@ -598,33 +612,27 @@ This operation takes a single optional Parameter.

 Use an HTTP POST to the following URL to invoke this operation:

-```url
-http://example.com/$mdm-clear
-```
+```http
+POST /$mdm-clear
+Content-Type: application/fhir+json
+```

 The following request body could be used:

 ```json
 {
   "resourceType": "Parameters",
   "parameter": [ {
-    "name": "sourceType",
+    "name": "resourceType",
     "valueString": "Patient"
+  }, {
+    "name": "resourceType",
+    "valueString": "Practitioner"
+  }, {
+    "name": "batchSize",
+    "valueDecimal": 1000
   } ]
 }
 ```

-This operation returns the number of MDM links that were cleared. The following is a sample response:
-
-```json
-{
-  "resourceType": "Parameters",
-  "parameter": [ {
-    "name": "reset",
-    "valueDecimal": 5
-  } ]
-}
-```
+This operation returns the job execution id of the Spring Batch job that will be run to remove all the links and their
+golden resources.

 ## Batch-creating MDM Links

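For illustration only (not part of the commit), the same request can be issued from Java with HAPI's generic client; the endpoint base URL and parameter values here are assumptions:

```java
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import org.hl7.fhir.r4.model.DecimalType;
import org.hl7.fhir.r4.model.Parameters;
import org.hl7.fhir.r4.model.StringType;

public class MdmClearExample {
   public static void main(String[] args) {
      FhirContext ctx = FhirContext.forR4Cached();
      IGenericClient client = ctx.newRestfulGenericClient("http://example.com/fhir");

      Parameters inParams = new Parameters();
      inParams.addParameter().setName("resourceType").setValue(new StringType("Patient"));
      inParams.addParameter().setName("batchSize").setValue(new DecimalType(1000));

      // The response carries the Spring Batch job execution id
      Parameters outParams = client.operation()
         .onServer()
         .named("$mdm-clear")
         .withParameters(inParams)
         .execute();
   }
}
```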
@@ -11,7 +11,7 @@
   <parent>
      <groupId>ca.uhn.hapi.fhir</groupId>
      <artifactId>hapi-deployable-pom</artifactId>
-     <version>5.6.0-PRE2-SNAPSHOT</version>
+     <version>5.6.0-PRE5-SNAPSHOT</version>
      <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>
@@ -4,7 +4,7 @@
   <parent>
      <groupId>ca.uhn.hapi.fhir</groupId>
      <artifactId>hapi-deployable-pom</artifactId>
-     <version>5.6.0-PRE2-SNAPSHOT</version>
+     <version>5.6.0-PRE5-SNAPSHOT</version>
      <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>
@@ -5,7 +5,7 @@
   <parent>
      <groupId>ca.uhn.hapi.fhir</groupId>
      <artifactId>hapi-deployable-pom</artifactId>
-     <version>5.6.0-PRE2-SNAPSHOT</version>
+     <version>5.6.0-PRE5-SNAPSHOT</version>
      <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>
@@ -258,6 +258,11 @@ public class DaoConfig {
   private boolean myAccountForDateIndexNulls;
   private boolean myTriggerSubscriptionsForNonVersioningChanges;

+  /**
+   * @since 5.6.0
+   */
+  private String myElasicSearchIndexPrefix;
+
   /**
    * @since 5.6.0
    */
@@ -269,6 +274,7 @@ public class DaoConfig {
   private Integer myBundleBatchPoolSize = DEFAULT_BUNDLE_BATCH_POOL_SIZE;
   private Integer myBundleBatchMaxPoolSize = DEFAULT_BUNDLE_BATCH_MAX_POOL_SIZE;

+
   /**
    * Constructor
    */
@@ -2643,7 +2649,29 @@ public class DaoConfig {
      return retval;
   }

+  /**
+   *
+   * Sets a prefix for any indexes created when interacting with elasticsearch. This will apply to fulltext search indexes
+   * and terminology expansion indexes.
+   *
+   * @since 5.6.0
+   */
+  public String getElasticSearchIndexPrefix() {
+     return myElasicSearchIndexPrefix;
+  }
+
+  /**
+   *
+   * Sets a prefix for any indexes created when interacting with elasticsearch. This will apply to fulltext search indexes
+   * and terminology expansion indexes.
+   *
+   * @since 5.6.0
+   */
+  public void setElasticSearchIndexPrefix(String thePrefix) {
+     myElasicSearchIndexPrefix = thePrefix;
+  }
+
   public enum StoreMetaSourceInformationEnum {
      NONE(false, false),
      SOURCE_URI(true, false),
      REQUEST_ID(false, true),
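A minimal usage sketch of the new setting (the prefix value is an assumption for illustration):

```java
DaoConfig daoConfig = new DaoConfig();
// All Elasticsearch fulltext and terminology indexes created by this server
// will carry the prefix, e.g. for separating environments sharing one cluster:
daoConfig.setElasticSearchIndexPrefix("hapi-staging-");
```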
@@ -22,7 +22,6 @@ package ca.uhn.fhir.jpa.api.dao;

 import ca.uhn.fhir.jpa.api.model.ExpungeOptions;
 import ca.uhn.fhir.jpa.api.model.ExpungeOutcome;
 import ca.uhn.fhir.rest.annotation.Offset;
 import ca.uhn.fhir.rest.api.server.IBundleProvider;
 import ca.uhn.fhir.rest.api.server.RequestDetails;
 import org.hl7.fhir.instance.model.api.IBaseBundle;
@@ -52,13 +52,11 @@ public class LazyDaoMethodOutcome extends DaoMethodOutcome {

   private void tryToRunSupplier() {
      if (myEntitySupplier != null) {
-
         EntityAndResource entityAndResource = myEntitySupplier.get();
         setEntity(entityAndResource.getEntity());
         setResource(entityAndResource.getResource());
         setId(entityAndResource.getResource().getIdElement());
         myEntitySupplierUseCallback.run();
-
      }
   }

@@ -5,7 +5,7 @@
   <parent>
      <groupId>ca.uhn.hapi.fhir</groupId>
      <artifactId>hapi-deployable-pom</artifactId>
-     <version>5.6.0-PRE2-SNAPSHOT</version>
+     <version>5.6.0-PRE5-SNAPSHOT</version>
      <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>
@@ -815,7 +815,7 @@
            </execution>
         </executions>
      </plugin>
-     </plugins>
+  </plugins>
   <resources>
      <resource>
         <directory>${project.basedir}/src/main/resources</directory>
@@ -20,6 +20,7 @@ package ca.uhn.fhir.jpa.batch;
  * #L%
  */

+import ca.uhn.fhir.jpa.batch.mdm.job.MdmClearJobConfig;
 import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
 import ca.uhn.fhir.jpa.bulk.imprt.job.BulkImportJobConfig;
 import ca.uhn.fhir.jpa.delete.job.DeleteExpungeJobConfig;
@@ -40,7 +41,8 @@ import java.util.Set;
   BulkImportJobConfig.class,
   DeleteExpungeJobConfig.class,
   ReindexJobConfig.class,
-  ReindexEverythingJobConfig.class
+  ReindexEverythingJobConfig.class,
+  MdmClearJobConfig.class
 })
 public class BatchJobsConfig {

@@ -94,4 +96,9 @@ public class BatchJobsConfig {
    */
   public static final String REINDEX_EVERYTHING_JOB_NAME = "reindexEverythingJob";

+  /**
+   * MDM Clear
+   */
+  public static final String MDM_CLEAR_JOB_NAME = "mdmClearJob";
+
 }
@@ -20,15 +20,46 @@ package ca.uhn.fhir.jpa.batch;
  * #L%
  */

 import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
 import ca.uhn.fhir.jpa.batch.job.MultiUrlJobParameterValidator;
 import ca.uhn.fhir.jpa.batch.listener.PidReaderCounterListener;
 import ca.uhn.fhir.jpa.batch.processor.GoldenResourceAnnotatingProcessor;
 import ca.uhn.fhir.jpa.batch.processor.PidToIBaseResourceProcessor;
 import ca.uhn.fhir.jpa.batch.reader.ReverseCronologicalBatchResourcePidReader;
 import ca.uhn.fhir.jpa.batch.writer.SqlExecutorWriter;
 import ca.uhn.fhir.jpa.reindex.job.ReindexWriter;
 import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
 import org.springframework.batch.core.JobParametersValidator;
 import org.springframework.batch.core.configuration.annotation.StepScope;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;

 @Configuration
 public class CommonBatchJobConfig {
    public static final int MINUTES_IN_FUTURE_TO_PROCESS_FROM = 1;

    @Bean
    public MultiUrlJobParameterValidator multiUrlProcessorParameterValidator(MatchUrlService theMatchUrlService, DaoRegistry theDaoRegistry) {
       return new MultiUrlJobParameterValidator(theMatchUrlService, theDaoRegistry);
    }

    @Bean
    @StepScope
    public SqlExecutorWriter sqlExecutorWriter() {
       return new SqlExecutorWriter();
    }

    @Bean
    @StepScope
    public PidReaderCounterListener pidCountRecorderListener() {
       return new PidReaderCounterListener();
    }

    @Bean
    @StepScope
    public ReverseCronologicalBatchResourcePidReader reverseCronologicalBatchResourcePidReader() {
       return new ReverseCronologicalBatchResourcePidReader();
    }

    @Bean
    @StepScope
@@ -1,57 +0,0 @@
-package ca.uhn.fhir.jpa.batch.job;
-
-/*-
- * #%L
- * HAPI FHIR JPA Server
- * %%
- * Copyright (C) 2014 - 2021 Smile CDR, Inc.
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-
-import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
-import ca.uhn.fhir.jpa.batch.listener.PidReaderCounterListener;
-import ca.uhn.fhir.jpa.batch.reader.ReverseCronologicalBatchResourcePidReader;
-import ca.uhn.fhir.jpa.batch.writer.SqlExecutorWriter;
-import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
-import org.springframework.batch.core.JobParametersValidator;
-import org.springframework.batch.core.configuration.annotation.StepScope;
-import org.springframework.context.annotation.Bean;
-
-public class MultiUrlProcessorJobConfig {
-   public static final int MINUTES_IN_FUTURE_TO_PROCESS_FROM = 1;
-
-   @Bean
-   public JobParametersValidator multiUrlProcessorParameterValidator(MatchUrlService theMatchUrlService, DaoRegistry theDaoRegistry) {
-      return new MultiUrlJobParameterValidator(theMatchUrlService, theDaoRegistry);
-   }
-
-   @Bean
-   @StepScope
-   public SqlExecutorWriter sqlExecutorWriter() {
-      return new SqlExecutorWriter();
-   }
-
-   @Bean
-   @StepScope
-   public PidReaderCounterListener pidCountRecorderListener() {
-      return new PidReaderCounterListener();
-   }
-
-   @Bean
-   @StepScope
-   public ReverseCronologicalBatchResourcePidReader reverseCronologicalBatchResourcePidReader() {
-      return new ReverseCronologicalBatchResourcePidReader();
-   }
-}
@@ -43,6 +43,9 @@ public class PartitionedUrlValidator {
   @Autowired
   FhirContext myFhirContext;

+  public PartitionedUrlValidator() {
+  }
+
   /**
    * This method will throw an exception if the user is not allowed to access the requested resource type on the partition determined by the request
    */
@@ -54,4 +54,8 @@ public class PartitionedUrl implements IModelJson {
   public void setRequestPartitionId(RequestPartitionId theRequestPartitionId) {
      myRequestPartitionId = theRequestPartitionId;
   }
+
+  public boolean isPartitioned() {
+     return myRequestPartitionId != null && !myRequestPartitionId.isDefaultPartition();
+  }
 }
@@ -0,0 +1,35 @@
+package ca.uhn.fhir.jpa.batch.mdm;
+
+/*-
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2021 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import ca.uhn.fhir.mdm.api.IMdmBatchJobSubmitterFactory;
+import ca.uhn.fhir.mdm.api.IMdmClearJobSubmitter;
+import org.springframework.beans.factory.annotation.Autowired;
+
+public class MdmBatchJobSubmitterFactoryImpl implements IMdmBatchJobSubmitterFactory {
+   @Autowired
+   IMdmClearJobSubmitter myMdmClearJobSubmitter;
+
+   @Override
+   public IMdmClearJobSubmitter getClearJobSubmitter() {
+      return myMdmClearJobSubmitter;
+   }
+}
@@ -0,0 +1,84 @@
+package ca.uhn.fhir.jpa.batch.mdm;
+
+/*-
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2021 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import ca.uhn.fhir.interceptor.api.HookParams;
+import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
+import ca.uhn.fhir.interceptor.api.Pointcut;
+import ca.uhn.fhir.jpa.api.config.DaoConfig;
+import ca.uhn.fhir.jpa.batch.BatchJobsConfig;
+import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter;
+import ca.uhn.fhir.jpa.batch.job.PartitionedUrlValidator;
+import ca.uhn.fhir.jpa.batch.job.model.RequestListJson;
+import ca.uhn.fhir.jpa.batch.mdm.job.ReverseCronologicalBatchMdmLinkPidReader;
+import ca.uhn.fhir.mdm.api.IMdmClearJobSubmitter;
+import ca.uhn.fhir.rest.api.server.RequestDetails;
+import ca.uhn.fhir.rest.server.exceptions.ForbiddenOperationException;
+import ca.uhn.fhir.rest.server.provider.ProviderConstants;
+import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
+import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
+import org.springframework.batch.core.Job;
+import org.springframework.batch.core.JobExecution;
+import org.springframework.batch.core.JobParameters;
+import org.springframework.batch.core.JobParametersInvalidException;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Qualifier;
+
+import javax.transaction.Transactional;
+import java.util.List;
+
+public class MdmClearJobSubmitterImpl implements IMdmClearJobSubmitter {
+   @Autowired
+   DaoConfig myDaoConfig;
+   @Autowired
+   PartitionedUrlValidator myPartitionedUrlValidator;
+   @Autowired
+   IInterceptorBroadcaster myInterceptorBroadcaster;
+   @Autowired
+   private IBatchJobSubmitter myBatchJobSubmitter;
+   @Autowired
+   @Qualifier(BatchJobsConfig.MDM_CLEAR_JOB_NAME)
+   private Job myMdmClearJob;
+
+   @Override
+   @Transactional(Transactional.TxType.NEVER)
+   public JobExecution submitJob(Integer theBatchSize, List<String> theUrls, RequestDetails theRequest) throws JobParametersInvalidException {
+      if (theBatchSize == null) {
+         theBatchSize = myDaoConfig.getExpungeBatchSize();
+      }
+      if (!myDaoConfig.canDeleteExpunge()) {
+         throw new ForbiddenOperationException("Delete Expunge not allowed: " + myDaoConfig.cannotDeleteExpungeReason());
+      }
+
+      RequestListJson requestListJson = myPartitionedUrlValidator.buildRequestListJson(theRequest, theUrls);
+
+      for (String url : theUrls) {
+         HookParams params = new HookParams()
+            .add(RequestDetails.class, theRequest)
+            .addIfMatchesType(ServletRequestDetails.class, theRequest)
+            .add(String.class, url);
+         CompositeInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequest, Pointcut.STORAGE_PRE_DELETE_EXPUNGE, params);
+      }
+
+      JobParameters jobParameters = ReverseCronologicalBatchMdmLinkPidReader.buildJobParameters(ProviderConstants.OPERATION_MDM_CLEAR, theBatchSize, requestListJson);
+      return myBatchJobSubmitter.runJob(myMdmClearJob, jobParameters);
+   }
+}
@@ -0,0 +1,125 @@
+package ca.uhn.fhir.jpa.batch.mdm.job;
+
+/*-
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2021 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import ca.uhn.fhir.jpa.batch.job.MultiUrlJobParameterValidator;
+import ca.uhn.fhir.jpa.batch.listener.PidReaderCounterListener;
+import ca.uhn.fhir.jpa.batch.writer.SqlExecutorWriter;
+import ca.uhn.fhir.jpa.delete.job.DeleteExpungeProcessor;
+import org.springframework.batch.core.Job;
+import org.springframework.batch.core.Step;
+import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
+import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
+import org.springframework.batch.core.configuration.annotation.StepScope;
+import org.springframework.batch.core.listener.ExecutionContextPromotionListener;
+import org.springframework.batch.item.ItemProcessor;
+import org.springframework.batch.item.support.CompositeItemProcessor;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Qualifier;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.context.annotation.Lazy;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import static ca.uhn.fhir.jpa.batch.BatchJobsConfig.MDM_CLEAR_JOB_NAME;
+
+/**
+ * Spring batch Job configuration file. Contains all necessary plumbing to run a
+ * $mdm-clear job.
+ */
+@Configuration
+public class MdmClearJobConfig {
+   public static final String MDM_CLEAR_RESOURCE_LIST_STEP_NAME = "mdm-clear-resource-list-step";
+
+   @Autowired
+   private StepBuilderFactory myStepBuilderFactory;
+   @Autowired
+   private JobBuilderFactory myJobBuilderFactory;
+   @Autowired
+   private DeleteExpungeProcessor myDeleteExpungeProcessor;
+
+   @Autowired
+   @Qualifier("deleteExpungePromotionListener")
+   private ExecutionContextPromotionListener myDeleteExpungePromotionListener;
+
+   @Autowired
+   private MultiUrlJobParameterValidator myMultiUrlProcessorParameterValidator;
+
+   @Autowired
+   private PidReaderCounterListener myPidCountRecorderListener;
+
+   @Autowired
+   private SqlExecutorWriter mySqlExecutorWriter;
+
+   @Bean(name = MDM_CLEAR_JOB_NAME)
+   @Lazy
+   public Job mdmClearJob() {
+      return myJobBuilderFactory.get(MDM_CLEAR_JOB_NAME)
+         .validator(myMultiUrlProcessorParameterValidator)
+         .start(mdmClearUrlListStep())
+         .build();
+   }
+
+   @Bean
+   public Step mdmClearUrlListStep() {
+      return myStepBuilderFactory.get(MDM_CLEAR_RESOURCE_LIST_STEP_NAME)
+         .<List<Long>, List<String>>chunk(1)
+         .reader(reverseCronologicalBatchMdmLinkPidReader())
+         .processor(deleteThenExpungeCompositeProcessor())
+         .writer(mySqlExecutorWriter)
+         .listener(myPidCountRecorderListener)
+         .listener(myDeleteExpungePromotionListener)
+         .build();
+   }
+
+   @Bean
+   @StepScope
+   public ItemProcessor<List<Long>, List<String>> deleteThenExpungeCompositeProcessor() {
+      CompositeItemProcessor<List<Long>, List<String>> compositeProcessor = new CompositeItemProcessor<>();
+      List<ItemProcessor<?, ?>> itemProcessors = new ArrayList<>();
+      itemProcessors.add(mdmLinkDeleter());
+      itemProcessors.add(myDeleteExpungeProcessor);
+      compositeProcessor.setDelegates(itemProcessors);
+      return compositeProcessor;
+   }
+
+   @Bean
+   @StepScope
+   public ReverseCronologicalBatchMdmLinkPidReader reverseCronologicalBatchMdmLinkPidReader() {
+      return new ReverseCronologicalBatchMdmLinkPidReader();
+   }
+
+   @Bean
+   public MdmLinkDeleter mdmLinkDeleter() {
+      return new MdmLinkDeleter();
+   }
+
+   @Bean
+   public ExecutionContextPromotionListener mdmClearPromotionListener() {
+      ExecutionContextPromotionListener listener = new ExecutionContextPromotionListener();
+
+      listener.setKeys(new String[]{PidReaderCounterListener.RESOURCE_TOTAL_PROCESSED});
+
+      return listener;
+   }
+}
@@ -0,0 +1,85 @@
+package ca.uhn.fhir.jpa.batch.mdm.job;
+
+/*-
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2021 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import ca.uhn.fhir.jpa.api.config.DaoConfig;
+import ca.uhn.fhir.jpa.dao.data.IMdmLinkDao;
+import ca.uhn.fhir.jpa.dao.expunge.PartitionRunner;
+import ca.uhn.fhir.jpa.entity.MdmLink;
+import ca.uhn.fhir.mdm.api.MdmMatchResultEnum;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.batch.item.ItemProcessor;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.data.domain.SliceImpl;
+import org.springframework.transaction.PlatformTransactionManager;
+import org.springframework.transaction.support.TransactionTemplate;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Set;
+import java.util.concurrent.ConcurrentLinkedQueue;
+import java.util.stream.Collectors;
+
+/**
+ * Take MdmLink pids in and output golden resource pids out
+ */
+
+public class MdmLinkDeleter implements ItemProcessor<List<Long>, List<Long>> {
+   public static final String PROCESS_NAME = "MdmClear";
+   public static final String THREAD_PREFIX = "mdmClear";
+   private static final Logger ourLog = LoggerFactory.getLogger(MdmLinkDeleter.class);
+   @Autowired
+   protected PlatformTransactionManager myTxManager;
+   @Autowired
+   IMdmLinkDao myMdmLinkDao;
+   @Autowired
+   DaoConfig myDaoConfig;
+
+   @Override
+   public List<Long> process(List<Long> thePidList) throws Exception {
+      ConcurrentLinkedQueue<Long> goldenPidAggregator = new ConcurrentLinkedQueue<>();
+      PartitionRunner partitionRunner = new PartitionRunner(PROCESS_NAME, THREAD_PREFIX, myDaoConfig.getReindexBatchSize(), myDaoConfig.getReindexThreadCount());
+      partitionRunner.runInPartitionedThreads(new SliceImpl<>(thePidList), pids -> removeLinks(pids, goldenPidAggregator));
+      return new ArrayList<>(goldenPidAggregator);
+   }
+
+   private void removeLinks(List<Long> pidList, ConcurrentLinkedQueue<Long> theGoldenPidAggregator) {
+      TransactionTemplate txTemplate = new TransactionTemplate(myTxManager);
+
+      txTemplate.executeWithoutResult(t -> theGoldenPidAggregator.addAll(deleteMdmLinksAndReturnGoldenResourcePids(pidList)));
+   }
+
+   public List<Long> deleteMdmLinksAndReturnGoldenResourcePids(List<Long> thePids) {
+      List<MdmLink> links = myMdmLinkDao.findAllById(thePids);
+      Set<Long> goldenResources = links.stream().map(MdmLink::getGoldenResourcePid).collect(Collectors.toSet());
+      //TODO GGG this is probably invalid... we are essentially looking for GOLDEN -> GOLDEN links, which are either POSSIBLE_DUPLICATE
+      //and REDIRECT
+      goldenResources.addAll(links.stream()
+         .filter(link -> link.getMatchResult().equals(MdmMatchResultEnum.REDIRECT)
+            || link.getMatchResult().equals(MdmMatchResultEnum.POSSIBLE_DUPLICATE))
+         .map(MdmLink::getSourcePid).collect(Collectors.toSet()));
+      ourLog.info("Deleting {} MDM link records...", links.size());
+      myMdmLinkDao.deleteAll(links);
+      ourLog.info("{} MDM link records deleted", links.size());
+      return new ArrayList<>(goldenResources);
+   }
+}
@@ -0,0 +1,52 @@
+package ca.uhn.fhir.jpa.batch.mdm.job;
+
+/*-
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2021 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import ca.uhn.fhir.jpa.batch.reader.BaseReverseCronologicalBatchPidReader;
+import ca.uhn.fhir.jpa.dao.data.IMdmLinkDao;
+import ca.uhn.fhir.jpa.searchparam.ResourceSearch;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.data.domain.PageRequest;
+import org.springframework.data.domain.Pageable;
+
+import java.util.HashSet;
+import java.util.Set;
+
+/**
+ * This is the same as the parent class, except it operates on MdmLink entities instead of resource entities
+ */
+public class ReverseCronologicalBatchMdmLinkPidReader extends BaseReverseCronologicalBatchPidReader {
+   @Autowired
+   IMdmLinkDao myMdmLinkDao;
+
+   @Override
+   protected Set<Long> getNextPidBatch(ResourceSearch resourceSearch) {
+      String resourceName = resourceSearch.getResourceName();
+      Pageable pageable = PageRequest.of(0, getBatchSize());
+      //Expand out the list to handle the REDIRECT/POSSIBLE DUPLICATE ones.
+      return new HashSet<>(myMdmLinkDao.findPidByResourceNameAndThreshold(resourceName, getCurrentHighThreshold(), pageable));
+   }
+
+   @Override
+   protected void setDateFromPidFunction(ResourceSearch resourceSearch) {
+      setDateExtractorFunction(pid -> myMdmLinkDao.findById(pid).get().getCreated());
+   }
+}
@@ -69,7 +69,7 @@ public class PidToIBaseResourceProcessor implements ItemProcessor<List<ResourcePersistentId>, List<IBaseResource>> {
      List<IBaseResource> outgoing = new ArrayList<>();
      sb.loadResourcesByPid(theResourcePersistentId, Collections.emptyList(), outgoing, false, null);

-     ourLog.trace("Loaded resources: {}", outgoing.stream().map(t->t.getIdElement().getValue()).collect(Collectors.joining(", ")));
+     ourLog.trace("Loaded resources: {}", outgoing.stream().filter(t -> t != null).map(t -> t.getIdElement().getValue()).collect(Collectors.joining(", ")));

      return outgoing;

@@ -0,0 +1,213 @@
package ca.uhn.fhir.jpa.batch.reader;

/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.batch.CommonBatchJobConfig;
import ca.uhn.fhir.jpa.batch.job.MultiUrlJobParameterValidator;
import ca.uhn.fhir.jpa.batch.job.model.PartitionedUrl;
import ca.uhn.fhir.jpa.batch.job.model.RequestListJson;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import ca.uhn.fhir.jpa.searchparam.ResourceSearch;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.SortOrderEnum;
import ca.uhn.fhir.rest.api.SortSpec;
import ca.uhn.fhir.rest.param.DateRangeParam;
import org.apache.commons.lang3.time.DateUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.core.JobParameter;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.item.ExecutionContext;
import org.springframework.batch.item.ItemReader;
import org.springframework.batch.item.ItemStream;
import org.springframework.batch.item.ItemStreamException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;

import javax.annotation.Nonnull;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Function;

/**
 * This Spring Batch reader takes the following job parameters:
 * {@link #JOB_PARAM_REQUEST_LIST}: A list of URLs to search for along with the partitions those searches should be performed on
 * {@link #JOB_PARAM_BATCH_SIZE}: The number of resources to return with each search. If omitted, {@link DaoConfig#getExpungeBatchSize} will be used.
 * {@link #JOB_PARAM_START_TIME}: The latest timestamp of entities to search for
 * <p>
 * The reader will return at most {@link #JOB_PARAM_BATCH_SIZE} pids every time it is called, or null
 * once no more matching entities are available. It returns the entities in reverse chronological order
 * and stores its position in the Spring Batch execution context under the key {@link #CURRENT_THRESHOLD_HIGH}
 * appended with "." and the index of the url list item it has gotten up to. This permits
 * restarting jobs that use this reader, so it can pick up where it left off.
 */
public abstract class BaseReverseCronologicalBatchPidReader implements ItemReader<List<Long>>, ItemStream {
	public static final String JOB_PARAM_REQUEST_LIST = "url-list";
	public static final String JOB_PARAM_BATCH_SIZE = "batch-size";
	public static final String JOB_PARAM_START_TIME = "start-time";
	public static final String CURRENT_URL_INDEX = "current.url-index";
	public static final String CURRENT_THRESHOLD_HIGH = "current.threshold-high";
	private static final Logger ourLog = LoggerFactory.getLogger(BaseReverseCronologicalBatchPidReader.class);
	private final BatchDateThresholdUpdater myBatchDateThresholdUpdater = new BatchDateThresholdUpdater();
	private final Map<Integer, Date> myThresholdHighByUrlIndex = new HashMap<>();
	private final Map<Integer, Set<Long>> myAlreadyProcessedPidsWithHighDate = new HashMap<>();
	@Autowired
	private FhirContext myFhirContext;
	@Autowired
	private MatchUrlService myMatchUrlService;
	private List<PartitionedUrl> myPartitionedUrls;
	private Integer myBatchSize;
	private int myUrlIndex = 0;
	private Date myStartTime;

	private static String highKey(int theIndex) {
		return CURRENT_THRESHOLD_HIGH + "." + theIndex;
	}

	@Nonnull
	public static JobParameters buildJobParameters(String theOperationName, Integer theBatchSize, RequestListJson theRequestListJson) {
		Map<String, JobParameter> map = new HashMap<>();
		map.put(MultiUrlJobParameterValidator.JOB_PARAM_OPERATION_NAME, new JobParameter(theOperationName));
		map.put(ReverseCronologicalBatchResourcePidReader.JOB_PARAM_REQUEST_LIST, new JobParameter(theRequestListJson.toJson()));
		map.put(ReverseCronologicalBatchResourcePidReader.JOB_PARAM_START_TIME, new JobParameter(DateUtils.addMinutes(new Date(), CommonBatchJobConfig.MINUTES_IN_FUTURE_TO_PROCESS_FROM)));
		if (theBatchSize != null) {
			map.put(ReverseCronologicalBatchResourcePidReader.JOB_PARAM_BATCH_SIZE, new JobParameter(theBatchSize.longValue()));
		}
		JobParameters parameters = new JobParameters(map);
		return parameters;
	}

	@Autowired
	public void setRequestListJson(@Value("#{jobParameters['" + JOB_PARAM_REQUEST_LIST + "']}") String theRequestListJson) {
		RequestListJson requestListJson = RequestListJson.fromJson(theRequestListJson);
		myPartitionedUrls = requestListJson.getPartitionedUrls();
	}

	@Autowired
	public void setStartTime(@Value("#{jobParameters['" + JOB_PARAM_START_TIME + "']}") Date theStartTime) {
		myStartTime = theStartTime;
	}

	@Override
	public List<Long> read() throws Exception {
		while (myUrlIndex < myPartitionedUrls.size()) {
			List<Long> nextBatch = getNextBatch();
			if (nextBatch.isEmpty()) {
				++myUrlIndex;
				continue;
			}

			return nextBatch;
		}
		return null;
	}

	protected List<Long> getNextBatch() {
		RequestPartitionId requestPartitionId = myPartitionedUrls.get(myUrlIndex).getRequestPartitionId();
		ResourceSearch resourceSearch = myMatchUrlService.getResourceSearch(myPartitionedUrls.get(myUrlIndex).getUrl(), requestPartitionId);
		myAlreadyProcessedPidsWithHighDate.putIfAbsent(myUrlIndex, new HashSet<>());
		Set<Long> newPids = getNextPidBatch(resourceSearch);

		if (ourLog.isDebugEnabled()) {
			ourLog.debug("Search for {}{} returned {} results", resourceSearch.getResourceName(), resourceSearch.getSearchParameterMap().toNormalizedQueryString(myFhirContext), newPids.size());
			ourLog.debug("Results: {}", newPids);
		}

		setDateFromPidFunction(resourceSearch);

		List<Long> retval = new ArrayList<>(newPids);
		Date newThreshold = myBatchDateThresholdUpdater.updateThresholdAndCache(getCurrentHighThreshold(), myAlreadyProcessedPidsWithHighDate.get(myUrlIndex), retval);
		myThresholdHighByUrlIndex.put(myUrlIndex, newThreshold);

		return retval;
	}

	protected Date getCurrentHighThreshold() {
		return myThresholdHighByUrlIndex.get(myUrlIndex);
	}

	protected void setDateExtractorFunction(Function<Long, Date> theDateExtractorFunction) {
		myBatchDateThresholdUpdater.setDateFromPid(theDateExtractorFunction);
	}

	protected void addDateCountAndSortToSearch(ResourceSearch resourceSearch) {
		SearchParameterMap map = resourceSearch.getSearchParameterMap();
		map.setLastUpdated(new DateRangeParam().setUpperBoundInclusive(getCurrentHighThreshold()));
		map.setLoadSynchronousUpTo(myBatchSize);
		map.setSort(new SortSpec(Constants.PARAM_LASTUPDATED, SortOrderEnum.DESC));
	}

	@Override
	public void open(ExecutionContext executionContext) throws ItemStreamException {
		if (executionContext.containsKey(CURRENT_URL_INDEX)) {
			myUrlIndex = (int) executionContext.getLong(CURRENT_URL_INDEX);
		}
		for (int index = 0; index < myPartitionedUrls.size(); ++index) {
			String key = highKey(index);
			if (executionContext.containsKey(key)) {
				myThresholdHighByUrlIndex.put(index, new Date(executionContext.getLong(key)));
			} else {
				myThresholdHighByUrlIndex.put(index, myStartTime);
			}
		}
	}

	@Override
	public void update(ExecutionContext executionContext) throws ItemStreamException {
		executionContext.putLong(CURRENT_URL_INDEX, myUrlIndex);
		for (int index = 0; index < myPartitionedUrls.size(); ++index) {
			Date date = myThresholdHighByUrlIndex.get(index);
			if (date != null) {
				executionContext.putLong(highKey(index), date.getTime());
			}
		}
	}

	@Override
	public void close() throws ItemStreamException {
	}

	protected Integer getBatchSize() {
		return myBatchSize;
	}

	@Autowired
	public void setBatchSize(@Value("#{jobParameters['" + JOB_PARAM_BATCH_SIZE + "']}") Integer theBatchSize) {
		myBatchSize = theBatchSize;
	}

	protected Set<Long> getAlreadySeenPids() {
		return myAlreadyProcessedPidsWithHighDate.get(myUrlIndex);
	}

	protected abstract Set<Long> getNextPidBatch(ResourceSearch resourceSearch);

	protected abstract void setDateFromPidFunction(ResourceSearch resourceSearch);
}

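Editor's note: the diff does not include BatchDateThresholdUpdater itself, so the following is only a simplified sketch of the bookkeeping its name and call site suggest, with invented class and method names. Because the date query uses an inclusive upper bound (myCreated <= :highThreshold, or _lastUpdated with setUpperBoundInclusive), the reader must remember every pid it already returned at the boundary timestamp, or the next page would return those rows again.

	import java.util.ArrayList;
	import java.util.Date;
	import java.util.HashSet;
	import java.util.List;
	import java.util.Set;
	import java.util.function.Function;

	// Hypothetical, simplified pager illustrating the threshold bookkeeping.
	class ReverseChronologicalPager {
		private Date myHighThreshold;                              // inclusive upper bound for the next query
		private final Set<Long> mySeenAtThreshold = new HashSet<>(); // pids already returned at that exact timestamp
		private final Function<Long, Date> myDateOfPid;

		ReverseChronologicalPager(Date theStartTime, Function<Long, Date> theDateOfPid) {
			myHighThreshold = theStartTime;
			myDateOfPid = theDateOfPid;
		}

		/** theCandidates: pids with date <= myHighThreshold, sorted newest first. */
		List<Long> acceptBatch(List<Long> theCandidates) {
			List<Long> retval = new ArrayList<>();
			for (Long pid : theCandidates) {
				if (!mySeenAtThreshold.contains(pid)) {
					retval.add(pid);
				}
			}
			if (!retval.isEmpty()) {
				// the oldest date in the batch becomes the next inclusive bound
				Date oldest = myDateOfPid.apply(retval.get(retval.size() - 1));
				if (!oldest.equals(myHighThreshold)) {
					mySeenAtThreshold.clear();
				}
				for (Long pid : retval) {
					if (myDateOfPid.apply(pid).equals(oldest)) {
						mySeenAtThreshold.add(pid);
					}
				}
				myHighThreshold = oldest;
			}
			return retval;
		}
	}

Persisting myHighThreshold and the seen set per URL index (as open()/update() do above) is what makes restarted jobs resume exactly where they stopped.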
@@ -22,7 +22,7 @@ package ca.uhn.fhir.jpa.batch.reader;

import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.batch.job.MultiUrlProcessorJobConfig;
import ca.uhn.fhir.jpa.batch.CommonBatchJobConfig;
import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import com.fasterxml.jackson.core.JsonProcessingException;

@@ -93,7 +93,7 @@ public class CronologicalBatchAllResourcePidReader implements ItemReader<List<Lo
	public static JobParameters buildJobParameters(Integer theBatchSize, RequestPartitionId theRequestPartitionId) {
		Map<String, JobParameter> map = new HashMap<>();
		map.put(CronologicalBatchAllResourcePidReader.JOB_PARAM_REQUEST_PARTITION, new JobParameter(theRequestPartitionId.toJson()));
		map.put(CronologicalBatchAllResourcePidReader.JOB_PARAM_START_TIME, new JobParameter(DateUtils.addMinutes(new Date(), MultiUrlProcessorJobConfig.MINUTES_IN_FUTURE_TO_PROCESS_FROM)));
		map.put(CronologicalBatchAllResourcePidReader.JOB_PARAM_START_TIME, new JobParameter(DateUtils.addMinutes(new Date(), CommonBatchJobConfig.MINUTES_IN_FUTURE_TO_PROCESS_FROM)));
		if (theBatchSize != null) {
			map.put(CronologicalBatchAllResourcePidReader.JOB_PARAM_BATCH_SIZE, new JobParameter(theBatchSize.longValue()));
		}

@@ -20,209 +20,52 @@ package ca.uhn.fhir.jpa.batch.reader;
 * #L%
 */

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.batch.job.MultiUrlJobParameterValidator;
import ca.uhn.fhir.jpa.batch.job.MultiUrlProcessorJobConfig;
import ca.uhn.fhir.jpa.batch.job.model.PartitionedUrl;
import ca.uhn.fhir.jpa.batch.job.model.RequestListJson;
import ca.uhn.fhir.jpa.dao.IResultIterator;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import ca.uhn.fhir.jpa.searchparam.ResourceSearch;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.SortOrderEnum;
import ca.uhn.fhir.rest.api.SortSpec;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import ca.uhn.fhir.rest.param.DateRangeParam;
import org.apache.commons.lang3.time.DateUtils;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.core.JobParameter;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.item.ExecutionContext;
import org.springframework.batch.item.ItemReader;
import org.springframework.batch.item.ItemStream;
import org.springframework.batch.item.ItemStreamException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;

import javax.annotation.Nonnull;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;

/**
 * This Spring Batch reader takes the following job parameters:
 * {@link #JOB_PARAM_REQUEST_LIST}: A list of URLs to search for along with the partitions those searches should be performed on
 * {@link #JOB_PARAM_BATCH_SIZE}: The number of resources to return with each search. If omitted, {@link DaoConfig#getExpungeBatchSize} will be used.
 * {@link #JOB_PARAM_START_TIME}: The latest timestamp of resources to search for
 * <p>
 * The reader will return at most {@link #JOB_PARAM_BATCH_SIZE} pids every time it is called, or null
 * once no more matching resources are available. It returns the resources in reverse chronological order
 * and stores its position in the Spring Batch execution context under the key {@link #CURRENT_THRESHOLD_HIGH}
 * appended with "." and the index of the url list item it has gotten up to. This permits
 * restarting jobs that use this reader, so it can pick up where it left off.
 */
public class ReverseCronologicalBatchResourcePidReader implements ItemReader<List<Long>>, ItemStream {
	private static final Logger ourLog = LoggerFactory.getLogger(ReverseCronologicalBatchResourcePidReader.class);

	public static final String JOB_PARAM_REQUEST_LIST = "url-list";
	public static final String JOB_PARAM_BATCH_SIZE = "batch-size";
	public static final String JOB_PARAM_START_TIME = "start-time";

	public static final String CURRENT_URL_INDEX = "current.url-index";
	public static final String CURRENT_THRESHOLD_HIGH = "current.threshold-high";

	@Autowired
	private FhirContext myFhirContext;
	@Autowired
	private MatchUrlService myMatchUrlService;
public class ReverseCronologicalBatchResourcePidReader extends BaseReverseCronologicalBatchPidReader {
	@Autowired
	private DaoRegistry myDaoRegistry;
	@Autowired
	private BatchResourceSearcher myBatchResourceSearcher;

	private final BatchDateThresholdUpdater myBatchDateThresholdUpdater = new BatchDateThresholdUpdater();

	private List<PartitionedUrl> myPartitionedUrls;
	private Integer myBatchSize;
	private final Map<Integer, Date> myThresholdHighByUrlIndex = new HashMap<>();
	private final Map<Integer, Set<Long>> myAlreadyProcessedPidsWithHighDate = new HashMap<>();

	private int myUrlIndex = 0;
	private Date myStartTime;

	@Autowired
	public void setRequestListJson(@Value("#{jobParameters['" + JOB_PARAM_REQUEST_LIST + "']}") String theRequestListJson) {
		RequestListJson requestListJson = RequestListJson.fromJson(theRequestListJson);
		myPartitionedUrls = requestListJson.getPartitionedUrls();
	}

	@Autowired
	public void setBatchSize(@Value("#{jobParameters['" + JOB_PARAM_BATCH_SIZE + "']}") Integer theBatchSize) {
		myBatchSize = theBatchSize;
	}

	@Autowired
	public void setStartTime(@Value("#{jobParameters['" + JOB_PARAM_START_TIME + "']}") Date theStartTime) {
		myStartTime = theStartTime;
	}

	@Override
	public List<Long> read() throws Exception {
		while (myUrlIndex < myPartitionedUrls.size()) {
			List<Long> nextBatch = getNextBatch();
			if (nextBatch.isEmpty()) {
				++myUrlIndex;
				continue;
			}

			return nextBatch;
		}
		return null;
	}

	private List<Long> getNextBatch() {
		RequestPartitionId requestPartitionId = myPartitionedUrls.get(myUrlIndex).getRequestPartitionId();
		ResourceSearch resourceSearch = myMatchUrlService.getResourceSearch(myPartitionedUrls.get(myUrlIndex).getUrl(), requestPartitionId);
	protected Set<Long> getNextPidBatch(ResourceSearch resourceSearch) {
		Set<Long> retval = new LinkedHashSet<>();
		addDateCountAndSortToSearch(resourceSearch);

		// Perform the search
		IResultIterator resultIter = myBatchResourceSearcher.performSearch(resourceSearch, myBatchSize);
		Set<Long> newPids = new LinkedHashSet<>();
		Set<Long> alreadySeenPids = myAlreadyProcessedPidsWithHighDate.computeIfAbsent(myUrlIndex, i -> new HashSet<>());
		Integer batchSize = getBatchSize();
		IResultIterator resultIter = myBatchResourceSearcher.performSearch(resourceSearch, batchSize);
		Set<Long> alreadySeenPids = getAlreadySeenPids();

		do {
			List<Long> pids = resultIter.getNextResultBatch(myBatchSize).stream().map(ResourcePersistentId::getIdAsLong).collect(Collectors.toList());
			newPids.addAll(pids);
			newPids.removeAll(alreadySeenPids);
		} while (newPids.size() < myBatchSize && resultIter.hasNext());

		if (ourLog.isDebugEnabled()) {
			ourLog.debug("Search for {}{} returned {} results", resourceSearch.getResourceName(), resourceSearch.getSearchParameterMap().toNormalizedQueryString(myFhirContext), newPids.size());
			ourLog.debug("Results: {}", newPids);
		}

		setDateFromPidFunction(resourceSearch);

		List<Long> retval = new ArrayList<>(newPids);
		Date newThreshold = myBatchDateThresholdUpdater.updateThresholdAndCache(myThresholdHighByUrlIndex.get(myUrlIndex), myAlreadyProcessedPidsWithHighDate.get(myUrlIndex), retval);
		myThresholdHighByUrlIndex.put(myUrlIndex, newThreshold);
		do {
			List<Long> pids = resultIter.getNextResultBatch(batchSize).stream().map(ResourcePersistentId::getIdAsLong).collect(Collectors.toList());
			retval.addAll(pids);
			retval.removeAll(alreadySeenPids);
		} while (retval.size() < batchSize && resultIter.hasNext());

		return retval;
	}

	private void setDateFromPidFunction(ResourceSearch resourceSearch) {
	@Override
	protected void setDateFromPidFunction(ResourceSearch resourceSearch) {
		final IFhirResourceDao dao = myDaoRegistry.getResourceDao(resourceSearch.getResourceName());

		myBatchDateThresholdUpdater.setDateFromPid(pid -> {
		setDateExtractorFunction(pid -> {
			IBaseResource oldestResource = dao.readByPid(new ResourcePersistentId(pid));
			return oldestResource.getMeta().getLastUpdated();
		});
	}

	private void addDateCountAndSortToSearch(ResourceSearch resourceSearch) {
		SearchParameterMap map = resourceSearch.getSearchParameterMap();
		map.setLastUpdated(new DateRangeParam().setUpperBoundInclusive(myThresholdHighByUrlIndex.get(myUrlIndex)));
		map.setLoadSynchronousUpTo(myBatchSize);
		map.setSort(new SortSpec(Constants.PARAM_LASTUPDATED, SortOrderEnum.DESC));
	}

	@Override
	public void open(ExecutionContext executionContext) throws ItemStreamException {
		if (executionContext.containsKey(CURRENT_URL_INDEX)) {
			myUrlIndex = new Long(executionContext.getLong(CURRENT_URL_INDEX)).intValue();
		}
		for (int index = 0; index < myPartitionedUrls.size(); ++index) {
			String key = highKey(index);
			if (executionContext.containsKey(key)) {
				myThresholdHighByUrlIndex.put(index, new Date(executionContext.getLong(key)));
			} else {
				myThresholdHighByUrlIndex.put(index, myStartTime);
			}
		}
	}

	private static String highKey(int theIndex) {
		return CURRENT_THRESHOLD_HIGH + "." + theIndex;
	}

	@Override
	public void update(ExecutionContext executionContext) throws ItemStreamException {
		executionContext.putLong(CURRENT_URL_INDEX, myUrlIndex);
		for (int index = 0; index < myPartitionedUrls.size(); ++index) {
			Date date = myThresholdHighByUrlIndex.get(index);
			if (date != null) {
				executionContext.putLong(highKey(index), date.getTime());
			}
		}
	}

	@Override
	public void close() throws ItemStreamException {
	}

	@Nonnull
	public static JobParameters buildJobParameters(String theOperationName, Integer theBatchSize, RequestListJson theRequestListJson) {
		Map<String, JobParameter> map = new HashMap<>();
		map.put(MultiUrlJobParameterValidator.JOB_PARAM_OPERATION_NAME, new JobParameter(theOperationName));
		map.put(ReverseCronologicalBatchResourcePidReader.JOB_PARAM_REQUEST_LIST, new JobParameter(theRequestListJson.toJson()));
		map.put(ReverseCronologicalBatchResourcePidReader.JOB_PARAM_START_TIME, new JobParameter(DateUtils.addMinutes(new Date(), MultiUrlProcessorJobConfig.MINUTES_IN_FUTURE_TO_PROCESS_FROM)));
		if (theBatchSize != null) {
			map.put(ReverseCronologicalBatchResourcePidReader.JOB_PARAM_BATCH_SIZE, new JobParameter(theBatchSize.longValue()));
		}
		JobParameters parameters = new JobParameters(map);
		return parameters;
	}
}

@@ -24,18 +24,23 @@ import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
import ca.uhn.fhir.jpa.dao.index.IdHelperService;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.partition.SystemRequestDetails;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.util.QueryChunker;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import org.hl7.fhir.instance.model.api.IIdType;
import org.slf4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import javax.annotation.Nonnull;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;

import static org.slf4j.LoggerFactory.getLogger;

@@ -52,17 +57,19 @@ public class ResourceVersionSvcDaoImpl implements IResourceVersionSvc {
	DaoRegistry myDaoRegistry;
	@Autowired
	IResourceTableDao myResourceTableDao;
	@Autowired
	IdHelperService myIdHelperService;

	@Override
	@Nonnull
	public ResourceVersionMap getVersionMap(String theResourceName, SearchParameterMap theSearchParamMap) {
	public ResourceVersionMap getVersionMap(RequestPartitionId theRequestPartitionId, String theResourceName, SearchParameterMap theSearchParamMap) {
		IFhirResourceDao<?> dao = myDaoRegistry.getResourceDao(theResourceName);

		if (ourLog.isDebugEnabled()) {
			ourLog.debug("About to retrieve version map for resource type: {}", theResourceName);
		}

		List<Long> matchingIds = dao.searchForIds(theSearchParamMap, new SystemRequestDetails().setRequestPartitionId(RequestPartitionId.allPartitions())).stream()
		List<Long> matchingIds = dao.searchForIds(theSearchParamMap, new SystemRequestDetails().setRequestPartitionId(theRequestPartitionId)).stream()
			.map(ResourcePersistentId::getIdAsLong)
			.collect(Collectors.toList());

@@ -74,4 +81,95 @@ public class ResourceVersionSvcDaoImpl implements IResourceVersionSvc {

		return ResourceVersionMap.fromResourceTableEntities(allById);
	}

	/**
	 * Retrieves the latest versions for any resource ids that are found.
	 * If they are not found, they will not be contained in the returned map.
	 * The key is the same value that was passed in, so the
	 * consumer can look up the result using the id it already has.
	 *
	 * This method should not throw, so it can safely be consumed in
	 * transactions.
	 *
	 * @param theRequestPartitionId - request partition id
	 * @param theIds - list of IIdTypes for resources of interest.
	 * @return a map from each found id to its persistent id, including the version
	 */
	@Override
	public ResourcePersistentIdMap getLatestVersionIdsForResourceIds(RequestPartitionId theRequestPartitionId, List<IIdType> theIds) {
		ResourcePersistentIdMap idToPID = new ResourcePersistentIdMap();
		HashMap<String, List<IIdType>> resourceTypeToIds = new HashMap<>();

		for (IIdType id : theIds) {
			String resourceType = id.getResourceType();
			if (!resourceTypeToIds.containsKey(resourceType)) {
				resourceTypeToIds.put(resourceType, new ArrayList<>());
			}
			resourceTypeToIds.get(resourceType).add(id);
		}

		for (String resourceType : resourceTypeToIds.keySet()) {
			ResourcePersistentIdMap idAndPID = getIdsOfExistingResources(theRequestPartitionId,
				resourceTypeToIds.get(resourceType));
			idToPID.putAll(idAndPID);
		}

		return idToPID;
	}

	/**
	 * Helper method to determine if some resources exist in the DB (without throwing).
	 * Returns a map with an entry for every resource found.
	 * If a resource is not found, it won't be included in the map.
	 *
	 * @param theIds - list of IIdType ids (for the same resource type)
	 * @return a map from each found id to its persistent id
	 */
	private ResourcePersistentIdMap getIdsOfExistingResources(RequestPartitionId thePartitionId,
																				 Collection<IIdType> theIds) {
		// these are the found Ids that were in the db
		ResourcePersistentIdMap retval = new ResourcePersistentIdMap();

		if (theIds == null || theIds.isEmpty()) {
			return retval;
		}

		List<ResourcePersistentId> resourcePersistentIds = myIdHelperService.resolveResourcePersistentIdsWithCache(thePartitionId,
			theIds.stream().collect(Collectors.toList()));

		// we'll use this map to fetch pids that require versions
		HashMap<Long, ResourcePersistentId> pidsToVersionToResourcePid = new HashMap<>();

		// fill in our map
		for (ResourcePersistentId pid : resourcePersistentIds) {
			if (pid.getVersion() == null) {
				pidsToVersionToResourcePid.put(pid.getIdAsLong(), pid);
			}
			Optional<IIdType> idOp = theIds.stream()
				.filter(i -> i.getIdPart().equals(pid.getAssociatedResourceId().getIdPart()))
				.findFirst();
			// this should always be present, since the id was passed in, but land of optionals...
			idOp.ifPresent(id -> retval.put(id, pid));
		}

		// set any versions we don't already have
		if (!pidsToVersionToResourcePid.isEmpty()) {
			Collection<Object[]> resourceEntries = myResourceTableDao
				.getResourceVersionsForPid(new ArrayList<>(pidsToVersionToResourcePid.keySet()));

			for (Object[] record : resourceEntries) {
				// order matters!
				Long retPid = (Long) record[0];
				String resType = (String) record[1];
				Long version = (Long) record[2];
				pidsToVersionToResourcePid.get(retPid).setVersion(version);
			}
		}

		return retval;
	}
}

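Editor's note: a minimal usage sketch of the new service, assuming an injected IResourceVersionSvc and an R4 IdType; the ResourcePersistentIdMap accessors shown (containsKey, getResourcePersistentId) are the same ones used later in this diff by BaseStorageDao.

	// Look up the current version of a Patient without throwing when it does not exist.
	IIdType patientId = new org.hl7.fhir.r4.model.IdType("Patient/123");
	ResourcePersistentIdMap map = myResourceVersionSvc.getLatestVersionIdsForResourceIds(
		RequestPartitionId.allPartitions(), Collections.singletonList(patientId));
	if (map.containsKey(patientId)) {
		Long version = map.getResourcePersistentId(patientId).getVersion();
		// e.g. rewrite a reference as Patient/123/_history/<version>
	} else {
		// not in the database; the caller decides whether to create a placeholder
	}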
@@ -16,6 +16,8 @@ import ca.uhn.fhir.jpa.batch.BatchJobsConfig;
import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter;
import ca.uhn.fhir.jpa.batch.config.NonPersistedBatchConfigurer;
import ca.uhn.fhir.jpa.batch.job.PartitionedUrlValidator;
import ca.uhn.fhir.jpa.batch.mdm.MdmBatchJobSubmitterFactoryImpl;
import ca.uhn.fhir.jpa.batch.mdm.MdmClearJobSubmitterImpl;
import ca.uhn.fhir.jpa.batch.reader.BatchResourceSearcher;
import ca.uhn.fhir.jpa.batch.svc.BatchJobSubmitterImpl;
import ca.uhn.fhir.jpa.binstore.BinaryAccessProvider;

@@ -35,7 +37,6 @@ import ca.uhn.fhir.jpa.dao.LegacySearchBuilder;
import ca.uhn.fhir.jpa.dao.MatchResourceUrlService;
import ca.uhn.fhir.jpa.dao.SearchBuilderFactory;
import ca.uhn.fhir.jpa.dao.TransactionProcessor;
import ca.uhn.fhir.jpa.dao.expunge.DeleteExpungeService;
import ca.uhn.fhir.jpa.dao.expunge.ExpungeEverythingService;
import ca.uhn.fhir.jpa.dao.expunge.ExpungeOperation;
import ca.uhn.fhir.jpa.dao.expunge.ExpungeService;

@@ -121,6 +122,7 @@ import ca.uhn.fhir.jpa.search.cache.DatabaseSearchCacheSvcImpl;
import ca.uhn.fhir.jpa.search.cache.DatabaseSearchResultCacheSvcImpl;
import ca.uhn.fhir.jpa.search.cache.ISearchCacheSvc;
import ca.uhn.fhir.jpa.search.cache.ISearchResultCacheSvc;
import ca.uhn.fhir.jpa.search.elastic.IndexNamePrefixLayoutStrategy;
import ca.uhn.fhir.jpa.search.reindex.IResourceReindexingSvc;
import ca.uhn.fhir.jpa.search.reindex.ResourceReindexer;
import ca.uhn.fhir.jpa.search.reindex.ResourceReindexingSvcImpl;

@@ -136,6 +138,8 @@ import ca.uhn.fhir.jpa.term.api.ITermConceptMappingSvc;
import ca.uhn.fhir.jpa.util.MemoryCacheService;
import ca.uhn.fhir.jpa.validation.JpaResourceLoader;
import ca.uhn.fhir.jpa.validation.ValidationSettings;
import ca.uhn.fhir.mdm.api.IMdmBatchJobSubmitterFactory;
import ca.uhn.fhir.mdm.api.IMdmClearJobSubmitter;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.storage.IDeleteExpungeJobSubmitter;
import ca.uhn.fhir.rest.api.server.storage.IReindexJobSubmitter;

@@ -152,6 +156,7 @@ import org.hl7.fhir.utilities.npm.FilesystemPackageCacheManager;
import org.springframework.batch.core.configuration.annotation.BatchConfigurer;
import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@@ -285,8 +290,8 @@ public abstract class BaseConfig {
	 * bean, but it provides a partially completed entity manager
	 * factory with HAPI FHIR customizations
	 */
	protected LocalContainerEntityManagerFactoryBean entityManagerFactory() {
		LocalContainerEntityManagerFactoryBean retVal = new HapiFhirLocalContainerEntityManagerFactoryBean();
	protected LocalContainerEntityManagerFactoryBean entityManagerFactory(ConfigurableListableBeanFactory myConfigurableListableBeanFactory) {
		LocalContainerEntityManagerFactoryBean retVal = new HapiFhirLocalContainerEntityManagerFactoryBean(myConfigurableListableBeanFactory);
		configureEntityManagerFactory(retVal, fhirContext());
		return retVal;
	}

@@ -375,17 +380,6 @@ public abstract class BaseConfig {
		return new TermConceptMappingSvcImpl();
	}

	@Bean
	public ThreadPoolTaskExecutor searchCoordinatorThreadFactory() {
		final ThreadPoolTaskExecutor threadPoolTaskExecutor = new ThreadPoolTaskExecutor();
		threadPoolTaskExecutor.setThreadNamePrefix("search_coord_");
		threadPoolTaskExecutor.setCorePoolSize(searchCoordCorePoolSize);
		threadPoolTaskExecutor.setMaxPoolSize(searchCoordMaxPoolSize);
		threadPoolTaskExecutor.setQueueCapacity(searchCoordQueueCapacity);
		threadPoolTaskExecutor.initialize();
		return threadPoolTaskExecutor;
	}

	@Bean
	public TaskScheduler taskScheduler() {
		ConcurrentTaskScheduler retVal = new ConcurrentTaskScheduler();

@@ -515,10 +509,20 @@ public abstract class BaseConfig {
	}

	@Bean
	public MdmLinkExpandSvc myMdmLinkExpandSvc() {
	public MdmLinkExpandSvc mdmLinkExpandSvc() {
		return new MdmLinkExpandSvc();
	}

	@Bean
	IMdmBatchJobSubmitterFactory mdmBatchJobSubmitterFactory() {
		return new MdmBatchJobSubmitterFactoryImpl();
	}

	@Bean
	IMdmClearJobSubmitter mdmClearJobSubmitter() {
		return new MdmClearJobSubmitterImpl();
	}

	@Bean
	@Lazy
	public TerminologyUploaderProvider terminologyUploaderProvider() {

@@ -836,8 +840,8 @@ public abstract class BaseConfig {
	}

	@Bean
	public ISearchCoordinatorSvc searchCoordinatorSvc(ThreadPoolTaskExecutor searchCoordinatorThreadFactory) {
		return new SearchCoordinatorSvcImpl(searchCoordinatorThreadFactory);
	public ISearchCoordinatorSvc searchCoordinatorSvc() {
		return new SearchCoordinatorSvcImpl();
	}

	@Bean

@@ -891,11 +895,6 @@ public abstract class BaseConfig {
		return new DaoSearchParamSynchronizer();
	}

	@Bean
	public DeleteExpungeService deleteExpungeService() {
		return new DeleteExpungeService();
	}

	@Bean
	public ResourceTableFKProvider resourceTableFKProvider() {
		return new ResourceTableFKProvider();

@@ -911,6 +910,11 @@ public abstract class BaseConfig {
		return new PredicateBuilderFactory(theApplicationContext);
	}

	@Bean
	public IndexNamePrefixLayoutStrategy indexLayoutStrategy() {
		return new IndexNamePrefixLayoutStrategy();
	}

	@Bean
	public JpaResourceLoader jpaResourceLoader() {
		return new JpaResourceLoader();

@@ -23,6 +23,9 @@ package ca.uhn.fhir.jpa.config;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.query.criteria.LiteralHandlingMode;
import org.hibernate.resource.jdbc.spi.PhysicalConnectionHandlingMode;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
import org.springframework.orm.hibernate5.SpringBeanContainer;
import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean;

import java.util.Map;

@@ -32,6 +35,14 @@ import java.util.Map;
 * that sets some sensible default property values
 */
public class HapiFhirLocalContainerEntityManagerFactoryBean extends LocalContainerEntityManagerFactoryBean {

	// https://stackoverflow.com/questions/57902388/how-to-inject-spring-beans-into-the-hibernate-envers-revisionlistener
	ConfigurableListableBeanFactory myConfigurableListableBeanFactory;

	public HapiFhirLocalContainerEntityManagerFactoryBean(ConfigurableListableBeanFactory theConfigurableListableBeanFactory) {
		myConfigurableListableBeanFactory = theConfigurableListableBeanFactory;
	}

	@Override
	public Map<String, Object> getJpaPropertyMap() {
		Map<String, Object> retVal = super.getJpaPropertyMap();

@@ -63,6 +74,11 @@ public class HapiFhirLocalContainerEntityManagerFactoryBean extends LocalContain
		if (!retVal.containsKey(AvailableSettings.BATCH_VERSIONED_DATA)) {
			retVal.put(AvailableSettings.BATCH_VERSIONED_DATA, "true");
		}
		// Why is this here, you ask? LocalContainerEntityManagerFactoryBean actually clobbers the setting hibernate needs
		// in order to be able to resolve beans, so we add it back in manually here
		if (!retVal.containsKey(AvailableSettings.BEAN_CONTAINER)) {
			retVal.put(AvailableSettings.BEAN_CONTAINER, new SpringBeanContainer(myConfigurableListableBeanFactory));
		}

		return retVal;
	}

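Editor's note: the StackOverflow link in the code explains the motivation. As a rough illustration (not code from this commit; the listener and service names below are made up), registering a SpringBeanContainer under AvailableSettings.BEAN_CONTAINER lets Hibernate obtain entity listeners from the Spring context instead of instantiating them reflectively, so their @Autowired fields are actually populated.

	import org.hibernate.envers.RevisionListener;
	import org.springframework.beans.factory.annotation.Autowired;
	import org.springframework.stereotype.Component;

	// Hypothetical Envers listener: with the bean container set, Hibernate asks
	// Spring for this bean rather than calling new MyRevisionListener(), so the
	// injected service is non-null when newRevision() fires.
	@Component
	public class MyRevisionListener implements RevisionListener {

		@Autowired
		private SomeAuditService myAuditService; // hypothetical service bean

		@Override
		public void newRevision(Object theRevisionEntity) {
			myAuditService.tagRevision(theRevisionEntity);
		}
	}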
@@ -618,49 +618,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
		skipUpdatingTags |= myConfig.getTagStorageMode() == DaoConfig.TagStorageModeEnum.INLINE;

		if (!skipUpdatingTags) {
			Set<ResourceTag> allDefs = new HashSet<>();
			Set<ResourceTag> allTagsOld = getAllTagDefinitions(theEntity);

			if (theResource instanceof IResource) {
				extractTagsHapi(theTransactionDetails, (IResource) theResource, theEntity, allDefs);
			} else {
				extractTagsRi(theTransactionDetails, (IAnyResource) theResource, theEntity, allDefs);
			}

			RuntimeResourceDefinition def = myContext.getResourceDefinition(theResource);
			if (def.isStandardType() == false) {
				String profile = def.getResourceProfile("");
				if (isNotBlank(profile)) {
					TagDefinition profileDef = getTagOrNull(theTransactionDetails, TagTypeEnum.PROFILE, NS_JPA_PROFILE, profile, null);

					ResourceTag tag = theEntity.addTag(profileDef);
					allDefs.add(tag);
					theEntity.setHasTags(true);
				}
			}

			Set<ResourceTag> allTagsNew = getAllTagDefinitions(theEntity);
			Set<TagDefinition> allDefsPresent = new HashSet<>();
			allTagsNew.forEach(tag -> {

				// Don't keep duplicate tags
				if (!allDefsPresent.add(tag.getTag())) {
					theEntity.getTags().remove(tag);
				}

				// Drop any tags that have been removed
				if (!allDefs.contains(tag)) {
					if (shouldDroppedTagBeRemovedOnUpdate(theRequest, tag)) {
						theEntity.getTags().remove(tag);
					}
				}

			});

			if (!allTagsOld.equals(allTagsNew)) {
				changed = true;
			}
			theEntity.setHasTags(!allTagsNew.isEmpty());
			changed |= updateTags(theTransactionDetails, theRequest, theResource, theEntity);
		}

		} else {

@@ -669,7 +627,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
			encoding = ResourceEncodingEnum.DEL;
		}

		if (thePerformIndexing && changed == false) {
		if (thePerformIndexing && !changed) {
			if (theEntity.getId() == null) {
				changed = true;
			} else if (myConfig.isMassIngestionMode()) {

@@ -701,6 +659,50 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
		return retVal;
	}

	private boolean updateTags(TransactionDetails theTransactionDetails, RequestDetails theRequest, IBaseResource theResource, ResourceTable theEntity) {
		Set<ResourceTag> allDefs = new HashSet<>();
		Set<ResourceTag> allTagsOld = getAllTagDefinitions(theEntity);

		if (theResource instanceof IResource) {
			extractTagsHapi(theTransactionDetails, (IResource) theResource, theEntity, allDefs);
		} else {
			extractTagsRi(theTransactionDetails, (IAnyResource) theResource, theEntity, allDefs);
		}

		RuntimeResourceDefinition def = myContext.getResourceDefinition(theResource);
		if (def.isStandardType() == false) {
			String profile = def.getResourceProfile("");
			if (isNotBlank(profile)) {
				TagDefinition profileDef = getTagOrNull(theTransactionDetails, TagTypeEnum.PROFILE, NS_JPA_PROFILE, profile, null);

				ResourceTag tag = theEntity.addTag(profileDef);
				allDefs.add(tag);
				theEntity.setHasTags(true);
			}
		}

		Set<ResourceTag> allTagsNew = getAllTagDefinitions(theEntity);
		Set<TagDefinition> allDefsPresent = new HashSet<>();
		allTagsNew.forEach(tag -> {

			// Don't keep duplicate tags
			if (!allDefsPresent.add(tag.getTag())) {
				theEntity.getTags().remove(tag);
			}

			// Drop any tags that have been removed
			if (!allDefs.contains(tag)) {
				if (shouldDroppedTagBeRemovedOnUpdate(theRequest, tag)) {
					theEntity.getTags().remove(tag);
				}
			}

		});

		theEntity.setHasTags(!allTagsNew.isEmpty());
		return !allTagsOld.equals(allTagsNew);
	}

	@SuppressWarnings("unchecked")
	private <R extends IBaseResource> R populateResourceMetadataHapi(Class<R> theResourceType, IBaseResourceEntity theEntity, @Nullable Collection<? extends BaseTag> theTagList, boolean theForHistoryOperation, IResource res, Long theVersion) {
		R retVal = (R) res;

@@ -1205,7 +1207,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
		if (thePerformIndexing || ((ResourceTable) theEntity).getVersion() == 1) {

			newParams = new ResourceIndexedSearchParams();

			mySearchParamWithInlineReferencesExtractor.populateFromResource(newParams, theTransactionDetails, entity, theResource, existingParams, theRequest, thePerformIndexing);

			changed = populateResourceIntoEntity(theTransactionDetails, theRequest, theResource, entity, true);

@@ -1227,12 +1228,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
			}

			entity.setUpdated(theTransactionDetails.getTransactionDate());
			if (theResource instanceof IResource) {
				entity.setLanguage(((IResource) theResource).getLanguage().getValue());
			} else {
				entity.setLanguage(((IAnyResource) theResource).getLanguageElement().getValue());
			}

			newParams.populateResourceTableSearchParamsPresentFlags(entity);
			entity.setIndexStatus(INDEX_STATUS_INDEXED);
		}

@@ -1288,52 +1283,8 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
			postUpdate(entity, (T) theResource);
		}

		/*
		 * Create history entry
		 */
		if (theCreateNewHistoryEntry) {
			boolean versionedTags = getConfig().getTagStorageMode() == DaoConfig.TagStorageModeEnum.VERSIONED;
			final ResourceHistoryTable historyEntry = entity.toHistory(versionedTags);
			historyEntry.setEncoding(changed.getEncoding());
			historyEntry.setResource(changed.getResource());

			ourLog.debug("Saving history entry {}", historyEntry.getIdDt());
			myResourceHistoryTableDao.save(historyEntry);

			// Save resource source
			String source = null;
			String requestId = theRequest != null ? theRequest.getRequestId() : null;
			if (theResource != null) {
				if (myContext.getVersion().getVersion().isEqualOrNewerThan(FhirVersionEnum.R4)) {
					IBaseMetaType meta = theResource.getMeta();
					source = MetaUtil.getSource(myContext, meta);
				}
				if (myContext.getVersion().getVersion().equals(FhirVersionEnum.DSTU3)) {
					source = ((IBaseHasExtensions) theResource.getMeta())
						.getExtension()
						.stream()
						.filter(t -> HapiExtensions.EXT_META_SOURCE.equals(t.getUrl()))
						.filter(t -> t.getValue() instanceof IPrimitiveType)
						.map(t -> ((IPrimitiveType<?>) t.getValue()).getValueAsString())
						.findFirst()
						.orElse(null);
				}
			}
			boolean haveSource = isNotBlank(source) && myConfig.getStoreMetaSourceInformation().isStoreSourceUri();
			boolean haveRequestId = isNotBlank(requestId) && myConfig.getStoreMetaSourceInformation().isStoreRequestId();
			if (haveSource || haveRequestId) {
				ResourceHistoryProvenanceEntity provenance = new ResourceHistoryProvenanceEntity();
				provenance.setResourceHistoryTable(historyEntry);
				provenance.setResourceTable(entity);
				provenance.setPartitionId(entity.getPartitionId());
				if (haveRequestId) {
					provenance.setRequestId(left(requestId, Constants.REQUEST_ID_LENGTH));
				}
				if (haveSource) {
					provenance.setSourceUri(source);
				}
				myEntityManager.persist(provenance);
			}
			createHistoryEntry(theRequest, theResource, entity, changed);
		}

		/*

@@ -1415,6 +1366,51 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
		return entity;
	}

	private void createHistoryEntry(RequestDetails theRequest, IBaseResource theResource, ResourceTable theEntity, EncodedResource theChanged) {
		boolean versionedTags = getConfig().getTagStorageMode() == DaoConfig.TagStorageModeEnum.VERSIONED;
		final ResourceHistoryTable historyEntry = theEntity.toHistory(versionedTags);
		historyEntry.setEncoding(theChanged.getEncoding());
		historyEntry.setResource(theChanged.getResource());

		ourLog.debug("Saving history entry {}", historyEntry.getIdDt());
		myResourceHistoryTableDao.save(historyEntry);

		// Save resource source
		String source = null;
		String requestId = theRequest != null ? theRequest.getRequestId() : null;
		if (theResource != null) {
			if (myContext.getVersion().getVersion().isEqualOrNewerThan(FhirVersionEnum.R4)) {
				IBaseMetaType meta = theResource.getMeta();
				source = MetaUtil.getSource(myContext, meta);
			}
			if (myContext.getVersion().getVersion().equals(FhirVersionEnum.DSTU3)) {
				source = ((IBaseHasExtensions) theResource.getMeta())
					.getExtension()
					.stream()
					.filter(t -> HapiExtensions.EXT_META_SOURCE.equals(t.getUrl()))
					.filter(t -> t.getValue() instanceof IPrimitiveType)
					.map(t -> ((IPrimitiveType<?>) t.getValue()).getValueAsString())
					.findFirst()
					.orElse(null);
			}
		}
		boolean haveSource = isNotBlank(source) && myConfig.getStoreMetaSourceInformation().isStoreSourceUri();
		boolean haveRequestId = isNotBlank(requestId) && myConfig.getStoreMetaSourceInformation().isStoreRequestId();
		if (haveSource || haveRequestId) {
			ResourceHistoryProvenanceEntity provenance = new ResourceHistoryProvenanceEntity();
			provenance.setResourceHistoryTable(historyEntry);
			provenance.setResourceTable(theEntity);
			provenance.setPartitionId(theEntity.getPartitionId());
			if (haveRequestId) {
				provenance.setRequestId(left(requestId, Constants.REQUEST_ID_LENGTH));
			}
			if (haveSource) {
				provenance.setSourceUri(source);
			}
			myEntityManager.persist(provenance);
		}
	}

	private void validateIncomingResourceTypeMatchesExisting(IBaseResource theResource, ResourceTable entity) {
		String resourceType = myContext.getResourceType(theResource);
		if (!resourceType.equals(entity.getResourceType())) {

@@ -136,9 +136,12 @@ import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.UUID;

@@ -1406,7 +1409,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
		}
	}

	translateSearchParams(theParams);
	translateListSearchParams(theParams);

	notifySearchInterceptors(theParams, theRequest);

@@ -1431,7 +1434,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
		return retVal;
	}

	private void translateSearchParams(SearchParameterMap theParams) {
	private void translateListSearchParams(SearchParameterMap theParams) {
		Iterator<String> keyIterator = theParams.keySet().iterator();

		// Translate _list=42 to _has=List:item:_id=42

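Editor's note: to make the comment above concrete, here is a rough sketch of what the _list translation amounts to. The real method mutates a SearchParameterMap; the map-of-strings form below is a deliberate simplification.

	import java.util.HashMap;
	import java.util.Map;

	// Simplified sketch: rewrite _list into the equivalent _has reverse-chain
	// expression ("find resources referenced as 'item' by List/<id>").
	class ListParamTranslation {
		static Map<String, String> translateListParam(Map<String, String> theParams) {
			Map<String, String> result = new HashMap<>(theParams);
			String listId = result.remove("_list");
			if (listId != null) {
				result.put("_has:List:item:_id", listId); // _list=42 -> _has=List:item:_id=42
			}
			return result;
		}
	}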
@@ -7,6 +7,7 @@ import ca.uhn.fhir.jpa.util.ResourceCountCache;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor.ActionRequestDetails;
import ca.uhn.fhir.util.StopWatch;
import com.google.common.annotations.VisibleForTesting;

@@ -25,19 +25,18 @@ import ca.uhn.fhir.context.RuntimeSearchParam;
import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
import ca.uhn.fhir.jpa.api.model.LazyDaoMethodOutcome;
import ca.uhn.fhir.jpa.cache.IResourceVersionSvc;
import ca.uhn.fhir.jpa.cache.ResourcePersistentIdMap;
import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource;
import ca.uhn.fhir.jpa.model.entity.ModelConfig;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.searchparam.util.JpaParamUtil;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
import ca.uhn.fhir.model.api.IQueryParameterAnd;
import ca.uhn.fhir.rest.api.QualifiedParamList;
import ca.uhn.fhir.rest.api.server.IPreResourceAccessDetails;

@@ -45,12 +44,16 @@ import ca.uhn.fhir.rest.api.server.IPreResourceShowDetails;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.SimplePreResourceAccessDetails;
import ca.uhn.fhir.rest.api.server.SimplePreResourceShowDetails;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
import ca.uhn.fhir.rest.param.QualifierDetails;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
import ca.uhn.fhir.util.BundleUtil;
import ca.uhn.fhir.util.FhirTerser;
import ca.uhn.fhir.util.OperationOutcomeUtil;

@@ -91,6 +94,10 @@ public abstract class BaseStorageDao {
	protected DaoRegistry myDaoRegistry;
	@Autowired
	protected ModelConfig myModelConfig;
	@Autowired
	protected IResourceVersionSvc myResourceVersionSvc;
	@Autowired
	protected DaoConfig myDaoConfig;

	@VisibleForTesting
	public void setSearchParamRegistry(ISearchParamRegistry theSearchParamRegistry) {

@@ -204,10 +211,33 @@ public abstract class BaseStorageDao {
		for (IBaseReference nextReference : referencesToVersion) {
			IIdType referenceElement = nextReference.getReferenceElement();
			if (!referenceElement.hasBaseUrl()) {
				String resourceType = referenceElement.getResourceType();
				IFhirResourceDao<?> dao = myDaoRegistry.getResourceDao(resourceType);
				String targetVersionId = dao.getCurrentVersionId(referenceElement);
				String newTargetReference = referenceElement.withVersion(targetVersionId).getValue();

				ResourcePersistentIdMap resourceVersionMap = myResourceVersionSvc.getLatestVersionIdsForResourceIds(RequestPartitionId.allPartitions(),
					Collections.singletonList(referenceElement)
				);

				// 3 cases:
				// 1) there exists a resource in the db with some version (use this version)
				// 2) no resource exists, but we will create one (eventually). The version is 1
				// 3) no resource exists, and none will be made -> throw
				Long version;
				if (resourceVersionMap.containsKey(referenceElement)) {
					// the resource exists... the latest version
					// will be the value in the ResourcePersistentId
					version = resourceVersionMap.getResourcePersistentId(referenceElement).getVersion();
				} else if (myDaoConfig.isAutoCreatePlaceholderReferenceTargets()) {
					// the map doesn't contain the object,
					// but auto-create placeholders is on,
					// so the version will be 1 (the first version)
					version = 1L;
				} else {
					// resource not found,
					// and auto-create placeholders is not set...
					// we throw
					throw new ResourceNotFoundException(referenceElement);
				}
				String newTargetReference = referenceElement.withVersion(version.toString()).getValue();
				nextReference.setReference(newTargetReference);
			}
		}

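Editor's note: a standalone sketch of the three-case decision above, with the lookup result and config flag passed in as stand-ins for the injected services (names invented, logic mirrors the code).

	// Resolve a versioned reference string, e.g. "Patient/123" -> "Patient/123/_history/4".
	static String resolveVersionedReference(IIdType theRef,
														 Optional<Long> theExistingVersion,
														 boolean theAutoCreatePlaceholders) {
		Long version;
		if (theExistingVersion.isPresent()) {
			version = theExistingVersion.get();          // case 1: use the stored version
		} else if (theAutoCreatePlaceholders) {
			version = 1L;                                // case 2: first version of a future placeholder
		} else {
			throw new ResourceNotFoundException(theRef); // case 3: nothing to point at
		}
		return theRef.withVersion(version.toString()).getValue();
	}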
(File diff suppressed because it is too large)

@@ -0,0 +1,38 @@
package ca.uhn.fhir.jpa.dao;

/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import org.apache.commons.lang3.StringUtils;

/**
 * Utility class to help identify classes of failure.
 */
public class DaoFailureUtil {

	public static boolean isTagStorageFailure(Throwable t) {
		if (StringUtils.isBlank(t.getMessage())) {
			return false;
		} else {
			String msg = t.getMessage().toLowerCase();
			return msg.contains("hfj_tag_def") || msg.contains("hfj_res_tag");
		}
	}
}

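Editor's note: the utility classifies failures by message text; HFJ_TAG_DEF and HFJ_RES_TAG are the tables backing tag storage. A hypothetical caller (not from this commit) might use it to decide whether a write is worth retrying after a concurrent tag-definition collision:

	// Hypothetical retry wrapper: retry once if the failure looks like a tag
	// storage race, otherwise rethrow. storeResource() is a stand-in for the
	// real DAO write.
	try {
		storeResource();
	} catch (Exception e) {
		if (DaoFailureUtil.isTagStorageFailure(e)) {
			storeResource(); // the tag rows now exist; a single retry usually succeeds
		} else {
			throw e;
		}
	}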
@@ -20,15 +20,16 @@ package ca.uhn.fhir.jpa.dao.data;
 * #L%
 */

import ca.uhn.fhir.mdm.api.MdmMatchResultEnum;
import ca.uhn.fhir.jpa.entity.MdmLink;
import org.springframework.beans.factory.annotation.Value;
import ca.uhn.fhir.mdm.api.MdmMatchResultEnum;
import org.springframework.data.domain.Pageable;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import org.springframework.stereotype.Repository;

import java.util.Date;
import java.util.List;

@Repository

@@ -70,4 +71,6 @@ public interface IMdmLinkDao extends JpaRepository<MdmLink, Long> {
	@Query("SELECT ml.myGoldenResourcePid as goldenPid, ml.mySourcePid as sourcePid FROM MdmLink ml WHERE ml.myGoldenResourcePid = :goldenPid and ml.myMatchResult = :matchResult")
	List<MdmPidTuple> expandPidsByGoldenResourcePidAndMatchResult(@Param("goldenPid") Long theSourcePid, @Param("matchResult") MdmMatchResultEnum theMdmMatchResultEnum);

	@Query("SELECT ml.myId FROM MdmLink ml WHERE ml.myMdmSourceType = :resourceName AND ml.myCreated <= :highThreshold ORDER BY ml.myCreated DESC")
	List<Long> findPidByResourceNameAndThreshold(@Param("resourceName") String theResourceName, @Param("highThreshold") Date theHighThreshold, Pageable thePageable);
}

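Editor's note: the MDM reader shown earlier drives this query one page at a time; a minimal call mirrors that usage (the batch size of 1000 is invented for illustration).

	// First page of up to 1000 MdmLink pids for Patient-sourced links created on
	// or before the current high threshold, newest first.
	Pageable page = PageRequest.of(0, 1000);
	List<Long> pids = myMdmLinkDao.findPidByResourceNameAndThreshold("Patient", getCurrentHighThreshold(), page);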
@@ -100,6 +100,16 @@ public interface IResourceTableDao extends JpaRepository<ResourceTable, Long> {
    @Query("SELECT t.myVersion FROM ResourceTable t WHERE t.myId = :pid")
    Long findCurrentVersionByPid(@Param("pid") Long thePid);

    /**
     * This query will return rows with the following values:
     * Id (resource pid - long), ResourceType (Patient, etc), version (long)
     * Order matters!
     * @param pid - list of pids to get versions for
     * @return a collection of Object[] rows, each containing [pid, resource type, version] in that order
     */
    @Query("SELECT t.myId, t.myResourceType, t.myVersion FROM ResourceTable t WHERE t.myId IN ( :pid )")
    Collection<Object[]> getResourceVersionsForPid(@Param("pid") List<Long> pid);

    @Query("SELECT t FROM ResourceTable t LEFT JOIN FETCH t.myForcedId WHERE t.myPartitionId.myPartitionId IS NULL AND t.myId = :pid")
    Optional<ResourceTable> readByPartitionIdNull(@Param("pid") Long theResourceId);
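Since the query above returns untyped `Object[]` rows whose column order matters, a consuming sketch (the map construction is illustrative, not from the diff):

    Collection<Object[]> rows = myResourceTableDao.getResourceVersionsForPid(pids);
    Map<Long, Long> pidToVersion = new HashMap<>();
    for (Object[] row : rows) {
        Long pid = (Long) row[0];               // resource pid
        String resourceType = (String) row[1];  // e.g. "Patient"
        Long version = (Long) row[2];           // current version
        pidToVersion.put(pid, version);
    }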
@@ -1,201 +0,0 @@
package ca.uhn.fhir.jpa.dao.expunge;

/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.model.DeleteMethodOutcome;
import ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao;
import ca.uhn.fhir.jpa.dao.data.IResourceLinkDao;
import ca.uhn.fhir.jpa.dao.index.IdHelperService;
import ca.uhn.fhir.jpa.delete.job.DeleteExpungeProcessor;
import ca.uhn.fhir.jpa.model.entity.ResourceLink;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
import ca.uhn.fhir.util.OperationOutcomeUtil;
import ca.uhn.fhir.util.StopWatch;
import org.hl7.fhir.instance.model.api.IBaseOperationOutcome;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Slice;
import org.springframework.stereotype.Service;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.support.TransactionTemplate;

import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.persistence.PersistenceContextType;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.atomic.AtomicLong;
import java.util.stream.Collectors;

@Service
/**
 * DeleteExpunge is now performed using the {@link ca.uhn.fhir.jpa.delete.DeleteExpungeJobSubmitterImpl} Spring Batch job.
 */
@Deprecated
public class DeleteExpungeService {
    private static final Logger ourLog = LoggerFactory.getLogger(DeleteExpungeService.class);

    @Autowired
    protected PlatformTransactionManager myPlatformTransactionManager;
    @PersistenceContext(type = PersistenceContextType.TRANSACTION)
    private EntityManager myEntityManager;
    @Autowired
    private FhirContext myFhirContext;
    @Autowired
    private ResourceTableFKProvider myResourceTableFKProvider;
    @Autowired
    private IResourceLinkDao myResourceLinkDao;
    @Autowired
    private IInterceptorBroadcaster myInterceptorBroadcaster;
    @Autowired
    private DaoConfig myDaoConfig;
    @Autowired
    private IdHelperService myIdHelper;

    public DeleteMethodOutcome expungeByResourcePids(String theUrl, String theResourceName, Slice<Long> thePids, RequestDetails theRequest) {
        StopWatch w = new StopWatch();
        if (thePids.isEmpty()) {
            return new DeleteMethodOutcome();
        }

        HookParams params = new HookParams()
            .add(RequestDetails.class, theRequest)
            .addIfMatchesType(ServletRequestDetails.class, theRequest)
            .add(String.class, theUrl);
        CompositeInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequest, Pointcut.STORAGE_PRE_DELETE_EXPUNGE, params);

        TransactionTemplate txTemplate = new TransactionTemplate(myPlatformTransactionManager);
        txTemplate.executeWithoutResult(t -> validateOkToDeleteAndExpunge(thePids));

        ourLog.info("Expunging all records linking to {} resources...", thePids.getNumber());
        AtomicLong expungedEntitiesCount = new AtomicLong();
        AtomicLong expungedResourcesCount = new AtomicLong();
        PartitionRunner partitionRunner = new PartitionRunner(DeleteExpungeProcessor.PROCESS_NAME, DeleteExpungeProcessor.THREAD_PREFIX, myDaoConfig.getExpungeBatchSize(), myDaoConfig.getExpungeThreadCount());
        partitionRunner.runInPartitionedThreads(thePids, pidChunk -> deleteInTransaction(theResourceName, pidChunk, expungedResourcesCount, expungedEntitiesCount, theRequest));
        ourLog.info("Expunged a total of {} records", expungedEntitiesCount);

        IBaseOperationOutcome oo;
        if (expungedResourcesCount.get() == 0) {
            oo = OperationOutcomeUtil.newInstance(myFhirContext);
            String message = myFhirContext.getLocalizer().getMessageSanitized(BaseHapiFhirResourceDao.class, "unableToDeleteNotFound", theUrl);
            String severity = "warning";
            String code = "not-found";
            OperationOutcomeUtil.addIssue(myFhirContext, oo, severity, message, null, code);
        } else {
            oo = OperationOutcomeUtil.newInstance(myFhirContext);
            String message = myFhirContext.getLocalizer().getMessage(BaseHapiFhirResourceDao.class, "successfulDeletes", expungedResourcesCount.get(), w.getMillis());
            String severity = "information";
            String code = "informational";
            OperationOutcomeUtil.addIssue(myFhirContext, oo, severity, message, null, code);
        }

        DeleteMethodOutcome retval = new DeleteMethodOutcome();
        retval.setExpungedResourcesCount(expungedResourcesCount.get());
        retval.setExpungedEntitiesCount(expungedEntitiesCount.get());
        retval.setOperationOutcome(oo);
        return retval;
    }

    public void validateOkToDeleteAndExpunge(Slice<Long> theAllTargetPids) {
        if (!myDaoConfig.isEnforceReferentialIntegrityOnDelete()) {
            ourLog.info("Referential integrity on delete disabled. Skipping referential integrity check.");
            return;
        }

        List<ResourceLink> conflictResourceLinks = Collections.synchronizedList(new ArrayList<>());
        PartitionRunner partitionRunner = new PartitionRunner(DeleteExpungeProcessor.PROCESS_NAME, DeleteExpungeProcessor.THREAD_PREFIX, myDaoConfig.getExpungeBatchSize(), myDaoConfig.getExpungeThreadCount());
        partitionRunner.runInPartitionedThreads(theAllTargetPids, someTargetPids -> findResourceLinksWithTargetPidIn(theAllTargetPids.getContent(), someTargetPids, conflictResourceLinks));

        if (conflictResourceLinks.isEmpty()) {
            return;
        }

        ResourceLink firstConflict = conflictResourceLinks.get(0);

        //NB-GGG: We previously instantiated these ID values from firstConflict.getSourceResource().getIdDt(), but in a situation where we
        //actually had to run delete conflict checks in multiple partitions, the executor service starts its own sessions on a per thread basis, and by the time
        //we arrive here, those sessions are closed. So instead, we resolve them from PIDs, which are eagerly loaded.
        String sourceResourceId = myIdHelper.resourceIdFromPidOrThrowException(firstConflict.getSourceResourcePid()).toVersionless().getValue();
        String targetResourceId = myIdHelper.resourceIdFromPidOrThrowException(firstConflict.getTargetResourcePid()).toVersionless().getValue();

        throw new InvalidRequestException("DELETE with _expunge=true failed. Unable to delete " +
            targetResourceId + " because " + sourceResourceId + " refers to it via the path " + firstConflict.getSourcePath());
    }

    public void findResourceLinksWithTargetPidIn(List<Long> theAllTargetPids, List<Long> theSomeTargetPids, List<ResourceLink> theConflictResourceLinks) {
        // We only need to find one conflict, so if we found one already in an earlier partition run, we can skip the rest of the searches
        if (theConflictResourceLinks.isEmpty()) {
            List<ResourceLink> conflictResourceLinks = myResourceLinkDao.findWithTargetPidIn(theSomeTargetPids).stream()
                // Filter out resource links for which we are planning to delete the source.
                // theAllTargetPids contains a list of all the pids we are planning to delete. So we only want
                // to consider a link to be a conflict if the source of that link is not in theAllTargetPids.
                .filter(link -> !theAllTargetPids.contains(link.getSourceResourcePid()))
                .collect(Collectors.toList());

            // We do this in two steps to avoid lock contention on this synchronized list
            theConflictResourceLinks.addAll(conflictResourceLinks);
        }
    }

    private void deleteInTransaction(String theResourceName, List<Long> thePidChunk, AtomicLong theExpungedResourcesCount, AtomicLong theExpungedEntitiesCount, RequestDetails theRequest) {
        TransactionTemplate txTemplate = new TransactionTemplate(myPlatformTransactionManager);
        txTemplate.executeWithoutResult(t -> deleteAllRecordsLinkingTo(theResourceName, thePidChunk, theExpungedResourcesCount, theExpungedEntitiesCount, theRequest));
    }

    private void deleteAllRecordsLinkingTo(String theResourceName, List<Long> thePids, AtomicLong theExpungedResourcesCount, AtomicLong theExpungedEntitiesCount, RequestDetails theRequest) {
        HookParams params = new HookParams()
            .add(String.class, theResourceName)
            .add(List.class, thePids)
            .add(AtomicLong.class, theExpungedEntitiesCount)
            .add(RequestDetails.class, theRequest)
            .addIfMatchesType(ServletRequestDetails.class, theRequest);
        CompositeInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequest, Pointcut.STORAGE_PRE_DELETE_EXPUNGE_PID_LIST, params);

        String pidListString = thePids.toString().replace("[", "(").replace("]", ")");
        List<ResourceForeignKey> resourceForeignKeys = myResourceTableFKProvider.getResourceForeignKeys();

        for (ResourceForeignKey resourceForeignKey : resourceForeignKeys) {
            deleteRecordsByColumn(pidListString, resourceForeignKey, theExpungedEntitiesCount);
        }

        // Lastly we need to delete records from the resource table all of these other tables link to:
        ResourceForeignKey resourceTablePk = new ResourceForeignKey("HFJ_RESOURCE", "RES_ID");
        int entitiesDeleted = deleteRecordsByColumn(pidListString, resourceTablePk, theExpungedEntitiesCount);
        theExpungedResourcesCount.addAndGet(entitiesDeleted);
    }

    private int deleteRecordsByColumn(String thePidListString, ResourceForeignKey theResourceForeignKey, AtomicLong theExpungedEntitiesCount) {
        int entitesDeleted = myEntityManager.createNativeQuery("DELETE FROM " + theResourceForeignKey.table + " WHERE " + theResourceForeignKey.key + " IN " + thePidListString).executeUpdate();
        ourLog.info("Expunged {} records from {}", entitesDeleted, theResourceForeignKey.table);
        theExpungedEntitiesCount.addAndGet(entitesDeleted);
        return entitesDeleted;
    }
}
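For context on the removed `deleteAllRecordsLinkingTo` above: the native DELETE statements were built by rendering the PID list as a SQL IN clause via `List.toString()`. A standalone illustration with hypothetical values:

    List<Long> pids = Arrays.asList(101L, 102L, 103L);
    String pidListString = pids.toString().replace("[", "(").replace("]", ")"); // "(101, 102, 103)"
    String sql = "DELETE FROM HFJ_RESOURCE WHERE RES_ID IN " + pidListString;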
@@ -65,6 +65,7 @@ import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.model.entity.ResourceTag;
import ca.uhn.fhir.jpa.model.entity.SearchParamPresent;
import ca.uhn.fhir.jpa.model.entity.TagDefinition;
import ca.uhn.fhir.jpa.util.MemoryCacheService;
import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
@@ -100,6 +101,9 @@ public class ExpungeEverythingService {

    private TransactionTemplate myTxTemplate;

    @Autowired
    private MemoryCacheService myMemoryCacheService;

    @PostConstruct
    public void initTxTemplate() {
        myTxTemplate = new TransactionTemplate(myPlatformTransactionManager);
@@ -122,37 +126,37 @@ public class ExpungeEverythingService {
            counter.addAndGet(doExpungeEverythingQuery("UPDATE " + TermCodeSystem.class.getSimpleName() + " d SET d.myCurrentVersion = null"));
            return null;
        });
        counter.addAndGet(expungeEverythingByType(NpmPackageVersionResourceEntity.class));
        counter.addAndGet(expungeEverythingByType(NpmPackageVersionEntity.class));
        counter.addAndGet(expungeEverythingByType(NpmPackageEntity.class));
        counter.addAndGet(expungeEverythingByType(SearchParamPresent.class));
        counter.addAndGet(expungeEverythingByType(BulkImportJobFileEntity.class));
        counter.addAndGet(expungeEverythingByType(BulkImportJobEntity.class));
        counter.addAndGet(expungeEverythingByType(ForcedId.class));
        counter.addAndGet(expungeEverythingByType(ResourceIndexedSearchParamDate.class));
        counter.addAndGet(expungeEverythingByType(ResourceIndexedSearchParamNumber.class));
        counter.addAndGet(expungeEverythingByType(ResourceIndexedSearchParamQuantity.class));
        counter.addAndGet(expungeEverythingByType(ResourceIndexedSearchParamQuantityNormalized.class));
        counter.addAndGet(expungeEverythingByType(ResourceIndexedSearchParamString.class));
        counter.addAndGet(expungeEverythingByType(ResourceIndexedSearchParamToken.class));
        counter.addAndGet(expungeEverythingByType(ResourceIndexedSearchParamUri.class));
        counter.addAndGet(expungeEverythingByType(ResourceIndexedSearchParamCoords.class));
        counter.addAndGet(expungeEverythingByType(ResourceIndexedComboStringUnique.class));
        counter.addAndGet(expungeEverythingByType(ResourceIndexedComboTokenNonUnique.class));
        counter.addAndGet(expungeEverythingByType(ResourceLink.class));
        counter.addAndGet(expungeEverythingByType(SearchResult.class));
        counter.addAndGet(expungeEverythingByType(SearchInclude.class));
        counter.addAndGet(expungeEverythingByType(TermValueSetConceptDesignation.class));
        counter.addAndGet(expungeEverythingByType(TermValueSetConcept.class));
        counter.addAndGet(expungeEverythingByType(TermValueSet.class));
        counter.addAndGet(expungeEverythingByType(TermConceptParentChildLink.class));
        counter.addAndGet(expungeEverythingByType(TermConceptMapGroupElementTarget.class));
        counter.addAndGet(expungeEverythingByType(TermConceptMapGroupElement.class));
        counter.addAndGet(expungeEverythingByType(TermConceptMapGroup.class));
        counter.addAndGet(expungeEverythingByType(TermConceptMap.class));
        counter.addAndGet(expungeEverythingByType(TermConceptProperty.class));
        counter.addAndGet(expungeEverythingByType(TermConceptDesignation.class));
        counter.addAndGet(expungeEverythingByType(TermConcept.class));
        counter.addAndGet(expungeEverythingByTypeWithoutPurging(NpmPackageVersionResourceEntity.class));
        counter.addAndGet(expungeEverythingByTypeWithoutPurging(NpmPackageVersionEntity.class));
        counter.addAndGet(expungeEverythingByTypeWithoutPurging(NpmPackageEntity.class));
        counter.addAndGet(expungeEverythingByTypeWithoutPurging(SearchParamPresent.class));
        counter.addAndGet(expungeEverythingByTypeWithoutPurging(BulkImportJobFileEntity.class));
        counter.addAndGet(expungeEverythingByTypeWithoutPurging(BulkImportJobEntity.class));
        counter.addAndGet(expungeEverythingByTypeWithoutPurging(ForcedId.class));
        counter.addAndGet(expungeEverythingByTypeWithoutPurging(ResourceIndexedSearchParamDate.class));
        counter.addAndGet(expungeEverythingByTypeWithoutPurging(ResourceIndexedSearchParamNumber.class));
        counter.addAndGet(expungeEverythingByTypeWithoutPurging(ResourceIndexedSearchParamQuantity.class));
        counter.addAndGet(expungeEverythingByTypeWithoutPurging(ResourceIndexedSearchParamQuantityNormalized.class));
        counter.addAndGet(expungeEverythingByTypeWithoutPurging(ResourceIndexedSearchParamString.class));
        counter.addAndGet(expungeEverythingByTypeWithoutPurging(ResourceIndexedSearchParamToken.class));
        counter.addAndGet(expungeEverythingByTypeWithoutPurging(ResourceIndexedSearchParamUri.class));
        counter.addAndGet(expungeEverythingByTypeWithoutPurging(ResourceIndexedSearchParamCoords.class));
        counter.addAndGet(expungeEverythingByTypeWithoutPurging(ResourceIndexedComboStringUnique.class));
        counter.addAndGet(expungeEverythingByTypeWithoutPurging(ResourceIndexedComboTokenNonUnique.class));
        counter.addAndGet(expungeEverythingByTypeWithoutPurging(ResourceLink.class));
        counter.addAndGet(expungeEverythingByTypeWithoutPurging(SearchResult.class));
        counter.addAndGet(expungeEverythingByTypeWithoutPurging(SearchInclude.class));
        counter.addAndGet(expungeEverythingByTypeWithoutPurging(TermValueSetConceptDesignation.class));
        counter.addAndGet(expungeEverythingByTypeWithoutPurging(TermValueSetConcept.class));
        counter.addAndGet(expungeEverythingByTypeWithoutPurging(TermValueSet.class));
        counter.addAndGet(expungeEverythingByTypeWithoutPurging(TermConceptParentChildLink.class));
        counter.addAndGet(expungeEverythingByTypeWithoutPurging(TermConceptMapGroupElementTarget.class));
        counter.addAndGet(expungeEverythingByTypeWithoutPurging(TermConceptMapGroupElement.class));
        counter.addAndGet(expungeEverythingByTypeWithoutPurging(TermConceptMapGroup.class));
        counter.addAndGet(expungeEverythingByTypeWithoutPurging(TermConceptMap.class));
        counter.addAndGet(expungeEverythingByTypeWithoutPurging(TermConceptProperty.class));
        counter.addAndGet(expungeEverythingByTypeWithoutPurging(TermConceptDesignation.class));
        counter.addAndGet(expungeEverythingByTypeWithoutPurging(TermConcept.class));
        myTxTemplate.execute(t -> {
            for (TermCodeSystem next : myEntityManager.createQuery("SELECT c FROM " + TermCodeSystem.class.getName() + " c", TermCodeSystem.class).getResultList()) {
                next.setCurrentVersion(null);
@@ -160,52 +164,66 @@ public class ExpungeEverythingService {
            }
            return null;
        });
        counter.addAndGet(expungeEverythingByType(TermCodeSystemVersion.class));
        counter.addAndGet(expungeEverythingByType(TermCodeSystem.class));
        counter.addAndGet(expungeEverythingByType(SubscriptionTable.class));
        counter.addAndGet(expungeEverythingByType(ResourceHistoryTag.class));
        counter.addAndGet(expungeEverythingByType(ResourceTag.class));
        counter.addAndGet(expungeEverythingByType(TagDefinition.class));
        counter.addAndGet(expungeEverythingByType(ResourceHistoryProvenanceEntity.class));
        counter.addAndGet(expungeEverythingByType(ResourceHistoryTable.class));
        counter.addAndGet(expungeEverythingByType(ResourceTable.class));
        counter.addAndGet(expungeEverythingByType(PartitionEntity.class));
        counter.addAndGet(expungeEverythingByTypeWithoutPurging(TermCodeSystemVersion.class));
        counter.addAndGet(expungeEverythingByTypeWithoutPurging(TermCodeSystem.class));
        counter.addAndGet(expungeEverythingByTypeWithoutPurging(SubscriptionTable.class));
        counter.addAndGet(expungeEverythingByTypeWithoutPurging(ResourceHistoryTag.class));
        counter.addAndGet(expungeEverythingByTypeWithoutPurging(ResourceTag.class));
        counter.addAndGet(expungeEverythingByTypeWithoutPurging(TagDefinition.class));
        counter.addAndGet(expungeEverythingByTypeWithoutPurging(ResourceHistoryProvenanceEntity.class));
        counter.addAndGet(expungeEverythingByTypeWithoutPurging(ResourceHistoryTable.class));
        counter.addAndGet(expungeEverythingByTypeWithoutPurging(ResourceTable.class));
        counter.addAndGet(expungeEverythingByTypeWithoutPurging(PartitionEntity.class));
        myTxTemplate.execute(t -> {
            counter.addAndGet(doExpungeEverythingQuery("DELETE from " + Search.class.getSimpleName() + " d"));
            return null;
        });

        purgeAllCaches();

        ourLog.info("COMPLETED GLOBAL $expunge - Deleted {} rows", counter.get());
    }

    private void purgeAllCaches() {
        myTxTemplate.execute(t -> {
            myMemoryCacheService.invalidateAllCaches();
            return null;
        });
    }

    private int expungeEverythingByTypeWithoutPurging(Class<?> theEntityType) {
        int outcome = 0;
        while (true) {
            StopWatch sw = new StopWatch();

            @SuppressWarnings("ConstantConditions")
            int count = myTxTemplate.execute(t -> {
                CriteriaBuilder cb = myEntityManager.getCriteriaBuilder();
                CriteriaQuery<?> cq = cb.createQuery(theEntityType);
                cq.from(theEntityType);
                TypedQuery<?> query = myEntityManager.createQuery(cq);
                query.setMaxResults(1000);
                List<?> results = query.getResultList();
                for (Object result : results) {
                    myEntityManager.remove(result);
                }
                return results.size();
            });

            outcome += count;
            if (count == 0) {
                break;
            }

            ourLog.info("Have deleted {} entities of type {} in {}", outcome, theEntityType.getSimpleName(), sw.toString());
        }
        return outcome;
    }

    public int expungeEverythingByType(Class<?> theEntityType) {

        int outcome = 0;
        while (true) {
            StopWatch sw = new StopWatch();

            @SuppressWarnings("ConstantConditions")
            int count = myTxTemplate.execute(t -> {
                CriteriaBuilder cb = myEntityManager.getCriteriaBuilder();
                CriteriaQuery<?> cq = cb.createQuery(theEntityType);
                cq.from(theEntityType);
                TypedQuery<?> query = myEntityManager.createQuery(cq);
                query.setMaxResults(1000);
                List<?> results = query.getResultList();
                for (Object result : results) {
                    myEntityManager.remove(result);
                }
                return results.size();
            });

            outcome += count;
            if (count == 0) {
                break;
            }

            ourLog.info("Have deleted {} entities of type {} in {}", outcome, theEntityType.getSimpleName(), sw.toString());
        }
        return outcome;
        int result = expungeEverythingByTypeWithoutPurging(theEntityType);
        purgeAllCaches();
        return result;
    }

    private int doExpungeEverythingQuery(String theQuery) {
@@ -94,7 +94,6 @@ public class DaoResourceLinkResolver implements IResourceLinkResolver {
                throw new InvalidRequestException("Resource " + resName + "/" + idPart + " not found, specified in path: " + theSourcePath);

            }

            resolvedResource = createdTableOpt.get();
        }
@@ -34,6 +34,7 @@ import ca.uhn.fhir.jpa.util.MemoryCacheService;
import ca.uhn.fhir.jpa.util.QueryChunker;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import com.google.common.annotations.VisibleForTesting;

@@ -204,7 +205,11 @@ public class IdHelperService {
     */
    @Nonnull
    public List<ResourcePersistentId> resolveResourcePersistentIdsWithCache(RequestPartitionId theRequestPartitionId, List<IIdType> theIds) {
        theIds.forEach(id -> Validate.isTrue(id.hasIdPart()));
        for (IIdType id : theIds) {
            if (!id.hasIdPart()) {
                throw new InvalidRequestException("Parameter value missing in request");
            }
        }

        if (theIds.isEmpty()) {
            return Collections.emptyList();

@@ -303,7 +308,7 @@ public class IdHelperService {
        if (forcedId.isPresent()) {
            retVal.setValue(theResourceType + '/' + forcedId.get());
        } else {
            retVal.setValue(theResourceType + '/' + theId.toString());
            retVal.setValue(theResourceType + '/' + theId);
        }

        return retVal;
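The validation swap above changes the failure mode for an ID with no ID part: `Validate.isTrue` throws `IllegalArgumentException` (surfaced as a server error), while `InvalidRequestException` maps to an HTTP 400. A minimal illustration (the sample ID is hypothetical):

    IIdType id = new IdDt("Patient", null);  // resource type only, no id part
    boolean valid = id.hasIdPart();          // false for this value
    // before: Validate.isTrue(valid) -> IllegalArgumentException
    // after:  new InvalidRequestException("Parameter value missing in request") -> HTTP 400 Bad Request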
@@ -45,9 +45,12 @@ public class MdmLinkExpandSvc {
    @Autowired
    private IdHelperService myIdHelperService;

    public MdmLinkExpandSvc() {
    }

    /**
     * Given a source resource, perform MDM expansion and return all the resource IDs of all resources that are
     * MDM-Matched to this resource.
     *
     * @param theResource The resource to MDM-Expand
     * @return A set of strings representing the FHIR IDs of the expanded resources.
@@ -559,11 +559,6 @@ class PredicateBuilderReference extends BasePredicateBuilder {
                myPredicateBuilder.addPredicateResourceId(theAndOrParams, theResourceName, theRequestPartitionId);
                break;

            case IAnyResource.SP_RES_LANGUAGE:
                addPredicateLanguage(theAndOrParams,
                    null);
                break;

            case Constants.PARAM_HAS:
                addPredicateHas(theResourceName, theAndOrParams, theRequest, theRequestPartitionId);
                break;

@@ -733,9 +728,6 @@ class PredicateBuilderReference extends BasePredicateBuilder {
                null,
                theFilter.getValue());
            return myPredicateBuilder.addPredicateResourceId(Collections.singletonList(Collections.singletonList(param)), myResourceName, theFilter.getOperation(), theRequestPartitionId);
        } else if (theFilter.getParamPath().getName().equals(IAnyResource.SP_RES_LANGUAGE)) {
            return addPredicateLanguage(Collections.singletonList(Collections.singletonList(new StringParam(theFilter.getValue()))),
                theFilter.getOperation());
        }

        RuntimeSearchParam searchParam = mySearchParamRegistry.getActiveSearchParam(theResourceName, theFilter.getParamPath().getName());

@@ -828,45 +820,6 @@ class PredicateBuilderReference extends BasePredicateBuilder {
        return qp;
    }

    private Predicate addPredicateLanguage(List<List<IQueryParameterType>> theList,
                                           SearchFilterParser.CompareOperation operation) {
        for (List<? extends IQueryParameterType> nextList : theList) {

            Set<String> values = new HashSet<>();
            for (IQueryParameterType next : nextList) {
                if (next instanceof StringParam) {
                    String nextValue = ((StringParam) next).getValue();
                    if (isBlank(nextValue)) {
                        continue;
                    }
                    values.add(nextValue);
                } else {
                    throw new InternalErrorException("Language parameter must be of type " + StringParam.class.getCanonicalName() + " - Got " + next.getClass().getCanonicalName());
                }
            }

            if (values.isEmpty()) {
                continue;
            }

            Predicate predicate;
            if ((operation == null) ||
                (operation == SearchFilterParser.CompareOperation.eq)) {
                predicate = myQueryStack.get("myLanguage").as(String.class).in(values);
            } else if (operation == SearchFilterParser.CompareOperation.ne) {
                predicate = myQueryStack.get("myLanguage").as(String.class).in(values).not();
            } else {
                throw new InvalidRequestException("Unsupported operator specified in language query, only \"eq\" and \"ne\" are supported");
            }
            myQueryStack.addPredicate(predicate);
            if (operation != null) {
                return predicate;
            }
        }

        return null;
    }

    private void addPredicateSource(List<List<IQueryParameterType>> theAndOrParams, RequestDetails theRequest) {
        for (List<? extends IQueryParameterType> nextAnd : theAndOrParams) {
            addPredicateSource(nextAnd, SearchFilterParser.CompareOperation.eq, theRequest);
@@ -261,6 +261,9 @@ class PredicateBuilderToken extends BasePredicateBuilder implements IPredicateBu
        if (theSearchParam != null) {
            Set<String> valueSetUris = Sets.newHashSet();
            for (String nextPath : theSearchParam.getPathsSplit()) {
                if (!nextPath.startsWith(myResourceType + ".")) {
                    continue;
                }
                BaseRuntimeChildDefinition def = myContext.newTerser().getDefinition(myResourceType, nextPath);
                if (def instanceof BaseRuntimeDeclaredChildDefinition) {
                    String valueSet = ((BaseRuntimeDeclaredChildDefinition) def).getBindingValueSet();
@@ -92,8 +92,12 @@ public class FhirResourceDaoSearchParameterR4 extends BaseHapiFhirResourceDao<Se
        for (IPrimitiveType<?> nextBaseType : theResource.getBase()) {
            String nextBase = nextBaseType.getValueAsString();
            RuntimeSearchParam existingSearchParam = theSearchParamRegistry.getActiveSearchParam(nextBase, theResource.getCode());
            if (existingSearchParam != null && existingSearchParam.getId() == null) {
                throw new UnprocessableEntityException("Can not override built-in search parameter " + nextBase + ":" + theResource.getCode() + " because overriding is disabled on this server");
            if (existingSearchParam != null) {
                boolean isBuiltIn = existingSearchParam.getId() == null;
                isBuiltIn |= existingSearchParam.getUri().startsWith("http://hl7.org/fhir/SearchParameter/");
                if (isBuiltIn) {
                    throw new UnprocessableEntityException("Can not override built-in search parameter " + nextBase + ":" + theResource.getCode() + " because overriding is disabled on this server");
                }
            }
        }
    }
@@ -25,6 +25,7 @@ import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.jpa.api.model.ResourceVersionConflictResolutionStrategy;
import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao;
import ca.uhn.fhir.jpa.dao.DaoFailureUtil;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;

@@ -93,10 +94,9 @@ public class HapiTransactionService {
             * known to the system already, they'll both try to create a row in HFJ_TAG_DEF,
             * which is the tag definition table. In that case, a constraint error will be
             * thrown by one of the client threads, so we auto-retry in order to avoid
             * annopying spurious failures for the client.
             * annoying spurious failures for the client.
             */
            if (e.getMessage().contains("HFJ_TAG_DEF") || e.getMessage().contains("hfj_tag_def") ||
                e.getMessage().contains("HFJ_RES_TAG") || e.getMessage().contains("hfj_res_tag")) {
            if (DaoFailureUtil.isTagStorageFailure(e)) {
                maxRetries = 3;
            }
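The comment above describes the concurrency race this retry guards against; a minimal standalone sketch of the resulting retry policy (the wrapper method and its name are illustrative, not the actual HapiTransactionService logic):

    static <T> T executeWithTagRetry(java.util.function.Supplier<T> theWork) {
        int maxRetries = 0;
        for (int attempt = 0; ; attempt++) {
            try {
                return theWork.get();
            } catch (RuntimeException e) {
                // only the HFJ_TAG_DEF / HFJ_RES_TAG constraint race earns extra attempts
                if (attempt == 0 && DaoFailureUtil.isTagStorageFailure(e)) {
                    maxRetries = 3;
                }
                if (attempt >= maxRetries) {
                    throw e;
                }
            }
        }
    }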
@@ -21,8 +21,9 @@ package ca.uhn.fhir.jpa.delete.job;
 */

import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.batch.job.MultiUrlProcessorJobConfig;
import ca.uhn.fhir.jpa.batch.job.MultiUrlJobParameterValidator;
import ca.uhn.fhir.jpa.batch.listener.PidReaderCounterListener;
import ca.uhn.fhir.jpa.batch.reader.ReverseCronologicalBatchResourcePidReader;
import ca.uhn.fhir.jpa.batch.writer.SqlExecutorWriter;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import org.springframework.batch.core.Job;

@@ -45,7 +46,7 @@ import static ca.uhn.fhir.jpa.batch.BatchJobsConfig.DELETE_EXPUNGE_JOB_NAME;
 * Delete Expunge job.
 */
@Configuration
public class DeleteExpungeJobConfig extends MultiUrlProcessorJobConfig {
public class DeleteExpungeJobConfig {
    public static final String DELETE_EXPUNGE_URL_LIST_STEP_NAME = "delete-expunge-url-list-step";

    @Autowired

@@ -53,11 +54,23 @@ public class DeleteExpungeJobConfig extends MultiUrlProcessorJobConfig {
    @Autowired
    private JobBuilderFactory myJobBuilderFactory;

    @Autowired
    private MultiUrlJobParameterValidator myMultiUrlProcessorParameterValidator;

    @Autowired
    private PidReaderCounterListener myPidCountRecorderListener;

    @Autowired
    private ReverseCronologicalBatchResourcePidReader myReverseCronologicalBatchResourcePidReader;

    @Autowired
    private SqlExecutorWriter mySqlExecutorWriter;

    @Bean(name = DELETE_EXPUNGE_JOB_NAME)
    @Lazy
    public Job deleteExpungeJob(MatchUrlService theMatchUrlService, DaoRegistry theDaoRegistry) {
    public Job deleteExpungeJob() {
        return myJobBuilderFactory.get(DELETE_EXPUNGE_JOB_NAME)
            .validator(multiUrlProcessorParameterValidator(theMatchUrlService, theDaoRegistry))
            .validator(myMultiUrlProcessorParameterValidator)
            .start(deleteExpungeUrlListStep())
            .build();
    }

@@ -66,10 +79,10 @@ public class DeleteExpungeJobConfig extends MultiUrlProcessorJobConfig {
    public Step deleteExpungeUrlListStep() {
        return myStepBuilderFactory.get(DELETE_EXPUNGE_URL_LIST_STEP_NAME)
            .<List<Long>, List<String>>chunk(1)
            .reader(reverseCronologicalBatchResourcePidReader())
            .reader(myReverseCronologicalBatchResourcePidReader)
            .processor(deleteExpungeProcessor())
            .writer(sqlExecutorWriter())
            .listener(pidCountRecorderListener())
            .writer(mySqlExecutorWriter)
            .listener(myPidCountRecorderListener)
            .listener(deleteExpungePromotionListener())
            .build();
    }
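The refactor above (mirrored in ReindexJobConfig further down) drops the `MultiUrlProcessorJobConfig` base class in favor of autowiring shared step components as beans. A minimal sketch of the resulting wiring pattern, with hypothetical class, bean, and step names (imports omitted; not the HAPI classes themselves):

    @Configuration
    public class ExampleJobConfig {
        @Autowired private JobBuilderFactory myJobBuilderFactory;
        @Autowired private StepBuilderFactory myStepBuilderFactory;
        @Autowired private JobParametersValidator myValidator; // shared bean, no inherited factory method

        @Bean(name = "example-job")
        @Lazy
        public Job exampleJob() {
            return myJobBuilderFactory.get("example-job")
                .validator(myValidator)
                .start(exampleStep())
                .build();
        }

        @Bean
        public Step exampleStep() {
            return myStepBuilderFactory.get("example-step")
                .tasklet((contribution, chunkContext) -> RepeatStatus.FINISHED)
                .build();
        }
    }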
@@ -20,6 +20,7 @@ package ca.uhn.fhir.jpa.entity;
 * #L%
 */

import ca.uhn.fhir.mdm.api.IMdmLink;
import ca.uhn.fhir.mdm.api.MdmLinkSourceEnum;
import ca.uhn.fhir.mdm.api.MdmMatchResultEnum;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;

@@ -47,7 +48,7 @@ import java.util.Date;
@Table(name = "MPI_LINK", uniqueConstraints = {
    @UniqueConstraint(name = "IDX_EMPI_PERSON_TGT", columnNames = {"PERSON_PID", "TARGET_PID"}),
})
public class MdmLink {
public class MdmLink implements IMdmLink {
    public static final int VERSION_LENGTH = 16;
    private static final int MATCH_RESULT_LENGTH = 16;
    private static final int LINK_SOURCE_LENGTH = 16;
@@ -51,6 +51,8 @@ public class ReindexEverythingJobConfig {
    private JobBuilderFactory myJobBuilderFactory;
    @Autowired
    private ReindexWriter myReindexWriter;
    @Autowired
    private PidReaderCounterListener myPidCountRecorderListener;

    @Bean(name = REINDEX_EVERYTHING_JOB_NAME)
    @Lazy

@@ -66,7 +68,7 @@ public class ReindexEverythingJobConfig {
            .<List<Long>, List<Long>>chunk(1)
            .reader(cronologicalBatchAllResourcePidReader())
            .writer(myReindexWriter)
            .listener(reindexEverythingPidCountRecorderListener())
            .listener(myPidCountRecorderListener)
            .listener(reindexEverythingPromotionListener())
            .build();
    }

@@ -77,12 +79,6 @@ public class ReindexEverythingJobConfig {
        return new CronologicalBatchAllResourcePidReader();
    }

    @Bean
    @StepScope
    public PidReaderCounterListener reindexEverythingPidCountRecorderListener() {
        return new PidReaderCounterListener();
    }

    @Bean
    public ExecutionContextPromotionListener reindexEverythingPromotionListener() {
        ExecutionContextPromotionListener listener = new ExecutionContextPromotionListener();
@@ -21,15 +21,17 @@ package ca.uhn.fhir.jpa.reindex.job;
 */

import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.batch.job.MultiUrlProcessorJobConfig;
import ca.uhn.fhir.jpa.batch.job.MultiUrlJobParameterValidator;
import ca.uhn.fhir.jpa.batch.listener.PidReaderCounterListener;
import ca.uhn.fhir.jpa.batch.reader.ReverseCronologicalBatchResourcePidReader;
import ca.uhn.fhir.jpa.batch.writer.SqlExecutorWriter;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.batch.core.listener.ExecutionContextPromotionListener;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@@ -44,7 +46,7 @@ import static ca.uhn.fhir.jpa.batch.BatchJobsConfig.REINDEX_JOB_NAME;
 * Reindex job.
 */
@Configuration
public class ReindexJobConfig extends MultiUrlProcessorJobConfig {
public class ReindexJobConfig {
    public static final String REINDEX_URL_LIST_STEP_NAME = "reindex-url-list-step";

    @Autowired

@@ -54,11 +56,20 @@ public class ReindexJobConfig extends MultiUrlProcessorJobConfig {
    @Autowired
    private ReindexWriter myReindexWriter;

    @Autowired
    private MultiUrlJobParameterValidator myMultiUrlProcessorParameterValidator;

    @Autowired
    private PidReaderCounterListener myPidCountRecorderListener;

    @Autowired
    private ReverseCronologicalBatchResourcePidReader myReverseCronologicalBatchResourcePidReader;

    @Bean(name = REINDEX_JOB_NAME)
    @Lazy
    public Job reindexJob(MatchUrlService theMatchUrlService, DaoRegistry theDaoRegistry) {
    public Job reindexJob() {
        return myJobBuilderFactory.get(REINDEX_JOB_NAME)
            .validator(multiUrlProcessorParameterValidator(theMatchUrlService, theDaoRegistry))
            .validator(myMultiUrlProcessorParameterValidator)
            .start(reindexUrlListStep())
            .build();
    }

@@ -67,9 +78,9 @@ public class ReindexJobConfig extends MultiUrlProcessorJobConfig {
    public Step reindexUrlListStep() {
        return myStepBuilderFactory.get(REINDEX_URL_LIST_STEP_NAME)
            .<List<Long>, List<Long>>chunk(1)
            .reader(reverseCronologicalBatchResourcePidReader())
            .reader(myReverseCronologicalBatchResourcePidReader)
            .writer(myReindexWriter)
            .listener(pidCountRecorderListener())
            .listener(myPidCountRecorderListener)
            .listener(reindexPromotionListener())
            .build();
    }
@@ -82,7 +82,6 @@ import org.springframework.data.domain.Sort;
import org.springframework.orm.jpa.JpaDialect;
import org.springframework.orm.jpa.JpaTransactionManager;
import org.springframework.orm.jpa.vendor.HibernateJpaDialect;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
import org.springframework.stereotype.Component;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.TransactionDefinition;

@@ -111,7 +110,6 @@ import java.util.UUID;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;

import static org.apache.commons.lang3.ObjectUtils.defaultIfNull;

@@ -123,7 +121,6 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
    public static final Integer INTEGER_0 = 0;
    private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(SearchCoordinatorSvcImpl.class);
    private final ConcurrentHashMap<String, SearchTask> myIdToSearchTask = new ConcurrentHashMap<>();
    private final ExecutorService myExecutor;
    @Autowired
    private FhirContext myContext;
    @Autowired

@@ -162,8 +159,13 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
     * Constructor
     */
    @Autowired
    public SearchCoordinatorSvcImpl(ThreadPoolTaskExecutor searchCoordinatorThreadFactory) {
        myExecutor = searchCoordinatorThreadFactory.getThreadPoolExecutor();
    public SearchCoordinatorSvcImpl() {
        super();
    }

    @VisibleForTesting
    Set<String> getActiveSearchIds() {
        return myIdToSearchTask.keySet();
    }

    @VisibleForTesting

@@ -274,7 +276,7 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
            RequestPartitionId requestPartitionId = myRequestPartitionHelperService.determineReadPartitionForRequestForSearchType(theRequestDetails, resourceType, params, null);
            SearchContinuationTask task = new SearchContinuationTask(search, resourceDao, params, resourceType, theRequestDetails, requestPartitionId);
            myIdToSearchTask.put(search.getUuid(), task);
            myExecutor.submit(task);
            task.call();
        }
    }

@@ -406,7 +408,7 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {

        SearchTask task = new SearchTask(theSearch, theCallingDao, theParams, theResourceType, theRequestDetails, theRequestPartitionId);
        myIdToSearchTask.put(theSearch.getUuid(), task);
        myExecutor.submit(task);
        task.call();

        PersistedJpaSearchFirstPageBundleProvider retVal = myPersistedJpaBundleProviderFactory.newInstanceFirstPage(theRequestDetails, theSearch, task, theSb);

@@ -1087,7 +1089,7 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
            ourLog.trace("Performing count");
            ISearchBuilder sb = newSearchBuilder();
            Iterator<Long> countIterator = sb.createCountQuery(myParams, mySearch.getUuid(), myRequest, myRequestPartitionId);
            Long count = countIterator.hasNext() ? countIterator.next() : 0;
            Long count = countIterator.hasNext() ? countIterator.next() : 0L;
            ourLog.trace("Got count {}", count);

            TransactionTemplate txTemplate = new TransactionTemplate(myManagedTxManager);
@@ -434,9 +434,6 @@ public class QueryStack {
            param.setValueAsQueryToken(null, null, null, theFilter.getValue());
            return theQueryStack3.createPredicateResourceId(null, Collections.singletonList(Collections.singletonList(param)), theResourceName, theFilter.getOperation(), theRequestPartitionId);
        }
        case IAnyResource.SP_RES_LANGUAGE: {
            return theQueryStack3.createPredicateLanguage(Collections.singletonList(Collections.singletonList(new StringParam(theFilter.getValue()))), theFilter.getOperation());
        }
        case Constants.PARAM_SOURCE: {
            TokenParam param = new TokenParam();
            param.setValueAsQueryToken(null, null, null, theFilter.getValue());

@@ -579,44 +576,6 @@ public class QueryStack {
        return toAndPredicate(andPredicates);
    }

    public Condition createPredicateLanguage(List<List<IQueryParameterType>> theList, Object theOperation) {

        ResourceTablePredicateBuilder rootTable = mySqlBuilder.getOrCreateResourceTablePredicateBuilder();

        List<Condition> predicates = new ArrayList<>();
        for (List<? extends IQueryParameterType> nextList : theList) {

            Set<String> values = new HashSet<>();
            for (IQueryParameterType next : nextList) {
                if (next instanceof StringParam) {
                    String nextValue = ((StringParam) next).getValue();
                    if (isBlank(nextValue)) {
                        continue;
                    }
                    values.add(nextValue);
                } else {
                    throw new InternalErrorException("Language parameter must be of type " + StringParam.class.getCanonicalName() + " - Got " + next.getClass().getCanonicalName());
                }
            }

            if (values.isEmpty()) {
                continue;
            }

            if ((theOperation == null) ||
                (theOperation == SearchFilterParser.CompareOperation.eq)) {
                predicates.add(rootTable.createLanguagePredicate(values, false));
            } else if (theOperation == SearchFilterParser.CompareOperation.ne) {
                predicates.add(rootTable.createLanguagePredicate(values, true));
            } else {
                throw new InvalidRequestException("Unsupported operator specified in language query, only \"eq\" and \"ne\" are supported");
            }

        }

        return toAndPredicate(predicates);
    }

    public Condition createPredicateNumber(@Nullable DbColumn theSourceJoinColumn, String theResourceName,
        String theSpnamePrefix, RuntimeSearchParam theSearchParam, List<? extends IQueryParameterType> theList,
        SearchFilterParser.CompareOperation theOperation, RequestPartitionId theRequestPartitionId) {
@@ -729,10 +688,10 @@ public class QueryStack {
        return predicateBuilder.createPredicate(theRequest, theResourceName, theParamName, theList, theOperation, theRequestPartitionId);
    }

    private Condition createPredicateReferenceForContainedResource(@Nullable DbColumn theSourceJoinColumn,
        String theResourceName, String theParamName, RuntimeSearchParam theSearchParam,
        List<? extends IQueryParameterType> theList, SearchFilterParser.CompareOperation theOperation,
        RequestDetails theRequest, RequestPartitionId theRequestPartitionId) {
    public Condition createPredicateReferenceForContainedResource(@Nullable DbColumn theSourceJoinColumn,
        String theResourceName, String theParamName, RuntimeSearchParam theSearchParam,
        List<? extends IQueryParameterType> theList, SearchFilterParser.CompareOperation theOperation,
        RequestDetails theRequest, RequestPartitionId theRequestPartitionId) {

        String spnamePrefix = theParamName;
@@ -794,31 +753,31 @@ public class QueryStack {

        switch (targetParamDefinition.getParamType()) {
            case DATE:
                containedCondition = createPredicateDate(null, theResourceName, spnamePrefix, targetParamDefinition,
                containedCondition = createPredicateDate(theSourceJoinColumn, theResourceName, spnamePrefix, targetParamDefinition,
                    orValues, theOperation, theRequestPartitionId);
                break;
            case NUMBER:
                containedCondition = createPredicateNumber(null, theResourceName, spnamePrefix, targetParamDefinition,
                containedCondition = createPredicateNumber(theSourceJoinColumn, theResourceName, spnamePrefix, targetParamDefinition,
                    orValues, theOperation, theRequestPartitionId);
                break;
            case QUANTITY:
                containedCondition = createPredicateQuantity(null, theResourceName, spnamePrefix, targetParamDefinition,
                containedCondition = createPredicateQuantity(theSourceJoinColumn, theResourceName, spnamePrefix, targetParamDefinition,
                    orValues, theOperation, theRequestPartitionId);
                break;
            case STRING:
                containedCondition = createPredicateString(null, theResourceName, spnamePrefix, targetParamDefinition,
                containedCondition = createPredicateString(theSourceJoinColumn, theResourceName, spnamePrefix, targetParamDefinition,
                    orValues, theOperation, theRequestPartitionId);
                break;
            case TOKEN:
                containedCondition = createPredicateToken(null, theResourceName, spnamePrefix, targetParamDefinition,
                containedCondition = createPredicateToken(theSourceJoinColumn, theResourceName, spnamePrefix, targetParamDefinition,
                    orValues, theOperation, theRequestPartitionId);
                break;
            case COMPOSITE:
                containedCondition = createPredicateComposite(null, theResourceName, spnamePrefix, targetParamDefinition,
                containedCondition = createPredicateComposite(theSourceJoinColumn, theResourceName, spnamePrefix, targetParamDefinition,
                    orValues, theRequestPartitionId);
                break;
            case URI:
                containedCondition = createPredicateUri(null, theResourceName, spnamePrefix, targetParamDefinition,
                containedCondition = createPredicateUri(theSourceJoinColumn, theResourceName, spnamePrefix, targetParamDefinition,
                    orValues, theOperation, theRequest, theRequestPartitionId);
                break;
            case HAS:
@@ -988,9 +947,12 @@ public class QueryStack {
        String theSpnamePrefix, RuntimeSearchParam theSearchParam, List<? extends IQueryParameterType> theList,
        SearchFilterParser.CompareOperation theOperation, RequestPartitionId theRequestPartitionId) {

        List<IQueryParameterType> tokens = new ArrayList<>();

        boolean paramInverted = false;
        TokenParamModifier modifier = null;

        for (IQueryParameterType nextOr : theList) {

            if (nextOr instanceof TokenParam) {
                if (!((TokenParam) nextOr).isEmpty()) {
                    TokenParam id = (TokenParam) nextOr;

@@ -1009,17 +971,20 @@ public class QueryStack {
                    }

                    return createPredicateString(theSourceJoinColumn, theResourceName, theSpnamePrefix, theSearchParam, theList, null, theRequestPartitionId);
                }

                modifier = id.getModifier();
                // for :not modifier, create a token and remove the :not modifier
                if (modifier != null && modifier == TokenParamModifier.NOT) {
                    tokens.add(new TokenParam(((TokenParam) nextOr).getSystem(), ((TokenParam) nextOr).getValue()));
                    paramInverted = true;
                } else {
                    tokens.add(nextOr);
                }

                tokens.add(nextOr);

            }

        } else {

            tokens.add(nextOr);
        }

    }

    if (tokens.isEmpty()) {

@@ -1027,14 +992,37 @@ public class QueryStack {
        }

        String paramName = getParamNameWithPrefix(theSpnamePrefix, theSearchParam.getName());
        Condition predicate;
        BaseJoiningPredicateBuilder join;

        if (paramInverted) {
            SearchQueryBuilder sqlBuilder = mySqlBuilder.newChildSqlBuilder();
            TokenPredicateBuilder tokenSelector = sqlBuilder.addTokenPredicateBuilder(null);
            sqlBuilder.addPredicate(tokenSelector.createPredicateToken(tokens, theResourceName, theSpnamePrefix, theSearchParam, theRequestPartitionId));
            SelectQuery sql = sqlBuilder.getSelect();
            Expression subSelect = new Subquery(sql);

            join = mySqlBuilder.getOrCreateFirstPredicateBuilder();

            if (theSourceJoinColumn == null) {
                predicate = new InCondition(join.getResourceIdColumn(), subSelect).setNegate(true);
            } else {
                //-- for the resource link, need join with target_resource_id
                predicate = new InCondition(theSourceJoinColumn, subSelect).setNegate(true);
            }

        } else {

            TokenPredicateBuilder tokenJoin = createOrReusePredicateBuilder(PredicateBuilderTypeEnum.TOKEN, theSourceJoinColumn, paramName, () -> mySqlBuilder.addTokenPredicateBuilder(theSourceJoinColumn)).getResult();

            TokenPredicateBuilder join = createOrReusePredicateBuilder(PredicateBuilderTypeEnum.TOKEN, theSourceJoinColumn, paramName, () -> mySqlBuilder.addTokenPredicateBuilder(theSourceJoinColumn)).getResult();
            if (theList.get(0).getMissing() != null) {
                return tokenJoin.createPredicateParamMissingForNonReference(theResourceName, paramName, theList.get(0).getMissing(), theRequestPartitionId);
            }

            if (theList.get(0).getMissing() != null) {
                return join.createPredicateParamMissingForNonReference(theResourceName, paramName, theList.get(0).getMissing(), theRequestPartitionId);
            }

            Condition predicate = join.createPredicateToken(tokens, theResourceName, theSpnamePrefix, theSearchParam, theOperation, theRequestPartitionId);
            predicate = tokenJoin.createPredicateToken(tokens, theResourceName, theSpnamePrefix, theSearchParam, theOperation, theRequestPartitionId);
            join = tokenJoin;
        }

        return join.combineWithRequestPartitionIdPredicate(theRequestPartitionId, predicate);
    }
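The inverted branch above expresses `:not` as a negated IN against a sub-select. A standalone sketch of that shape using the sqlbuilder types visible in the diff (`SelectQuery`, `Subquery`, `InCondition`); the `CustomSql` fragments are hypothetical stand-ins for the real schema objects:

    SelectQuery inner = new SelectQuery()
        .addCustomColumns(new CustomSql("t.RES_ID"))
        .addCustomFromTable(new CustomSql("HFJ_SPIDX_TOKEN t"));
    // keep only rows whose RES_ID does NOT appear in the token sub-select
    Condition notIn = new InCondition(new CustomSql("r.RES_ID"), new Subquery(inner)).setNegate(true);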
@@ -1070,9 +1058,6 @@ public class QueryStack {
            case IAnyResource.SP_RES_ID:
                return createPredicateResourceId(theSourceJoinColumn, theAndOrParams, theResourceName, null, theRequestPartitionId);

            case IAnyResource.SP_RES_LANGUAGE:
                return createPredicateLanguage(theAndOrParams, null);

            case Constants.PARAM_HAS:
                return createPredicateHas(theSourceJoinColumn, theResourceName, theAndOrParams, theRequest, theRequestPartitionId);
@@ -1133,10 +1118,24 @@ public class QueryStack {
                break;
            case REFERENCE:
                for (List<? extends IQueryParameterType> nextAnd : theAndOrParams) {
                    if (theSearchContainedMode.equals(SearchContainedModeEnum.TRUE))
                        andPredicates.add(createPredicateReferenceForContainedResource(theSourceJoinColumn, theResourceName, theParamName, nextParamDef, nextAnd, null, theRequest, theRequestPartitionId));
                    else
                    if (theSearchContainedMode.equals(SearchContainedModeEnum.TRUE)) {
                        // TODO: The _contained parameter is not intended to control search chain interpretation like this.
                        // See SMILE-2898 for details.
                        // For now, leave the incorrect implementation alone, just in case someone is relying on it,
                        // until the complete fix is available.
                        andPredicates.add(createPredicateReferenceForContainedResource(null, theResourceName, theParamName, nextParamDef, nextAnd, null, theRequest, theRequestPartitionId));
                    } else if (isEligibleForContainedResourceSearch(nextAnd)) {
                        // TODO for now, restrict contained reference traversal to the last reference in the chain
                        // We don't seem to be indexing the outbound references of a contained resource, so we can't
                        // include them in search chains.
                        // It would be nice to eventually relax this constraint, but no client seems to be asking for it.
                        andPredicates.add(toOrPredicate(
                            createPredicateReference(theSourceJoinColumn, theResourceName, theParamName, nextAnd, null, theRequest, theRequestPartitionId),
                            createPredicateReferenceForContainedResource(theSourceJoinColumn, theResourceName, theParamName, nextParamDef, nextAnd, null, theRequest, theRequestPartitionId)
                        ));
                    } else {
                        andPredicates.add(createPredicateReference(theSourceJoinColumn, theResourceName, theParamName, nextAnd, null, theRequest, theRequestPartitionId));
                    }
                }
                break;
            case STRING:

@@ -1214,6 +1213,14 @@ public class QueryStack {
        return toAndPredicate(andPredicates);
    }

    private boolean isEligibleForContainedResourceSearch(List<? extends IQueryParameterType> nextAnd) {
        return myModelConfig.isIndexOnContainedResources() &&
            nextAnd.stream()
                .filter(t -> t instanceof ReferenceParam)
                .map(t -> (ReferenceParam) t)
                .noneMatch(t -> t.getChain().contains("."));
    }

    public void addPredicateCompositeUnique(String theIndexString, RequestPartitionId theRequestPartitionId) {
        ComboUniqueSearchParameterPredicateBuilder predicateBuilder = mySqlBuilder.addComboUniquePredicateBuilder();
        Condition predicate = predicateBuilder.createPredicateIndexString(theRequestPartitionId, theIndexString);
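To illustrate the chain check in `isEligibleForContainedResourceSearch` above (the parameter values are hypothetical): a single-hop chain qualifies, while a multi-hop chain contains a `.` inside the chain value and is excluded:

    // chain as carried on a ReferenceParam, i.e. the part after the first '.'
    String singleHop = "name";              // ?subject.name=...              -> eligible
    String multiHop = "organization.name";  // ?subject.organization.name=... -> excluded
    boolean eligible = !multiHop.contains(".");  // false: multi-hop chains are skipped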

@@ -221,11 +221,15 @@ public class SearchBuilder implements ISearchBuilder {
       SearchContainedModeEnum searchContainedMode = theParams.getSearchContainedMode();
 
-      // Handle _id last, since it can typically be tacked onto a different parameter
-      List<String> paramNames = myParams.keySet().stream().filter(t -> !t.equals(IAnyResource.SP_RES_ID)).collect(Collectors.toList());
+      // Handle _id and _tag last, since they can typically be tacked onto a different parameter
+      List<String> paramNames = myParams.keySet().stream().filter(t -> !t.equals(IAnyResource.SP_RES_ID))
+         .filter(t -> !t.equals(Constants.PARAM_TAG)).collect(Collectors.toList());
       if (myParams.containsKey(IAnyResource.SP_RES_ID)) {
          paramNames.add(IAnyResource.SP_RES_ID);
       }
+      if (myParams.containsKey(Constants.PARAM_TAG)) {
+         paramNames.add(Constants.PARAM_TAG);
+      }
 
       // Handle each parameter
       for (String nextParamName : paramNames) {
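
The reordering that the new comment describes is easy to see in isolation. A small sketch with a hypothetical parameter set (plain Java collections, illustrative names):

import java.util.Arrays;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

public class ParamOrderDemo {
   public static void main(String[] args) {
      // Hypothetical search: Patient?_tag=...&name=...&_id=...
      Set<String> keys = new LinkedHashSet<>(Arrays.asList("_tag", "name", "_id"));

      // Filter _id and _tag out first, then append them, so they are handled last.
      List<String> paramNames = keys.stream()
            .filter(t -> !t.equals("_id"))
            .filter(t -> !t.equals("_tag"))
            .collect(Collectors.toList());
      if (keys.contains("_id")) {
         paramNames.add("_id");
      }
      if (keys.contains("_tag")) {
         paramNames.add("_tag");
      }

      System.out.println(paramNames); // [name, _id, _tag]
   }
}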

@@ -373,7 +377,7 @@ public class SearchBuilder implements ISearchBuilder {
       SearchQueryBuilder sqlBuilder = new SearchQueryBuilder(myContext, myDaoConfig.getModelConfig(), myPartitionSettings, myRequestPartitionId, sqlBuilderResourceName, mySqlBuilderFactory, myDialectProvider, theCount);
       QueryStack queryStack3 = new QueryStack(theParams, myDaoConfig, myDaoConfig.getModelConfig(), myContext, sqlBuilder, mySearchParamRegistry, myPartitionSettings);
 
-      if (theParams.keySet().size() > 1 || theParams.getSort() != null || theParams.keySet().contains(Constants.PARAM_HAS)) {
+      if (theParams.keySet().size() > 1 || theParams.getSort() != null || theParams.keySet().contains(Constants.PARAM_HAS) || isPotentiallyContainedReferenceParameterExistsAtRoot(theParams)) {
          List<RuntimeSearchParam> activeComboParams = mySearchParamRegistry.getActiveComboSearchParams(myResourceName, theParams.keySet());
          if (activeComboParams.isEmpty()) {
            sqlBuilder.setNeedResourceTableRoot(true);

@@ -483,6 +487,13 @@ public class SearchBuilder implements ISearchBuilder {
       return Optional.of(executor);
    }
 
+   private boolean isPotentiallyContainedReferenceParameterExistsAtRoot(SearchParameterMap theParams) {
+      return myModelConfig.isIndexOnContainedResources() && theParams.values().stream()
+         .flatMap(Collection::stream)
+         .flatMap(Collection::stream)
+         .anyMatch(t -> t instanceof ReferenceParam);
+   }
+
    private List<Long> normalizeIdListForLastNInClause(List<Long> lastnResourceIds) {
      /*
         The following is a workaround to a known issue involving Hibernate. If queries are used with "in" clauses with large and varying
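
The double flatMap mirrors the shape of SearchParameterMap values: each parameter name maps to an and-list of or-lists of parameter values. A minimal sketch of the same flattening over hypothetical data, using a plain Map in place of SearchParameterMap and skipping the config check:

import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Map;

import ca.uhn.fhir.model.api.IQueryParameterType;
import ca.uhn.fhir.rest.param.ReferenceParam;
import ca.uhn.fhir.rest.param.StringParam;

public class ContainedRootCheckDemo {
   public static void main(String[] args) {
      // Parameter name -> and-list of or-lists, mirroring SearchParameterMap's value shape
      Map<String, List<List<IQueryParameterType>>> params = Map.of(
            "name", Arrays.asList(Arrays.<IQueryParameterType>asList(new StringParam("Smith"))),
            "subject", Arrays.asList(Arrays.<IQueryParameterType>asList(new ReferenceParam("Patient/123"))));

      // Flatten the and-lists, then the or-lists, then look for any reference parameter.
      boolean anyReferenceAtRoot = params.values().stream()
            .flatMap(Collection::stream)
            .flatMap(Collection::stream)
            .anyMatch(t -> t instanceof ReferenceParam);

      System.out.println(anyReferenceAtRoot); // true
   }
}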

@@ -38,20 +38,18 @@ import ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao;
 import ca.uhn.fhir.jpa.dao.index.IdHelperService;
 import ca.uhn.fhir.jpa.dao.predicate.PredicateBuilderReference;
 import ca.uhn.fhir.jpa.dao.predicate.SearchFilterParser;
-import ca.uhn.fhir.jpa.search.builder.QueryStack;
 import ca.uhn.fhir.jpa.model.search.StorageProcessingMessage;
+import ca.uhn.fhir.jpa.search.builder.QueryStack;
 import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryBuilder;
 import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
 import ca.uhn.fhir.jpa.searchparam.ResourceMetaParams;
 import ca.uhn.fhir.jpa.searchparam.util.JpaParamUtil;
-import ca.uhn.fhir.rest.api.SearchContainedModeEnum;
-import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
-import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
 import ca.uhn.fhir.model.api.IQueryParameterType;
 import ca.uhn.fhir.model.primitive.IdDt;
 import ca.uhn.fhir.parser.DataFormatException;
 import ca.uhn.fhir.rest.api.Constants;
 import ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum;
+import ca.uhn.fhir.rest.api.SearchContainedModeEnum;
 import ca.uhn.fhir.rest.api.server.RequestDetails;
 import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
 import ca.uhn.fhir.rest.param.CompositeParam;

@@ -62,9 +60,12 @@ import ca.uhn.fhir.rest.param.ReferenceParam;
 import ca.uhn.fhir.rest.param.SpecialParam;
 import ca.uhn.fhir.rest.param.StringParam;
 import ca.uhn.fhir.rest.param.TokenParam;
+import ca.uhn.fhir.rest.param.TokenParamModifier;
 import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
 import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
 import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
+import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
+import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
 import com.google.common.collect.Lists;
 import com.healthmarketscience.sqlbuilder.BinaryCondition;
 import com.healthmarketscience.sqlbuilder.ComboCondition;

@@ -338,16 +339,28 @@ public class ResourceLinkPredicateBuilder extends BaseJoiningPredicateBuilder {
       boolean foundChainMatch = false;
       List<String> candidateTargetTypes = new ArrayList<>();
       List<Condition> orPredicates = new ArrayList<>();
+      boolean paramInverted = false;
       QueryStack childQueryFactory = myQueryStack.newChildQueryFactoryWithFullBuilderReuse();
-      for (String nextType : resourceTypes) {
-         String chain = theReferenceParam.getChain();
-
-         String remainingChain = null;
-         int chainDotIndex = chain.indexOf('.');
-         if (chainDotIndex != -1) {
-            remainingChain = chain.substring(chainDotIndex + 1);
-            chain = chain.substring(0, chainDotIndex);
-         }
+      String chain = theReferenceParam.getChain();
+
+      String remainingChain = null;
+      int chainDotIndex = chain.indexOf('.');
+      if (chainDotIndex != -1) {
+         remainingChain = chain.substring(chainDotIndex + 1);
+         chain = chain.substring(0, chainDotIndex);
+      }
+
+      int qualifierIndex = chain.indexOf(':');
+      String qualifier = null;
+      if (qualifierIndex != -1) {
+         qualifier = chain.substring(qualifierIndex);
+         chain = chain.substring(0, qualifierIndex);
+      }
+
+      boolean isMeta = ResourceMetaParams.RESOURCE_META_PARAMS.containsKey(chain);
+
+      for (String nextType : resourceTypes) {
+
          RuntimeResourceDefinition typeDef = getFhirContext().getResourceDefinition(nextType);
         String subResourceName = typeDef.getName();
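
With the parsing hoisted out of the per-type loop, the parse order is also easier to state: everything past the first dot becomes the remaining chain, then a colon-prefixed qualifier is stripped from the first link. A small sketch of just that string handling, with hypothetical inputs:

public class ChainParseDemo {

   public static void main(String[] args) {
      parse("organization.name"); // chain=organization, remainingChain=name, qualifier=null
      parse("code:below");        // chain=code, remainingChain=null, qualifier=:below
      parse("_tag");              // chain=_tag, which the real code treats as a meta parameter
   }

   static void parse(String chain) {
      // First link vs. the rest of the chain, split at the first dot.
      String remainingChain = null;
      int chainDotIndex = chain.indexOf('.');
      if (chainDotIndex != -1) {
         remainingChain = chain.substring(chainDotIndex + 1);
         chain = chain.substring(0, chainDotIndex);
      }

      // A ':' qualifier is stripped from the first link, keeping the colon.
      String qualifier = null;
      int qualifierIndex = chain.indexOf(':');
      if (qualifierIndex != -1) {
         qualifier = chain.substring(qualifierIndex);
         chain = chain.substring(0, qualifierIndex);
      }

      System.out.println("chain=" + chain + ", remainingChain=" + remainingChain + ", qualifier=" + qualifier);
   }
}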

@@ -358,14 +371,6 @@ public class ResourceLinkPredicateBuilder extends BaseJoiningPredicateBuilder {
            continue;
         }
 
-         int qualifierIndex = chain.indexOf(':');
-         String qualifier = null;
-         if (qualifierIndex != -1) {
-            qualifier = chain.substring(qualifierIndex);
-            chain = chain.substring(0, qualifierIndex);
-         }
-
-         boolean isMeta = ResourceMetaParams.RESOURCE_META_PARAMS.containsKey(chain);
         RuntimeSearchParam param = null;
         if (!isMeta) {
            param = mySearchParamRegistry.getActiveSearchParam(nextType, chain);

@@ -383,6 +388,13 @@ public class ResourceLinkPredicateBuilder extends BaseJoiningPredicateBuilder {
               if (chainValue == null) {
                  continue;
               }
+
+              // For the token param, if it carries the :not modifier, we need to switch OR to AND
+              if (!paramInverted && chainValue instanceof TokenParam) {
+                 if (((TokenParam) chainValue).getModifier() == TokenParamModifier.NOT) {
+                    paramInverted = true;
+                 }
+              }
               foundChainMatch = true;
               orValues.add(chainValue);
            }
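
Detecting the inversion flag only requires inspecting the parsed token. A minimal sketch, where the chained value is hypothetical (as if parsed from something like ?subject.identifier:not=X):

import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.rest.param.TokenParamModifier;

public class NotModifierDetectDemo {
   public static void main(String[] args) {
      // Hypothetical chained value carrying the :not modifier
      TokenParam chainValue = new TokenParam("X");
      chainValue.setModifier(TokenParamModifier.NOT);

      // Same detection as in the loop above
      boolean paramInverted = chainValue.getModifier() == TokenParamModifier.NOT;
      System.out.println(paramInverted); // true
   }
}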

@@ -399,7 +411,6 @@ public class ResourceLinkPredicateBuilder extends BaseJoiningPredicateBuilder {
            andPredicates.add(childQueryFactory.searchForIdsWithAndOr(myColumnTargetResourceId, subResourceName, chain, chainParamValues, theRequest, theRequestPartitionId, SearchContainedModeEnum.FALSE));
-
            orPredicates.add(toAndPredicate(andPredicates));
 
         }
 
        if (candidateTargetTypes.isEmpty()) {

@@ -410,10 +421,17 @@ public class ResourceLinkPredicateBuilder extends BaseJoiningPredicateBuilder {
         warnAboutPerformanceOnUnqualifiedResources(theParamName, theRequest, candidateTargetTypes);
      }
 
-      Condition multiTypeOrPredicate = toOrPredicate(orPredicates);
+      // If a token in the chain carried the :not modifier, the per-type predicates combine with AND instead of OR
+      Condition multiTypePredicate;
+      if (paramInverted) {
+         multiTypePredicate = toAndPredicate(orPredicates);
+      } else {
+         multiTypePredicate = toOrPredicate(orPredicates);
+      }
 
       List<String> pathsToMatch = createResourceLinkPaths(theResourceName, theParamName);
       Condition pathPredicate = createPredicateSourcePaths(pathsToMatch);
-      return toAndPredicate(pathPredicate, multiTypeOrPredicate);
+      return toAndPredicate(pathPredicate, multiTypePredicate);
    }
 
    @Nonnull
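
The OR-to-AND swap is De Morgan's law applied across the candidate target types: a positive match may occur via any type, while a :not constraint must hold for every type. A tiny illustration with plain booleans and hypothetical match outcomes:

public class NotModifierCombineDemo {
   public static void main(String[] args) {
      // Suppose the chained value matches via the Patient target type but not via Group.
      boolean matchesViaPatient = true;
      boolean matchesViaGroup = false;

      // Positive search: a hit via any candidate type is a hit, so the types combine with OR.
      boolean positive = matchesViaPatient || matchesViaGroup; // true

      // :not search: each per-type predicate is already a negation, and
      // not (A or B) == (not A) and (not B), so the types must combine with AND.
      boolean inverted = !matchesViaPatient && !matchesViaGroup; // false

      System.out.println(positive + " " + inverted);
   }
}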