Merge branch 'master' into 2732-bump-conceptmap-group-element-display-storage-size

commit 164e684d7c

@@ -2,11 +2,11 @@
<modelVersion>4.0.0</modelVersion>

<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>5.5.0-PRE2-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>5.5.0-PRE4-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

<artifactId>hapi-deployable-pom</artifactId>
<packaging>pom</packaging>
@@ -140,6 +140,25 @@
</ignoredResourcePatterns>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-checkstyle-plugin</artifactId>
<executions>
<execution>
<phase>process-sources</phase>
<goals>
<goal>checkstyle</goal>
</goals>
<configuration>
<failsOnError>true</failsOnError>
<enableRulesSummary>true</enableRulesSummary>
<enableSeveritySummary>true</enableSeveritySummary>
<consoleOutput>true</consoleOutput>
<configLocation>${maven.multiModuleProjectDirectory}/src/checkstyle/checkstyle.xml</configLocation>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.5.0-PRE2-SNAPSHOT</version>
<version>5.5.0-PRE4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.5.0-PRE2-SNAPSHOT</version>
<version>5.5.0-PRE4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -19,11 +19,14 @@
<dependencies>

<!-- JSON -->
<dependency>
<groupId>com.fasterxml.jackson.datatype</groupId>
<artifactId>jackson-datatype-jsr310</artifactId>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
</dependency>

<!-- XML -->
<dependency>
<groupId>com.fasterxml.woodstox</groupId>
@@ -225,6 +225,18 @@ public class FhirContext {
}

public static FhirContext forDstu3Cached() {
return forCached(FhirVersionEnum.DSTU3);
}

public static FhirContext forR4Cached() {
return forCached(FhirVersionEnum.R4);
}

public static FhirContext forR5Cached() {
return forCached(FhirVersionEnum.R5);
}

private String createUnknownResourceNameError(final String theResourceName, final FhirVersionEnum theVersion) {
return getLocalizer().getMessage(FhirContext.class, "unknownResourceName", theResourceName, theVersion);
}
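Note: the new forDstu3Cached()/forR4Cached()/forR5Cached() factories above simply delegate to forCached(FhirVersionEnum). A minimal usage sketch (illustrative only, not part of this commit; assumes the standard HAPI FHIR R4 structures are on the classpath):

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.parser.IParser;
import org.hl7.fhir.r4.model.Patient;

public class CachedContextExample {
	public static void main(String[] args) {
		// Reuses one process-wide context instead of paying FhirContext's initialization cost repeatedly
		FhirContext ctx = FhirContext.forR4Cached();
		IParser parser = ctx.newJsonParser().setPrettyPrint(true);
		System.out.println(parser.encodeResourceToString(new Patient().setActive(true)));
	}
}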
@@ -1840,6 +1840,9 @@ public enum Pointcut implements IPointcut {
* pulled out of the servlet request. This parameter is identical to the RequestDetails parameter above but will
* only be populated when operating in a RestfulServer implementation. It is provided as a convenience.
* </li>
* <li>
* ca.uhn.fhir.context.RuntimeResourceDefinition - the resource type being accessed
* </li>
* </ul>
* <p>
* Hooks must return void.

@@ -1851,7 +1854,8 @@ public enum Pointcut implements IPointcut {
// Params
"ca.uhn.fhir.interceptor.model.RequestPartitionId",
"ca.uhn.fhir.rest.api.server.RequestDetails",
"ca.uhn.fhir.rest.server.servlet.ServletRequestDetails"
"ca.uhn.fhir.rest.server.servlet.ServletRequestDetails",
"ca.uhn.fhir.context.RuntimeResourceDefinition"
),

/**
@@ -20,6 +20,10 @@ package ca.uhn.fhir.interceptor.model;
* #L%
*/

import ca.uhn.fhir.model.api.IModelJson;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;

@@ -41,12 +45,17 @@ import static org.apache.commons.lang3.ObjectUtils.defaultIfNull;
/**
* @since 5.0.0
*/
public class RequestPartitionId {
public class RequestPartitionId implements IModelJson {
private static final RequestPartitionId ALL_PARTITIONS = new RequestPartitionId();
private static final ObjectMapper ourObjectMapper = new ObjectMapper().registerModule(new com.fasterxml.jackson.datatype.jsr310.JavaTimeModule());

@JsonProperty("partitionDate")
private final LocalDate myPartitionDate;
@JsonProperty("allPartitions")
private final boolean myAllPartitions;
@JsonProperty("partitionIds")
private final List<Integer> myPartitionIds;
@JsonProperty("partitionNames")
private final List<String> myPartitionNames;

/**
@@ -80,6 +89,10 @@ public class RequestPartitionId {
myAllPartitions = true;
}

public static RequestPartitionId fromJson(String theJson) throws JsonProcessingException {
return ourObjectMapper.readValue(theJson, RequestPartitionId.class);
}

public boolean isAllPartitions() {
return myAllPartitions;
}

@@ -308,4 +321,8 @@ public class RequestPartitionId {
}
return retVal;
}

public String asJson() throws JsonProcessingException {
return ourObjectMapper.writeValueAsString(this);
}
}
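The asJson()/fromJson() pair added above exists so a RequestPartitionId can be carried inside serialized batch job parameters. A small round-trip sketch (illustrative; it mirrors the unit test that follows):

import ca.uhn.fhir.interceptor.model.RequestPartitionId;

import java.time.LocalDate;

public class PartitionIdJsonExample {
	public static void main(String[] args) throws Exception {
		RequestPartitionId original = RequestPartitionId.fromPartitionId(123, LocalDate.of(2020, 1, 1));
		// Serialize to JSON, e.g. {"partitionDate":[2020,1,1],"allPartitions":false,"partitionIds":[123],...}
		String json = original.asJson();
		// ...and restore an equal instance from it
		RequestPartitionId restored = RequestPartitionId.fromJson(json);
		System.out.println(original.equals(restored)); // true
	}
}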
@@ -72,6 +72,7 @@ public enum VersionEnum {
V5_3_2,
V5_3_3,
V5_4_0,
V5_4_1,
V5_5_0,
;
@@ -1,16 +1,22 @@
package ca.uhn.fhir.interceptor.model;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.google.common.collect.Lists;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.time.LocalDate;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsString;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;

public class RequestPartitionIdTest {
private static final Logger ourLog = LoggerFactory.getLogger(RequestPartitionIdTest.class);

@Test
public void testHashCode() {

@@ -36,5 +42,30 @@ public class RequestPartitionIdTest {
assertFalse(RequestPartitionId.forPartitionIdsAndNames(null, Lists.newArrayList(1, 2), null).isDefaultPartition());
}

@Test
public void testSerDeserSer() throws JsonProcessingException {
{
RequestPartitionId start = RequestPartitionId.fromPartitionId(123, LocalDate.of(2020, 1, 1));
String json = assertSerDeserSer(start);
assertThat(json, containsString("\"partitionDate\":[2020,1,1]"));
assertThat(json, containsString("\"partitionIds\":[123]"));
}
{
RequestPartitionId start = RequestPartitionId.forPartitionIdsAndNames(Lists.newArrayList("Name1", "Name2"), null, null);
String json = assertSerDeserSer(start);
assertThat(json, containsString("partitionNames\":[\"Name1\",\"Name2\"]"));
}
assertSerDeserSer(RequestPartitionId.allPartitions());
assertSerDeserSer(RequestPartitionId.defaultPartition());
}

private String assertSerDeserSer(RequestPartitionId start) throws JsonProcessingException {
String json = start.asJson();
ourLog.info(json);
RequestPartitionId end = RequestPartitionId.fromJson(json);
assertEquals(start, end);
String json2 = end.asJson();
assertEquals(json, json2);
return json;
}
}
@@ -3,14 +3,14 @@
<modelVersion>4.0.0</modelVersion>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-bom</artifactId>
<version>5.5.0-PRE2-SNAPSHOT</version>
<version>5.5.0-PRE4-SNAPSHOT</version>
<packaging>pom</packaging>
<name>HAPI FHIR BOM</name>

<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.5.0-PRE2-SNAPSHOT</version>
<version>5.5.0-PRE4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
@@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.5.0-PRE2-SNAPSHOT</version>
<version>5.5.0-PRE4-SNAPSHOT</version>
<relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
</parent>
@@ -36,6 +36,7 @@ import org.fusesource.jansi.Ansi;
import org.fusesource.jansi.AnsiConsole;
import org.slf4j.LoggerFactory;

import java.io.PrintStream;
import java.io.PrintWriter;
import java.lang.management.ManagementFactory;
import java.util.ArrayList;

@@ -43,6 +44,7 @@ import java.util.Arrays;
import java.util.Collections;
import java.util.List;

import static org.apache.commons.lang3.StringUtils.isNotBlank;
import static org.fusesource.jansi.Ansi.ansi;

@SuppressWarnings("WeakerAccess")

@@ -67,17 +69,27 @@ public abstract class BaseApp {

private void logAppHeader() {
System.out.flush();
System.out.println("------------------------------------------------------------");
String msg = "------------------------------------------------------------";
printMessageToStdout(msg);
logProductName();
System.out.println("------------------------------------------------------------");
System.out.println("Process ID : " + ManagementFactory.getRuntimeMXBean().getName());
System.out.println("Max configured JVM memory (Xmx) : " + FileHelper.getFileSizeDisplay(Runtime.getRuntime().maxMemory(), 1));
System.out.println("Detected Java version : " + System.getProperty("java.version"));
System.out.println("------------------------------------------------------------");
printMessageToStdout("------------------------------------------------------------");
printMessageToStdout("Process ID : " + ManagementFactory.getRuntimeMXBean().getName());
printMessageToStdout("Max configured JVM memory (Xmx) : " + FileHelper.getFileSizeDisplay(Runtime.getRuntime().maxMemory(), 1));
printMessageToStdout("Detected Java version : " + System.getProperty("java.version"));
printMessageToStdout("------------------------------------------------------------");
}

private void printMessageToStdout(String theMsg) {
PrintStream out = System.out;
if (isNotBlank(theMsg)) {
out.println(theMsg);
} else {
out.println();
}
}

protected void logProductName() {
System.out.println("\ud83d\udd25 " + ansi().bold() + " " + provideProductName() + ansi().boldOff() + " " + provideProductVersion() + " - Command Line Tool");
printMessageToStdout("\ud83d\udd25 " + ansi().bold() + " " + provideProductName() + ansi().boldOff() + " " + provideProductVersion() + " - Command Line Tool");
}

private void logCommandUsage(BaseCommand theCommand) {

@@ -99,32 +111,32 @@ public abstract class BaseApp {
}

// Usage
System.out.println("Usage:");
System.out.println(" " + provideCommandName() + " " + theCommand.getCommandName() + " [options]");
System.out.println();
printMessageToStdout("Usage:");
printMessageToStdout(" " + provideCommandName() + " " + theCommand.getCommandName() + " [options]");
printMessageToStdout("");

// Description
String wrapped = WordUtils.wrap(theCommand.getCommandDescription(), columns);
System.out.println(wrapped);
System.out.println();
printMessageToStdout(wrapped);
printMessageToStdout("");

// Usage Notes
List<String> usageNotes = theCommand.provideUsageNotes();
for (String next : usageNotes) {
wrapped = WordUtils.wrap(next, columns);
System.out.println(wrapped);
System.out.println();
printMessageToStdout(wrapped);
printMessageToStdout("");
}

// Options
System.out.println("Options:");
printMessageToStdout("Options:");
HelpFormatter fmt = new HelpFormatter();
PrintWriter pw = new PrintWriter(System.out);
fmt.printOptions(pw, columns, getOptions(theCommand), 2, 2);
pw.flush();

// That's it!
System.out.println();
printMessageToStdout("");
}

private Options getOptions(BaseCommand theCommand) {

@@ -135,10 +147,10 @@ public abstract class BaseApp {

private void logUsage() {
logAppHeader();
System.out.println("Usage:");
System.out.println(" " + provideCommandName() + " {command} [options]");
System.out.println();
System.out.println("Commands:");
printMessageToStdout("Usage:");
printMessageToStdout(" " + provideCommandName() + " {command} [options]");
printMessageToStdout("");
printMessageToStdout("Commands:");

int longestCommandLength = 0;
for (BaseCommand next : ourCommands) {

@@ -151,12 +163,12 @@ public abstract class BaseApp {
for (int i = 1; i < rightParts.length; i++) {
rightParts[i] = StringUtils.leftPad("", left.length() + 3) + rightParts[i];
}
System.out.println(ansi().bold().fg(Ansi.Color.GREEN) + left + ansi().boldOff().fg(Ansi.Color.WHITE) + " - " + ansi().bold() + StringUtils.join(rightParts, LINESEP));
printMessageToStdout(ansi().bold().fg(Ansi.Color.GREEN) + left + ansi().boldOff().fg(Ansi.Color.WHITE) + " - " + ansi().bold() + StringUtils.join(rightParts, LINESEP));
}
System.out.println();
System.out.println(ansi().boldOff().fg(Ansi.Color.WHITE) + "See what options are available:");
System.out.println(" " + provideCommandName() + " help {command}");
System.out.println();
printMessageToStdout("");
printMessageToStdout(ansi().boldOff().fg(Ansi.Color.WHITE) + "See what options are available:");
printMessageToStdout(" " + provideCommandName() + " help {command}");
printMessageToStdout("");
}

protected abstract String provideCommandName();

@@ -235,8 +247,8 @@ public abstract class BaseApp {

if (command == null) {
String message = "Unrecognized command: " + ansi().bold().fg(Ansi.Color.RED) + theArgs[0] + ansi().boldOff().fg(Ansi.Color.WHITE);
System.out.println(message);
System.out.println();
printMessageToStdout(message);
printMessageToStdout("");
logUsage();
exitDueToProblem(message);
return;
@@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-cli</artifactId>
<version>5.5.0-PRE2-SNAPSHOT</version>
<version>5.5.0-PRE4-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

@@ -119,7 +119,7 @@
</execution>
</executions>
</plugin>
</plugins>
</plugins>
</build>

</project>
@@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.5.0-PRE2-SNAPSHOT</version>
<version>5.5.0-PRE4-SNAPSHOT</version>
<relativePath>../../hapi-deployable-pom</relativePath>
</parent>
@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>5.5.0-PRE2-SNAPSHOT</version>
<version>5.5.0-PRE4-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>
@@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.5.0-PRE2-SNAPSHOT</version>
<version>5.5.0-PRE4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
@@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.5.0-PRE2-SNAPSHOT</version>
<version>5.5.0-PRE4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.5.0-PRE2-SNAPSHOT</version>
<version>5.5.0-PRE4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>5.5.0-PRE2-SNAPSHOT</version>
<version>5.5.0-PRE4-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>
@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.5.0-PRE2-SNAPSHOT</version>
<version>5.5.0-PRE4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -141,6 +141,13 @@
<skip>true</skip>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-checkstyle-plugin</artifactId>
<configuration>
<skip>true</skip>
</configuration>
</plugin>
</plugins>
<resources>
<resource>
@@ -0,0 +1,3 @@
---
release-date: "2021-06-15"
codename: "Pangolin"

@@ -0,0 +1,6 @@
---
type: add
issue: 2509
backport: 5.4.1
title: "Pagination returned incorrect offset and count in the previous link of the last page
when total element count was one more than multiple of page size. Problem is now fixed"

@@ -1,4 +1,5 @@
---
type: add
issue: 2652
backport: 5.4.1
title: "Settings have been added to the JPA Server DaoConfig to enable/disable various individual kinds of scheduled tasks."

@@ -1,6 +1,7 @@
---
type: change
issue: 2653
backport: 5.4.1
title: "When performing a conditional create/update/delete on a JPA server, if the match URL
contained a plus character, this character was interpreted as a space (per legacy URL encoding
rules) even though this has proven to not be the intended behaviour in real life applications.

@@ -1,6 +1,7 @@
---
type: add
issue: 2653
backport: 5.4.1
title: "When performing a conditional create operation on a JPA server, the system will now
verify that the conditional URL actually matches the data supplied in the resource body,
and aborts the conditional create if it does not."

@@ -1,5 +1,6 @@
---
type: fix
issue: 2665
backport: 5.4.1
title: "When performing a FHIR transaction containing a conditional create, references to that resource
were inadvertently replaced with contained references."

@@ -1,5 +1,6 @@
---
type: fix
issue: 2672
backport: 5.4.1
title: "A concurrency error was fixed when using client assigned IDs on a highly concurrent server
with resource deletion disabled."

@@ -1,5 +1,6 @@
---
type: add
issue: 2672
backport: 5.4.1
title: "Support has been added to the JPA server for `_include` and `_revinclude` where the
value is a qualified star, e.g. `_include=Observation:*`."

@@ -1,5 +1,6 @@
---
type: fix
issue: 2674
backport: 5.4.1
title: "A null-pointer exception was fixed when a ResponseTerminologyDisplayInterceptor is registered and a search
or read response returns a resource with code value that in turn returns a null code lookup."

@@ -1,5 +1,6 @@
---
type: add
issue: 2675
backport: 5.4.1
title: "A new interceptor ValidationMessageSuppressingInterceptor has been added. This interceptor can be used
to selectively suppress specific validation messages."

@@ -1,6 +1,7 @@
---
type: add
issue: 2676
backport: 5.4.1
title: "A new config option has been added to the DaoConfig that causes generated SQL statements to
account for potential null values in HAPI FHIR JPA date index rows. Nulls are no longer ever
used in this table after HAPI FHIR 5.3.0, but legacy data may still have nulls."

@@ -1,5 +1,6 @@
---
type: add
issue: 2676
backport: 5.4.1
title: "A new setting has been added to the DaoConfig that allows the maximum number of
`_include` and `_revinclude` resources to be added to a single search page result."

@@ -1,5 +1,6 @@
---
type: add
issue: 2676
backport: 5.4.1
title: "When performing non-query cache JPA searches (i.e. searches with `Cache-Control: no-store`)
the loading of `_include` and `_revinclude` will now factor the maximum include count."

@@ -1,6 +1,7 @@
---
type: fix
issue: 2676
backport: 5.4.1
title: "Subscription notifications will no longer be triggered by default in response to changes
that do not increment the resource version (e.g. `$meta-add` and `$meta-delete`). A new
DaoConfig setting has been added to make this configurable."

@@ -1,5 +1,6 @@
---
type: fix
issue: 2674
backport: 5.4.1
title: "When myDaoConfig.setDefaultTotalMode(SearchTotalModeEnum.ACCURATE) and there are zero search results on an _id search,
an Index Out of Bounds error was thrown. This has been corrected."
@@ -1,5 +1,6 @@
---
type: add
issue: 2681
backport: 5.4.1
title: "A new DaoConfig setting called Mass Ingestion Mode has been added. This mode enables rapid
data ingestion by skipping a number of unnecessary checks during backloading."

@@ -1,4 +1,5 @@
---
type: fix
issue: 2682
backport: 5.4.1
title: "Fixes the problem that FHIR package IDs were incorrectly treated as case sensitive when being loaded, causing loads to fail when dependencies were declared with a different case than in the package itself."

@@ -1,5 +1,6 @@
---
type: perf
issue: 2688
backport: 5.4.1
title: "FHIR Transaction duplicate record checks are now performed without any database interactions or SQL statements,
reducing the processing load associated with FHIR transactions by at least a small amount."

@@ -1,5 +1,6 @@
---
type: perf
issue: 2688
backport: 5.4.1
title: "Conditional URL lookups in the JPA server will now explicitly specify a maximum fetch size of 2, avoiding
fetching more data that won't be used inadvertently in some situations."

@@ -1,4 +1,5 @@
---
type: add
issue: 2692
backport: 5.4.1
title: "A new Pointcut has been added that is invoked when a new Bulk Export is initiated."

@@ -1,6 +1,7 @@
---
type: fix
issue: 2693
backport: 5.4.1
title: "Constraint errors were not always auto-retried even when configured to do so on certain
platforms (particularly Postgresql) where constraint names are auto converted to lower case.
Thanks to Bruno Hedman for the pull request!"

@@ -1,5 +1,6 @@
---
type: change
issue: 2695
backport: 5.4.1
title: "Bulk import batch jobs are now activated in a local scheduled task, making bulk import
jobs better able to take advantage of large clusters."

@@ -0,0 +1,8 @@
---
type: change
issue: 2697
title: "DELETE _expunge=true has been converted to use Spring Batch. It now simply returns the jobId of the Spring Batch
job while the job continues to run in the background. A new operation called $expunge-delete has been added to provide
more fine-grained control of the delete expunge operation. This operation accepts an ordered list of URLs to be delete
expunged and an optional batch-size parameter that will be used to perform the delete expunge. If no batch size is
specified in the operation, then the value of DaoConfig.getExpungeBatchSize() is used."

@@ -1,5 +1,6 @@
---
type: add
issue: 2702
backport: 5.4.1
title: "The JPA server terminology uploader now supports uploading ICD-10-CM (US Edition) using the
native format for that vocabulary."

@@ -1,4 +1,5 @@
---
type: fix
issue: 2705
backport: 5.4.1
title: "When searching by source, if deleted resources are matched, the search returned an incorrect size. This has been corrected."

@@ -1,5 +1,6 @@
---
type: fix
issue: 2695
backport: 5.4.1
title: "The _filter search parameter was incorrectly included in the server capability statement if it was disabled
on the server. This has been corrected."

@@ -1,4 +1,5 @@
---
type: add
issue: 2712
backport: 5.4.1
title: "AuthorizationInterceptor can now be used to authorize bulk export requests"

@@ -0,0 +1,7 @@
---
type: perf
issue: 2717
title: "A new setting has been added to the DaoConfig called Tag Versioning Mode. This setting controls whether a single collection of
tags/profiles/security labels is maintained across all versions of a single resource, or whether each version of the
resource maintains its own independent collection. Previously each version always maintained an independent collection,
which is useful sometimes, but is often not useful and can affect performance."

@@ -0,0 +1,6 @@
---
type: perf
issue: 2717
title: "FHIR transactions in the JPA server that perform writes will now aggressively pre-fetch as many entities
as possible at the very start of transaction processing. This can drastically reduce the number of
round-trips, especially as the number of resources in a transaction gets bigger."

@@ -0,0 +1,5 @@
---
type: fix
issue: 2624
title: "ValueSet expansion did not correctly preserve the order if multiple codes were included
in a single inclusion block."
@@ -129,4 +129,7 @@ X-Retry-On-Version-Conflict: retry; max-retries=100

# Controlling Delete with Expunge size

During the delete with expunge operation there is an internal synchronous search which locates all the resources to be deleted. The default maximum size of this search is 10000. This can be configured via the [Internal Synchronous Search Size](/hapi-fhir/apidocs/hapi-fhir-jpaserver-api/ca/uhn/fhir/jpa/api/config/DaoConfig.html#setInternalSynchronousSearchSize(java.lang.Integer)) property.
Delete with expunge submits a job to delete and expunge the requested resources. This is done in batches. If the DELETE
?_expunge=true syntax is used to trigger the delete expunge, then the batch size will be determined by the value
of the [Expunge Batch Size](/apidocs/hapi-fhir-jpaserver-api/ca/uhn/fhir/jpa/api/config/DaoConfig.html#getExpungeBatchSize())
property.
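For example (illustrative only, not part of this change; the resource and query parameter are hypothetical), a request such as `DELETE [base]/Patient?active=false&_expunge=true` would submit the batch delete-expunge job described above and process matches in chunks of the configured Expunge Batch Size.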
@@ -11,7 +11,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.5.0-PRE2-SNAPSHOT</version>
<version>5.5.0-PRE4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -147,9 +147,9 @@
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-checkstyle-plugin</artifactId>
<executions>
<execution><id>validate</id><phase>none</phase></execution>
</executions>
<configuration>
<skip>true</skip>
</configuration>
</plugin>
</plugins>
</build>
@@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.5.0-PRE2-SNAPSHOT</version>
<version>5.5.0-PRE4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
@@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>5.5.0-PRE2-SNAPSHOT</version>
<version>5.5.0-PRE4-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>
@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.5.0-PRE2-SNAPSHOT</version>
<version>5.5.0-PRE4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
@@ -8,6 +8,7 @@ import ca.uhn.fhir.rest.api.SearchTotalModeEnum;
import ca.uhn.fhir.util.HapiExtensions;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.Sets;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.time.DateUtils;
import org.hl7.fhir.dstu2.model.Subscription;

@@ -84,6 +85,10 @@ public class DaoConfig {
*/
public static final boolean DEFAULT_ENABLE_TASKS = true;
public static final int DEFAULT_MAXIMUM_INCLUDES_TO_LOAD_PER_PAGE = 1000;
/**
* @since 5.5.0
*/
public static final TagStorageModeEnum DEFAULT_TAG_STORAGE_MODE = TagStorageModeEnum.VERSIONED;
/**
* Default value for {@link #setMaximumSearchResultCountInTransaction(Integer)}
*

@@ -129,6 +134,7 @@ public class DaoConfig {
private SearchTotalModeEnum myDefaultTotalMode = null;
private int myEverythingIncludesFetchPageSize = 50;
private int myBulkImportMaxRetryCount = 10;
private TagStorageModeEnum myTagStorageMode = DEFAULT_TAG_STORAGE_MODE;
/**
* update setter javadoc if default changes
*/
@@ -215,11 +221,11 @@
* update setter javadoc if default changes
*/
@Nonnull
private Long myTranslationCachesExpireAfterWriteInMinutes = DEFAULT_TRANSLATION_CACHES_EXPIRE_AFTER_WRITE_IN_MINUTES;
private final Long myTranslationCachesExpireAfterWriteInMinutes = DEFAULT_TRANSLATION_CACHES_EXPIRE_AFTER_WRITE_IN_MINUTES;
/**
* @since 5.4.0
*/
private boolean myMatchUrlCache;
private boolean myMatchUrlCacheEnabled;
/**
* @since 5.5.0
*/
@@ -266,6 +272,26 @@ public class DaoConfig {
}
}

/**
* Gets the tag storage mode for the server. Default is {@link TagStorageModeEnum#VERSIONED}.
*
* @since 5.5.0
*/
@Nonnull
public TagStorageModeEnum getTagStorageMode() {
return myTagStorageMode;
}

/**
* Sets the tag storage mode for the server. Default is {@link TagStorageModeEnum#VERSIONED}.
*
* @since 5.5.0
*/
public void setTagStorageMode(@Nonnull TagStorageModeEnum theTagStorageMode) {
Validate.notNull(theTagStorageMode, "theTagStorageMode must not be null");
myTagStorageMode = theTagStorageMode;
}

/**
* Specifies the maximum number of times that a chunk will be retried during bulk import
* processes before giving up.
@@ -421,9 +447,11 @@ public class DaoConfig {
* Default is <code>false</code>
*
* @since 5.4.0
* @deprecated Deprecated in 5.5.0. Use {@link #isMatchUrlCacheEnabled()} instead (the name of this method is misleading)
*/
@Deprecated
public boolean getMatchUrlCache() {
return myMatchUrlCache;
return myMatchUrlCacheEnabled;
}

/**
@@ -435,9 +463,39 @@ public class DaoConfig {
* Default is <code>false</code>
*
* @since 5.4.0
* @deprecated Deprecated in 5.5.0. Use {@link #setMatchUrlCacheEnabled(boolean)} instead (the name of this method is misleading)
*/
@Deprecated
public void setMatchUrlCache(boolean theMatchUrlCache) {
myMatchUrlCache = theMatchUrlCache;
myMatchUrlCacheEnabled = theMatchUrlCache;
}

/**
* If enabled, resolutions for match URLs (e.g. conditional create URLs, conditional update URLs, etc) will be
* cached in an in-memory cache. This cache can have a noticeable improvement on write performance on servers
* where conditional operations are frequently performed, but note that this cache will not be
* invalidated based on updates to resources so this may have detrimental effects.
* <p>
* Default is <code>false</code>
*
* @since 5.5.0
*/
public boolean isMatchUrlCacheEnabled() {
return getMatchUrlCache();
}

/**
* If enabled, resolutions for match URLs (e.g. conditional create URLs, conditional update URLs, etc) will be
* cached in an in-memory cache. This cache can have a noticeable improvement on write performance on servers
* where conditional operations are frequently performed, but note that this cache will not be
* invalidated based on updates to resources so this may have detrimental effects.
* <p>
* Default is <code>false</code>
*
* @since 5.5.0
*/
public void setMatchUrlCacheEnabled(boolean theMatchUrlCache) {
setMatchUrlCache(theMatchUrlCache);
}

/**
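Taken together, the new DaoConfig options above can be applied wherever the JPA server's DaoConfig bean is customized. A minimal sketch (illustrative only; method names are taken from this diff, the surrounding Spring wiring is assumed):

import ca.uhn.fhir.jpa.api.config.DaoConfig;

public class DaoConfigCustomizer {
	public static DaoConfig customize(DaoConfig theDaoConfig) {
		// Share one tag collection across all versions of a resource (new in 5.5.0)
		theDaoConfig.setTagStorageMode(DaoConfig.TagStorageModeEnum.NON_VERSIONED);
		// Cache conditional-URL resolutions; replaces the deprecated setMatchUrlCache(true)
		theDaoConfig.setMatchUrlCacheEnabled(true);
		return theDaoConfig;
	}
}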
@@ -1572,7 +1630,8 @@ public class DaoConfig {

/**
* The expunge batch size (default 800) determines the number of records deleted within a single transaction by the
* expunge operation.
* expunge operation. When expunging via DELETE ?_expunge=true, then this value determines the batch size for
* the number of resources deleted and expunged at a time.
*/
public int getExpungeBatchSize() {
return myExpungeBatchSize;

@@ -1580,7 +1639,8 @@ public class DaoConfig {

/**
* The expunge batch size (default 800) determines the number of records deleted within a single transaction by the
* expunge operation.
* expunge operation. When expunging via DELETE ?_expunge=true, then this value determines the batch size for
* the number of resources deleted and expunged at a time.
*/
public void setExpungeBatchSize(int theExpungeBatchSize) {
myExpungeBatchSize = theExpungeBatchSize;
@@ -2271,9 +2331,8 @@ public class DaoConfig {

/**
* <p>
* This determines the internal search size that is run synchronously during operations such as:
* 1. Delete with _expunge parameter.
* 2. Searching for Code System IDs by System and Code
* This determines the internal search size that is run synchronously during operations such as searching for
* Code System IDs by System and Code
* </p>
*
* @since 5.4.0

@@ -2284,9 +2343,8 @@ public class DaoConfig {

/**
* <p>
* This determines the internal search size that is run synchronously during operations such as:
* 1. Delete with _expunge parameter.
* 2. Searching for Code System IDs by System and Code
* This determines the internal search size that is run synchronously during operations such as searching for
* Code System IDs by System and Code
* </p>
*
* @since 5.4.0
@@ -2472,6 +2530,30 @@ public class DaoConfig {
myTriggerSubscriptionsForNonVersioningChanges = theTriggerSubscriptionsForNonVersioningChanges;
}

public boolean canDeleteExpunge() {
return isAllowMultipleDelete() && isExpungeEnabled() && isDeleteExpungeEnabled();
}

public String cannotDeleteExpungeReason() {
List<String> reasons = new ArrayList<>();
if (!isAllowMultipleDelete()) {
reasons.add("Multiple Delete");
}
if (!isExpungeEnabled()) {
reasons.add("Expunge");
}
if (!isDeleteExpungeEnabled()) {
reasons.add("Delete Expunge");
}
String retval = "Delete Expunge is not supported on this server. ";
if (reasons.size() == 1) {
retval += reasons.get(0) + " is disabled.";
} else {
retval += "The following configurations are disabled: " + StringUtils.join(reasons, ", ");
}
return retval;
}

public enum StoreMetaSourceInformationEnum {
NONE(false, false),
SOURCE_URI(true, false),
@@ -2548,4 +2630,17 @@ public class DaoConfig {
ANY
}

public enum TagStorageModeEnum {

/**
* A separate set of tags is stored for each resource version
*/
VERSIONED,

/**
* A single set of tags is shared by all resource versions
*/
NON_VERSIONED

}
}
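For context, the canDeleteExpunge()/cannotDeleteExpungeReason() helpers added above only report success when all three prerequisites are enabled. A configuration sketch (illustrative; the setter names are assumed to mirror the isAllowMultipleDelete()/isExpungeEnabled()/isDeleteExpungeEnabled() getters used in this diff):

import ca.uhn.fhir.jpa.api.config.DaoConfig;

public class EnableDeleteExpunge {
	public static void enable(DaoConfig theDaoConfig) {
		// All three flags are checked by DaoConfig.canDeleteExpunge()
		theDaoConfig.setAllowMultipleDelete(true);
		theDaoConfig.setExpungeEnabled(true);
		theDaoConfig.setDeleteExpungeEnabled(true);
		if (!theDaoConfig.canDeleteExpunge()) {
			throw new IllegalStateException(theDaoConfig.cannotDeleteExpungeReason());
		}
	}
}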
@@ -1,6 +1,7 @@ package ca.uhn.fhir.jpa.api.dao;
package ca.uhn.fhir.jpa.api.dao;

import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;

@@ -26,6 +27,6 @@ import org.hl7.fhir.instance.model.api.IIdType;

public interface IFhirResourceDaoSubscription<T extends IBaseResource> extends IFhirResourceDao<T> {

Long getSubscriptionTablePidForSubscriptionResource(IIdType theId, RequestDetails theRequest);
Long getSubscriptionTablePidForSubscriptionResource(IIdType theId, RequestDetails theRequest, TransactionDetails theTransactionDetails);

}
@@ -22,6 +22,7 @@ package ca.uhn.fhir.jpa.api.model;

import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.rest.api.MethodOutcome;
import org.hl7.fhir.instance.model.api.IBaseOperationOutcome;

import java.util.List;
@@ -32,31 +33,51 @@ import java.util.List;
public class DeleteMethodOutcome extends MethodOutcome {

private List<ResourceTable> myDeletedEntities;
@Deprecated
private long myExpungedResourcesCount;
@Deprecated
private long myExpungedEntitiesCount;

public DeleteMethodOutcome() {
}

public DeleteMethodOutcome(IBaseOperationOutcome theBaseOperationOutcome) {
super(theBaseOperationOutcome);
}

public List<ResourceTable> getDeletedEntities() {
return myDeletedEntities;
}

/**
* Use {@link ca.uhn.fhir.jpa.batch.writer.SqlExecutorWriter#ENTITY_TOTAL_UPDATED_OR_DELETED}
*/
@Deprecated
public DeleteMethodOutcome setDeletedEntities(List<ResourceTable> theDeletedEntities) {
myDeletedEntities = theDeletedEntities;
return this;
}

/**
* Use {@link ca.uhn.fhir.jpa.batch.listener.PidReaderCounterListener#RESOURCE_TOTAL_PROCESSED}
*/
@Deprecated
public long getExpungedResourcesCount() {
return myExpungedResourcesCount;
}

@Deprecated
public DeleteMethodOutcome setExpungedResourcesCount(long theExpungedResourcesCount) {
myExpungedResourcesCount = theExpungedResourcesCount;
return this;
}

@Deprecated
public long getExpungedEntitiesCount() {
return myExpungedEntitiesCount;
}

@Deprecated
public DeleteMethodOutcome setExpungedEntitiesCount(long theExpungedEntitiesCount) {
myExpungedEntitiesCount = theExpungedEntitiesCount;
return this;
@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.5.0-PRE2-SNAPSHOT</version>
<version>5.5.0-PRE4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
@@ -22,6 +22,7 @@ package ca.uhn.fhir.jpa.batch;

import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
import ca.uhn.fhir.jpa.bulk.imprt.job.BulkImportJobConfig;
import ca.uhn.fhir.jpa.delete.job.DeleteExpungeJobConfig;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
@@ -32,9 +33,10 @@ import java.util.Set;
@Configuration
//When you define a new batch job, add it here.
@Import({
CommonBatchJobConfig.class,
BulkExportJobConfig.class,
BulkImportJobConfig.class
CommonBatchJobConfig.class,
BulkExportJobConfig.class,
BulkImportJobConfig.class,
DeleteExpungeJobConfig.class
})
public class BatchJobsConfig {

@@ -73,4 +75,8 @@ public class BatchJobsConfig {
RECORD_PROCESSING_STEP_NAMES = Collections.unmodifiableSet(recordProcessingStepNames);
}

/**
* Delete Expunge
*/
public static final String DELETE_EXPUNGE_JOB_NAME = "deleteExpungeJob";
}
@@ -20,8 +20,8 @@ package ca.uhn.fhir.jpa.batch;
* #L%
*/

import ca.uhn.fhir.jpa.batch.processors.GoldenResourceAnnotatingProcessor;
import ca.uhn.fhir.jpa.batch.processors.PidToIBaseResourceProcessor;
import ca.uhn.fhir.jpa.batch.processor.GoldenResourceAnnotatingProcessor;
import ca.uhn.fhir.jpa.batch.processor.PidToIBaseResourceProcessor;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
@@ -0,0 +1,48 @@
package ca.uhn.fhir.jpa.batch.listener;

/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/

import org.springframework.batch.core.StepExecution;
import org.springframework.batch.core.annotation.AfterProcess;
import org.springframework.batch.core.annotation.BeforeStep;

import java.util.List;

/**
* Add the number of pids processed to the execution context so we can track progress of the job
*/
public class PidReaderCounterListener {
public static final String RESOURCE_TOTAL_PROCESSED = "resource.total.processed";

private StepExecution myStepExecution;
private Long myTotalPidsProcessed = 0L;

@BeforeStep
public void setStepExecution(StepExecution stepExecution) {
myStepExecution = stepExecution;
}

@AfterProcess
public void afterProcess(List<Long> thePids, List<String> theSqlList) {
myTotalPidsProcessed += thePids.size();
myStepExecution.getExecutionContext().putLong(RESOURCE_TOTAL_PROCESSED, myTotalPidsProcessed);
}
}
@@ -1,4 +1,4 @@
package ca.uhn.fhir.jpa.batch.processors;
package ca.uhn.fhir.jpa.batch.processor;

/*-
* #%L
@@ -1,4 +1,4 @@
package ca.uhn.fhir.jpa.batch.processors;
package ca.uhn.fhir.jpa.batch.processor;

/*-
* #%L
@@ -0,0 +1,200 @@
package ca.uhn.fhir.jpa.batch.reader;

/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.delete.model.PartitionedUrl;
import ca.uhn.fhir.jpa.delete.model.RequestListJson;
import ca.uhn.fhir.jpa.partition.SystemRequestDetails;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import ca.uhn.fhir.jpa.searchparam.ResourceSearch;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.SortOrderEnum;
import ca.uhn.fhir.rest.api.SortSpec;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import ca.uhn.fhir.rest.param.DateRangeParam;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.jetbrains.annotations.NotNull;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.item.ExecutionContext;
import org.springframework.batch.item.ItemReader;
import org.springframework.batch.item.ItemStream;
import org.springframework.batch.item.ItemStreamException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;

import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

/**
* This Spring Batch reader takes 4 parameters:
* {@link #JOB_PARAM_REQUEST_LIST}: A list of URLs to search for along with the partitions those searches should be performed on
* {@link #JOB_PARAM_BATCH_SIZE}: The number of resources to return with each search. If omitted, {@link DaoConfig#getExpungeBatchSize} will be used.
* {@link #JOB_PARAM_START_TIME}: The latest timestamp of resources to search for
* <p>
* The reader will return at most {@link #JOB_PARAM_BATCH_SIZE} pids every time it is called, or null
* once no more matching resources are available. It returns the resources in reverse chronological order
* and stores where it's at in the Spring Batch execution context with the key {@link #CURRENT_THRESHOLD_HIGH}
* appended with "." and the index number of the url list item it has gotten up to. This is to permit
* restarting jobs that use this reader so it can pick up where it left off.
*/
public class ReverseCronologicalBatchResourcePidReader implements ItemReader<List<Long>>, ItemStream {

public static final String JOB_PARAM_REQUEST_LIST = "url-list";
public static final String JOB_PARAM_BATCH_SIZE = "batch-size";
public static final String JOB_PARAM_START_TIME = "start-time";

public static final String CURRENT_URL_INDEX = "current.url-index";
public static final String CURRENT_THRESHOLD_HIGH = "current.threshold-high";
private static final Logger ourLog = LoggerFactory.getLogger(ReverseCronologicalBatchResourcePidReader.class);

@Autowired
private FhirContext myFhirContext;
@Autowired
private MatchUrlService myMatchUrlService;
@Autowired
private DaoRegistry myDaoRegistry;
@Autowired
private DaoConfig myDaoConfig;

private List<PartitionedUrl> myPartitionedUrls;
private Integer myBatchSize;
private final Map<Integer, Date> myThresholdHighByUrlIndex = new HashMap<>();
private int myUrlIndex = 0;
private Date myStartTime;

@Autowired
public void setRequestListJson(@Value("#{jobParameters['" + JOB_PARAM_REQUEST_LIST + "']}") String theRequestListJson) {
RequestListJson requestListJson = RequestListJson.fromJson(theRequestListJson);
myPartitionedUrls = requestListJson.getPartitionedUrls();
}

@Autowired
public void setBatchSize(@Value("#{jobParameters['" + JOB_PARAM_BATCH_SIZE + "']}") Integer theBatchSize) {
myBatchSize = theBatchSize;
}

@Autowired
public void setStartTime(@Value("#{jobParameters['" + JOB_PARAM_START_TIME + "']}") Date theStartTime) {
myStartTime = theStartTime;
}

@Override
public List<Long> read() throws Exception {
while (myUrlIndex < myPartitionedUrls.size()) {
List<Long> nextBatch;
nextBatch = getNextBatch();
if (nextBatch.isEmpty()) {
++myUrlIndex;
continue;
}

return nextBatch;
}
return null;
}

private List<Long> getNextBatch() {
ResourceSearch resourceSearch = myMatchUrlService.getResourceSearch(myPartitionedUrls.get(myUrlIndex).getUrl());
SearchParameterMap map = buildSearchParameterMap(resourceSearch);

// Perform the search
IFhirResourceDao<?> dao = myDaoRegistry.getResourceDao(resourceSearch.getResourceName());
List<Long> retval = dao.searchForIds(map, buildSystemRequestDetails()).stream()
.map(ResourcePersistentId::getIdAsLong)
.collect(Collectors.toList());

if (ourLog.isDebugEnabled()) {
ourLog.debug("Search for {}{} returned {} results", resourceSearch.getResourceName(), map.toNormalizedQueryString(myFhirContext), retval.size());
ourLog.debug("Results: {}", retval);
}

if (!retval.isEmpty()) {
// Adjust the high threshold to be the earliest resource in the batch we found
Long pidOfOldestResourceInBatch = retval.get(retval.size() - 1);
IBaseResource earliestResource = dao.readByPid(new ResourcePersistentId(pidOfOldestResourceInBatch));
myThresholdHighByUrlIndex.put(myUrlIndex, earliestResource.getMeta().getLastUpdated());
}

return retval;
}

@NotNull
private SearchParameterMap buildSearchParameterMap(ResourceSearch resourceSearch) {
SearchParameterMap map = resourceSearch.getSearchParameterMap();
map.setLastUpdated(new DateRangeParam().setUpperBoundInclusive(myThresholdHighByUrlIndex.get(myUrlIndex)));
map.setLoadSynchronousUpTo(myBatchSize);
map.setSort(new SortSpec(Constants.PARAM_LASTUPDATED, SortOrderEnum.DESC));
return map;
}

@NotNull
private SystemRequestDetails buildSystemRequestDetails() {
SystemRequestDetails retval = new SystemRequestDetails();
retval.setRequestPartitionId(myPartitionedUrls.get(myUrlIndex).getRequestPartitionId());
return retval;
}

@Override
public void open(ExecutionContext executionContext) throws ItemStreamException {
if (myBatchSize == null) {
myBatchSize = myDaoConfig.getExpungeBatchSize();
}
if (executionContext.containsKey(CURRENT_URL_INDEX)) {
myUrlIndex = new Long(executionContext.getLong(CURRENT_URL_INDEX)).intValue();
}
for (int index = 0; index < myPartitionedUrls.size(); ++index) {
String key = highKey(index);
if (executionContext.containsKey(key)) {
myThresholdHighByUrlIndex.put(index, new Date(executionContext.getLong(key)));
} else {
myThresholdHighByUrlIndex.put(index, myStartTime);
}
}
}

private static String highKey(int theIndex) {
return CURRENT_THRESHOLD_HIGH + "." + theIndex;
}

@Override
public void update(ExecutionContext executionContext) throws ItemStreamException {
executionContext.putLong(CURRENT_URL_INDEX, myUrlIndex);
for (int index = 0; index < myPartitionedUrls.size(); ++index) {
Date date = myThresholdHighByUrlIndex.get(index);
if (date != null) {
executionContext.putLong(highKey(index), date.getTime());
}
}
}

@Override
public void close() throws ItemStreamException {
}
}
@ -0,0 +1,67 @@
|
|||
package ca.uhn.fhir.jpa.batch.writer;
|
||||
|
||||
/*-
|
||||
* #%L
|
||||
* HAPI FHIR JPA Server
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
|
||||
* %%
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
* #L%
|
||||
*/
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.batch.core.StepExecution;
|
||||
import org.springframework.batch.core.annotation.BeforeStep;
|
||||
import org.springframework.batch.item.ItemWriter;
|
||||
|
||||
import javax.persistence.EntityManager;
|
||||
import javax.persistence.PersistenceContext;
|
||||
import javax.persistence.PersistenceContextType;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* This Spring Batch writer accepts a list of SQL commands and executes them.
|
||||
* The total number of entities updated or deleted is stored in the execution context
|
||||
* with the key {@link #ENTITY_TOTAL_UPDATED_OR_DELETED}. The entire list is committed within a
|
||||
* single transaction (provided by Spring Batch).
|
||||
*/
|
||||
public class SqlExecutorWriter implements ItemWriter<List<String>> {
|
||||
private static final Logger ourLog = LoggerFactory.getLogger(SqlExecutorWriter.class);
|
||||
|
||||
public static final String ENTITY_TOTAL_UPDATED_OR_DELETED = "entity.total.updated-or-deleted";
|
||||
|
||||
@PersistenceContext(type = PersistenceContextType.TRANSACTION)
|
||||
private EntityManager myEntityManager;
|
||||
private Long totalUpdated = 0L;
|
||||
private StepExecution myStepExecution;
|
||||
|
||||
@BeforeStep
|
||||
public void setStepExecution(StepExecution stepExecution) {
|
||||
myStepExecution = stepExecution;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void write(List<? extends List<String>> theSqlLists) throws Exception {
|
||||
for (List<String> sqlList : theSqlLists) {
|
||||
ourLog.info("Executing {} sql commands", sqlList.size());
|
||||
for (String sql : sqlList) {
|
||||
ourLog.trace("Executing sql " + sql);
|
||||
totalUpdated += myEntityManager.createNativeQuery(sql).executeUpdate();
|
||||
myStepExecution.getExecutionContext().putLong(ENTITY_TOTAL_UPDATED_OR_DELETED, totalUpdated);
|
||||
}
|
||||
}
|
||||
ourLog.debug("{} records updated", totalUpdated);
|
||||
}
|
||||
}
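SqlExecutorWriter is a plain ItemWriter, so it only does work once it is attached to a chunked step whose reader emits List&lt;String&gt; batches of SQL. The wiring below is a hypothetical sketch, not part of this commit: the configuration class, step name, and reader bean are invented, and it is only meant to show the shape of that attachment with the standard Spring Batch builders.

import ca.uhn.fhir.jpa.batch.writer.SqlExecutorWriter;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.item.ItemReader;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

import java.util.List;

@Configuration
public class SqlExecutorStepExampleConfig {

	// Hypothetical step definition: one List<String> of SQL statements per chunk,
	// i.e. per transaction, so the writer's row tally is committed with each chunk.
	@Bean
	public Step sqlExecutorExampleStep(StepBuilderFactory theStepBuilderFactory,
													ItemReader<List<String>> theSqlChunkReader,
													SqlExecutorWriter theSqlExecutorWriter) {
		return theStepBuilderFactory.get("sqlExecutorExampleStep")
			.<List<String>, List<String>>chunk(1)
			.reader(theSqlChunkReader)
			.writer(theSqlExecutorWriter)
			.build();
	}
}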

@@ -21,8 +21,8 @@ package ca.uhn.fhir.jpa.bulk.export.job;
*/

import ca.uhn.fhir.jpa.batch.BatchJobsConfig;
import ca.uhn.fhir.jpa.batch.processors.GoldenResourceAnnotatingProcessor;
import ca.uhn.fhir.jpa.batch.processors.PidToIBaseResourceProcessor;
import ca.uhn.fhir.jpa.batch.processor.GoldenResourceAnnotatingProcessor;
import ca.uhn.fhir.jpa.batch.processor.PidToIBaseResourceProcessor;
import ca.uhn.fhir.jpa.bulk.export.svc.BulkExportDaoSvc;
import ca.uhn.fhir.jpa.dao.mdm.MdmExpansionCacheSvc;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;

@@ -61,6 +61,7 @@ import ca.uhn.fhir.jpa.dao.predicate.PredicateBuilderUri;
import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
import ca.uhn.fhir.jpa.delete.DeleteConflictFinderService;
import ca.uhn.fhir.jpa.delete.DeleteConflictService;
import ca.uhn.fhir.jpa.delete.DeleteExpungeJobSubmitterImpl;
import ca.uhn.fhir.jpa.entity.Search;
import ca.uhn.fhir.jpa.graphql.JpaStorageServices;
import ca.uhn.fhir.jpa.interceptor.CascadingDeleteInterceptor;

@@ -132,9 +133,11 @@ import ca.uhn.fhir.jpa.util.MemoryCacheService;
import ca.uhn.fhir.jpa.validation.JpaResourceLoader;
import ca.uhn.fhir.jpa.validation.ValidationSettings;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.storage.IDeleteExpungeJobSubmitter;
import ca.uhn.fhir.rest.server.interceptor.ResponseTerminologyTranslationInterceptor;
import ca.uhn.fhir.rest.server.interceptor.consent.IConsentContextServices;
import ca.uhn.fhir.rest.server.interceptor.partition.RequestTenantPartitionInterceptor;
import ca.uhn.fhir.rest.server.provider.DeleteExpungeProvider;
import org.hibernate.jpa.HibernatePersistenceProvider;
import org.hl7.fhir.common.hapi.validation.support.UnknownCodeSystemWarningValidationSupport;
import org.hl7.fhir.instance.model.api.IBaseResource;

@@ -528,6 +531,18 @@ public abstract class BaseConfig {
return new BulkDataExportProvider();
}

@Bean
@Lazy
public IDeleteExpungeJobSubmitter myDeleteExpungeJobSubmitter() {
return new DeleteExpungeJobSubmitterImpl();
}

@Bean
@Lazy
public DeleteExpungeProvider deleteExpungeProvider(FhirContext theFhirContext, IDeleteExpungeJobSubmitter theDeleteExpungeJobSubmitter) {
return new DeleteExpungeProvider(theFhirContext, theDeleteExpungeJobSubmitter);
}

@Bean
@Lazy
public IBulkDataImportSvc bulkDataImportSvc() {

@@ -58,7 +58,6 @@ import ca.uhn.fhir.jpa.searchparam.matcher.InMemoryResourceMatcher;
import ca.uhn.fhir.jpa.sp.ISearchParamPresenceSvc;
import ca.uhn.fhir.jpa.term.api.ITermReadSvc;
import ca.uhn.fhir.jpa.util.AddRemoveCount;
import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
import ca.uhn.fhir.jpa.util.MemoryCacheService;
import ca.uhn.fhir.model.api.IResource;
import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum;

@@ -84,6 +83,7 @@ import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor.ActionRequestDetails;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
import ca.uhn.fhir.util.CoverageIgnore;
import ca.uhn.fhir.util.HapiExtensions;
import ca.uhn.fhir.util.MetaUtil;

@@ -137,6 +137,7 @@ import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.IdentityHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;

@@ -185,6 +186,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
private static final Logger ourLog = LoggerFactory.getLogger(BaseHapiFhirDao.class);
private static final Map<FhirVersionEnum, FhirContext> ourRetrievalContexts = new HashMap<>();
private static final String PROCESSING_SUB_REQUEST = "BaseHapiFhirDao.processingSubRequest";
private static final String TRANSACTION_DETAILS_CACHE_KEY_EXISTING_SEARCH_PARAMS = BaseHapiFhirDao.class.getName() + "_EXISTING_SEARCH_PARAMS";
private static boolean ourValidationDisabledForUnitTest;
private static boolean ourDisableIncrementOnUpdateForUnitTest = false;

@@ -394,6 +396,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora

@Autowired
public void setContext(FhirContext theContext) {
super.myFhirContext = theContext;
myContext = theContext;
}

@@ -604,10 +607,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora

}

boolean skipUpdatingTags = false;
if (myConfig.isMassIngestionMode() && theEntity.isHasTags()) {
skipUpdatingTags = true;
}
boolean skipUpdatingTags = myConfig.isMassIngestionMode() && theEntity.isHasTags();

if (!skipUpdatingTags) {
Set<ResourceTag> allDefs = new HashSet<>();

@@ -668,6 +668,10 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora

// Don't check existing - We'll rely on the SHA256 hash only

} else if (theEntity.getVersion() == 1L && theEntity.getCurrentVersionEntity() == null) {

// No previous version if this is the first version

} else {
ResourceHistoryTable currentHistoryVersion = theEntity.getCurrentVersionEntity();
if (currentHistoryVersion == null) {

@@ -791,27 +795,23 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
res.getMeta().setLastUpdated(theEntity.getUpdatedDate());
IDao.RESOURCE_PID.put(res, theEntity.getResourceId());

Collection<? extends BaseTag> tags = theTagList;

if (theEntity.isHasTags()) {
for (BaseTag next : tags) {
switch (next.getTag().getTagType()) {
case PROFILE:
res.getMeta().addProfile(next.getTag().getCode());
break;
case SECURITY_LABEL:
IBaseCoding sec = res.getMeta().addSecurity();
sec.setSystem(next.getTag().getSystem());
sec.setCode(next.getTag().getCode());
sec.setDisplay(next.getTag().getDisplay());
break;
case TAG:
IBaseCoding tag = res.getMeta().addTag();
tag.setSystem(next.getTag().getSystem());
tag.setCode(next.getTag().getCode());
tag.setDisplay(next.getTag().getDisplay());
break;
}
for (BaseTag next : theTagList) {
switch (next.getTag().getTagType()) {
case PROFILE:
res.getMeta().addProfile(next.getTag().getCode());
break;
case SECURITY_LABEL:
IBaseCoding sec = res.getMeta().addSecurity();
sec.setSystem(next.getTag().getSystem());
sec.setCode(next.getTag().getCode());
sec.setDisplay(next.getTag().getDisplay());
break;
case TAG:
IBaseCoding tag = res.getMeta().addTag();
tag.setSystem(next.getTag().getSystem());
tag.setCode(next.getTag().getCode());
tag.setDisplay(next.getTag().getDisplay());
break;
}
}

@@ -912,7 +912,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
// 1. get resource, it's encoding and the tags if any
byte[] resourceBytes;
ResourceEncodingEnum resourceEncoding;
Collection<? extends BaseTag> myTagList;
Collection<? extends BaseTag> tagList = Collections.emptyList();
long version;
String provenanceSourceUri = null;
String provenanceRequestId = null;

@@ -921,10 +921,14 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
ResourceHistoryTable history = (ResourceHistoryTable) theEntity;
resourceBytes = history.getResource();
resourceEncoding = history.getEncoding();
if (history.isHasTags()) {
myTagList = history.getTags();
if (getConfig().getTagStorageMode() == DaoConfig.TagStorageModeEnum.VERSIONED) {
if (history.isHasTags()) {
tagList = history.getTags();
}
} else {
myTagList = Collections.emptyList();
if (history.getResourceTable().isHasTags()) {
tagList = history.getResourceTable().getTags();
}
}
version = history.getVersion();
if (history.getProvenance() != null) {

@@ -948,9 +952,9 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
resourceBytes = history.getResource();
resourceEncoding = history.getEncoding();
if (resource.isHasTags()) {
myTagList = resource.getTags();
tagList = resource.getTags();
} else {
myTagList = Collections.emptyList();
tagList = Collections.emptyList();
}
version = history.getVersion();
if (history.getProvenance() != null) {

@@ -966,9 +970,9 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
provenanceRequestId = view.getProvenanceRequestId();
provenanceSourceUri = view.getProvenanceSourceUri();
if (theTagList == null)
myTagList = new HashSet<>();
tagList = new HashSet<>();
else
myTagList = theTagList;
tagList = theTagList;
} else {
// something wrong
return null;

@@ -980,7 +984,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
// 3. Use the appropriate custom type if one is specified in the context
Class<R> resourceType = theResourceType;
if (myContext.hasDefaultTypeForProfile()) {
for (BaseTag nextTag : myTagList) {
for (BaseTag nextTag : tagList) {
if (nextTag.getTag().getTagType() == TagTypeEnum.PROFILE) {
String profile = nextTag.getTag().getCode();
if (isNotBlank(profile)) {

@@ -1030,10 +1034,10 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
// 5. fill MetaData
if (retVal instanceof IResource) {
IResource res = (IResource) retVal;
retVal = populateResourceMetadataHapi(resourceType, theEntity, myTagList, theForHistoryOperation, res, version);
retVal = populateResourceMetadataHapi(resourceType, theEntity, tagList, theForHistoryOperation, res, version);
} else {
IAnyResource res = (IAnyResource) retVal;
retVal = populateResourceMetadataRi(resourceType, theEntity, myTagList, theForHistoryOperation, res, version);
retVal = populateResourceMetadataRi(resourceType, theEntity, tagList, theForHistoryOperation, res, version);
}

// 6. Handle source (provenance)

@@ -1152,14 +1156,22 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
changed = populateResourceIntoEntity(theTransactionDetails, theRequest, theResource, entity, true);

} else {

// CREATE or UPDATE
existingParams = new ResourceIndexedSearchParams(entity);

IdentityHashMap<ResourceTable, ResourceIndexedSearchParams> existingSearchParams = theTransactionDetails.getOrCreateUserData(TRANSACTION_DETAILS_CACHE_KEY_EXISTING_SEARCH_PARAMS, () -> new IdentityHashMap<>());
existingParams = existingSearchParams.get(entity);
if (existingParams == null) {
existingParams = new ResourceIndexedSearchParams(entity);
existingSearchParams.put(entity, existingParams);
}
entity.setDeleted(null);

if (thePerformIndexing) {
// TODO: is this IF statement always true? Try removing it
if (thePerformIndexing || ((ResourceTable) theEntity).getVersion() == 1) {

newParams = new ResourceIndexedSearchParams();
mySearchParamWithInlineReferencesExtractor.populateFromResource(newParams, theTransactionDetails, entity, theResource, existingParams, theRequest);
mySearchParamWithInlineReferencesExtractor.populateFromResource(newParams, theTransactionDetails, entity, theResource, existingParams, theRequest, thePerformIndexing);

changed = populateResourceIntoEntity(theTransactionDetails, theRequest, theResource, entity, true);

@@ -1175,7 +1187,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
// to match a resource and then update it in a way that it no longer
// matches. We could certainly make this configurable though in the
// future.
if (entity.getVersion() <= 1L && entity.getCreatedByMatchUrl() != null) {
if (entity.getVersion() <= 1L && entity.getCreatedByMatchUrl() != null && thePerformIndexing) {
verifyMatchUrlForConditionalCreate(theResource, entity.getCreatedByMatchUrl(), entity, newParams);
}

@@ -1205,7 +1217,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora

}

if (thePerformIndexing && changed != null && !changed.isChanged() && !theForceUpdate && myConfig.isSuppressUpdatesWithNoChange()) {
if (thePerformIndexing && changed != null && !changed.isChanged() && !theForceUpdate && myConfig.isSuppressUpdatesWithNoChange() && (entity.getVersion() > 1 || theUpdateVersion)) {
ourLog.debug("Resource {} has not changed", entity.getIdDt().toUnqualified().getValue());
if (theResource != null) {
updateResourceMetadata(entity, theResource);

@@ -1245,7 +1257,8 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
* Create history entry
*/
if (theCreateNewHistoryEntry) {
final ResourceHistoryTable historyEntry = entity.toHistory();
boolean versionedTags = getConfig().getTagStorageMode() == DaoConfig.TagStorageModeEnum.VERSIONED;
final ResourceHistoryTable historyEntry = entity.toHistory(versionedTags);
historyEntry.setEncoding(changed.getEncoding());
historyEntry.setResource(changed.getResource());

@@ -1575,6 +1588,11 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
// nothing yet
}

@VisibleForTesting
public void setDaoConfigForUnitTest(DaoConfig theDaoConfig) {
myConfig = theDaoConfig;
}

private class AddTagDefinitionToCacheAfterCommitSynchronization implements TransactionSynchronization {

private final TagDefinition myTagDefinition;

@@ -1726,11 +1744,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
ourDisableIncrementOnUpdateForUnitTest = theDisableIncrementOnUpdateForUnitTest;
}

@VisibleForTesting
public void setDaoConfigForUnitTest(DaoConfig theDaoConfig) {
myConfig = theDaoConfig;
}

/**
* Do not call this method outside of unit tests
*/

@@ -34,7 +34,7 @@ import ca.uhn.fhir.jpa.api.model.DeleteMethodOutcome;
import ca.uhn.fhir.jpa.api.model.ExpungeOptions;
import ca.uhn.fhir.jpa.api.model.ExpungeOutcome;
import ca.uhn.fhir.jpa.api.model.LazyDaoMethodOutcome;
import ca.uhn.fhir.jpa.dao.expunge.DeleteExpungeService;
import ca.uhn.fhir.jpa.dao.index.IdHelperService;
import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
import ca.uhn.fhir.jpa.delete.DeleteConflictService;
import ca.uhn.fhir.jpa.model.entity.BaseHasResource;

@@ -55,9 +55,9 @@ import ca.uhn.fhir.jpa.search.PersistedJpaBundleProvider;
import ca.uhn.fhir.jpa.search.cache.SearchCacheStatusEnum;
import ca.uhn.fhir.jpa.search.reindex.IResourceReindexingSvc;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import ca.uhn.fhir.jpa.searchparam.ResourceSearch;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.searchparam.extractor.ResourceIndexedSearchParams;
import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
import ca.uhn.fhir.jpa.util.MemoryCacheService;
import ca.uhn.fhir.model.api.IQueryParameterType;
import ca.uhn.fhir.model.dstu2.resource.ListResource;

@@ -77,6 +77,7 @@ import ca.uhn.fhir.rest.api.server.IPreResourceShowDetails;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.SimplePreResourceAccessDetails;
import ca.uhn.fhir.rest.api.server.SimplePreResourceShowDetails;
import ca.uhn.fhir.rest.api.server.storage.IDeleteExpungeJobSubmitter;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
import ca.uhn.fhir.rest.param.HasParam;

@@ -91,6 +92,7 @@ import ca.uhn.fhir.rest.server.exceptions.ResourceVersionConflictException;
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor.ActionRequestDetails;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
import ca.uhn.fhir.util.ObjectUtil;
import ca.uhn.fhir.util.OperationOutcomeUtil;
import ca.uhn.fhir.util.ReflectionUtil;

@@ -111,9 +113,10 @@ import org.hl7.fhir.instance.model.api.IBaseParameters;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobParametersInvalidException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Required;
import org.springframework.data.domain.SliceImpl;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.TransactionDefinition;
import org.springframework.transaction.annotation.Propagation;

@@ -131,6 +134,7 @@ import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashSet;
import java.util.Iterator;

@@ -168,7 +172,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
@Autowired
private MatchUrlService myMatchUrlService;
@Autowired
private DeleteExpungeService myDeleteExpungeService;
private IDeleteExpungeJobSubmitter myDeleteExpungeJobSubmitter;

private IInstanceValidatorModule myInstanceValidator;
private String myResourceName;

@@ -257,9 +261,10 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
entity.setResourceType(toResourceName(theResource));
entity.setPartitionId(theRequestPartitionId);
entity.setCreatedByMatchUrl(theIfNoneExist);
entity.setVersion(1);

if (isNotBlank(theIfNoneExist)) {
Set<ResourcePersistentId> match = myMatchResourceUrlService.processMatchUrl(theIfNoneExist, myResourceType, theRequest);
Set<ResourcePersistentId> match = myMatchResourceUrlService.processMatchUrl(theIfNoneExist, myResourceType, theTransactionDetails, theRequest);
if (match.size() > 1) {
String msg = getContext().getLocalizer().getMessageSanitized(BaseHapiFhirDao.class, "transactionOperationWithMultipleMatchFailure", "CREATE", theIfNoneExist, match.size());
throw new PreconditionFailedException(msg);

@@ -338,9 +343,17 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
doCallHooks(theTransactionDetails, theRequest, Pointcut.STORAGE_PRESTORAGE_RESOURCE_CREATED, hookParams);

// Perform actual DB update
ResourceTable updatedEntity = updateEntity(theRequest, theResource, entity, null, thePerformIndexing, thePerformIndexing, theTransactionDetails, false, thePerformIndexing);
ResourceTable updatedEntity = updateEntity(theRequest, theResource, entity, null, thePerformIndexing, false, theTransactionDetails, false, thePerformIndexing);

IIdType id = myFhirContext.getVersion().newIdType().setValue(updatedEntity.getIdDt().toUnqualifiedVersionless().getValue());
ResourcePersistentId persistentId = new ResourcePersistentId(updatedEntity.getResourceId());
theTransactionDetails.addResolvedResourceId(id, persistentId);
if (entity.getForcedId() != null) {
myIdHelperService.addResolvedPidToForcedId(persistentId, theRequestPartitionId, updatedEntity.getResourceType(), updatedEntity.getForcedId().getForcedId());
}

theResource.setId(entity.getIdDt());

if (serverAssignedId) {
switch (getConfig().getResourceClientIdStrategy()) {
case NOT_ALLOWED:

@@ -357,16 +370,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B

if (theIfNoneExist != null) {
// Pre-cache the match URL
myMatchResourceUrlService.matchUrlResolved(theIfNoneExist, new ResourcePersistentId(entity.getResourceId()));
}

/*
* If we aren't indexing (meaning we're probably executing a sub-operation within a transaction),
* we'll manually increase the version. This is important because we want the updated version number
* to be reflected in the resource shared with interceptors
*/
if (!thePerformIndexing) {
incrementId(theResource, entity, theResource.getIdElement());
myMatchResourceUrlService.matchUrlResolved(theTransactionDetails, getResourceName(), theIfNoneExist, new ResourcePersistentId(entity.getResourceId()));
}

// Update the version/last updated in the resource so that interceptors get

@@ -399,9 +403,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
if (updatedEntity.getForcedId() != null) {
forcedId = updatedEntity.getForcedId().getForcedId();
}
if (myIdHelperService != null) {
myIdHelperService.addResolvedPidToForcedId(new ResourcePersistentId(updatedEntity.getResourceId()), theRequestPartitionId, getResourceName(), forcedId);
}
myIdHelperService.addResolvedPidToForcedId(persistentId, theRequestPartitionId, getResourceName(), forcedId);

ourLog.debug(msg);
return outcome;

@@ -443,7 +445,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
validateIdPresentForDelete(theId);
validateDeleteEnabled();

final ResourceTable entity = readEntityLatestVersion(theId, theRequestDetails);
final ResourceTable entity = readEntityLatestVersion(theId, theRequestDetails, theTransactionDetails);
if (theId.hasVersionIdPart() && Long.parseLong(theId.getVersionIdPart()) != entity.getVersion()) {
throw new ResourceVersionConflictException("Trying to delete " + theId + " but this is not the current version");
}

@@ -517,12 +519,17 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
}

@Override
public DeleteMethodOutcome deleteByUrl(String theUrl, RequestDetails theRequestDetails) {
public DeleteMethodOutcome deleteByUrl(String theUrl, RequestDetails theRequest) {
validateDeleteEnabled();
ResourceSearch resourceSearch = myMatchUrlService.getResourceSearch(theUrl);

return myTransactionService.execute(theRequestDetails, tx -> {
if (resourceSearch.isDeleteExpunge()) {
return deleteExpunge(theUrl, theRequest);
}

return myTransactionService.execute(theRequest, tx -> {
DeleteConflictList deleteConflicts = new DeleteConflictList();
DeleteMethodOutcome outcome = deleteByUrl(theUrl, deleteConflicts, theRequestDetails);
DeleteMethodOutcome outcome = deleteByUrl(theUrl, deleteConflicts, theRequest);
DeleteConflictService.validateDeleteConflictsEmptyOrThrowException(getContext(), deleteConflicts);
return outcome;
});

@@ -541,8 +548,8 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B

@Nonnull
private DeleteMethodOutcome doDeleteByUrl(String theUrl, DeleteConflictList deleteConflicts, RequestDetails theRequest) {
RuntimeResourceDefinition resourceDef = getContext().getResourceDefinition(myResourceType);
SearchParameterMap paramMap = myMatchUrlService.translateMatchUrl(theUrl, resourceDef);
ResourceSearch resourceSearch = myMatchUrlService.getResourceSearch(theUrl);
SearchParameterMap paramMap = resourceSearch.getSearchParameterMap();
paramMap.setLoadSynchronous(true);

Set<ResourcePersistentId> resourceIds = myMatchResourceUrlService.search(paramMap, myResourceType, theRequest);

@@ -553,19 +560,21 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
}
}

if (paramMap.isDeleteExpunge()) {
return deleteExpunge(theUrl, theRequest, resourceIds);
} else {
return deletePidList(theUrl, resourceIds, deleteConflicts, theRequest);
}
return deletePidList(theUrl, resourceIds, deleteConflicts, theRequest);
}

private DeleteMethodOutcome deleteExpunge(String theUrl, RequestDetails theTheRequest, Set<ResourcePersistentId> theResourceIds) {
if (!getConfig().isExpungeEnabled() || !getConfig().isDeleteExpungeEnabled()) {
throw new MethodNotAllowedException("_expunge is not enabled on this server");
private DeleteMethodOutcome deleteExpunge(String theUrl, RequestDetails theRequest) {
if (!getConfig().canDeleteExpunge()) {
throw new MethodNotAllowedException("_expunge is not enabled on this server: " + getConfig().cannotDeleteExpungeReason());
}

return myDeleteExpungeService.expungeByResourcePids(theUrl, myResourceName, new SliceImpl<>(ResourcePersistentId.toLongList(theResourceIds)), theTheRequest);
List<String> urlsToDeleteExpunge = Collections.singletonList(theUrl);
try {
JobExecution jobExecution = myDeleteExpungeJobSubmitter.submitJob(getConfig().getExpungeBatchSize(), theRequest, urlsToDeleteExpunge);
return new DeleteMethodOutcome(createInfoOperationOutcome("Delete job submitted with id " + jobExecution.getId()));
} catch (JobParametersInvalidException e) {
throw new InvalidRequestException("Invalid Delete Expunge Request: " + e.getMessage(), e);
}
}
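With this change, a conditional delete whose match URL carries _expunge=true no longer runs synchronously: deleteByUrl() short-circuits into deleteExpunge(), which submits the Spring Batch job and reports the job id in an OperationOutcome. A rough client-side sketch of triggering that path is below; the server base URL is made up, and the server must have expunge and delete-expunge enabled in DaoConfig or the MethodNotAllowedException shown above is thrown instead.

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.client.api.IGenericClient;

public class DeleteExpungeClientExample {
	public static void main(String[] args) {
		FhirContext ctx = FhirContext.forR4();
		// Hypothetical server address, for illustration only
		IGenericClient client = ctx.newRestfulGenericClient("http://localhost:8080/fhir");

		// The _expunge=true parameter on the conditional delete URL is what routes
		// the request into the batch-job submission path shown above
		client.delete()
			.resourceConditionalByUrl("Patient?active=false&_expunge=true")
			.execute();
	}
}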
@Nonnull

@@ -913,7 +922,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
throw new ResourceNotFoundException(theResourceId);
}

ResourceTable latestVersion = readEntityLatestVersion(theResourceId, theRequest);
ResourceTable latestVersion = readEntityLatestVersion(theResourceId, theRequest, transactionDetails);
if (latestVersion.getVersion() != entity.getVersion()) {
doMetaAdd(theMetaAdd, entity, theRequest, transactionDetails);
} else {

@@ -948,7 +957,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
throw new ResourceNotFoundException(theResourceId);
}

ResourceTable latestVersion = readEntityLatestVersion(theResourceId, theRequest);
ResourceTable latestVersion = readEntityLatestVersion(theResourceId, theRequest, transactionDetails);
if (latestVersion.getVersion() != entity.getVersion()) {
doMetaDelete(theMetaDel, entity, theRequest, transactionDetails);
} else {

@@ -1007,14 +1016,14 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B

@Override
public DaoMethodOutcome patch(IIdType theId, String theConditionalUrl, PatchTypeEnum thePatchType, String thePatchBody, IBaseParameters theFhirPatchBody, RequestDetails theRequest) {
return myTransactionService.execute(theRequest, tx -> doPatch(theId, theConditionalUrl, thePatchType, thePatchBody, theFhirPatchBody, theRequest));
return myTransactionService.execute(theRequest, tx -> doPatch(theId, theConditionalUrl, thePatchType, thePatchBody, theFhirPatchBody, theRequest, new TransactionDetails()));
}

private DaoMethodOutcome doPatch(IIdType theId, String theConditionalUrl, PatchTypeEnum thePatchType, String thePatchBody, IBaseParameters theFhirPatchBody, RequestDetails theRequest) {
private DaoMethodOutcome doPatch(IIdType theId, String theConditionalUrl, PatchTypeEnum thePatchType, String thePatchBody, IBaseParameters theFhirPatchBody, RequestDetails theRequest, TransactionDetails theTransactionDetails) {
ResourceTable entityToUpdate;
if (isNotBlank(theConditionalUrl)) {

Set<ResourcePersistentId> match = myMatchResourceUrlService.processMatchUrl(theConditionalUrl, myResourceType, theRequest);
Set<ResourcePersistentId> match = myMatchResourceUrlService.processMatchUrl(theConditionalUrl, myResourceType, theTransactionDetails, theRequest);
if (match.size() > 1) {
String msg = getContext().getLocalizer().getMessageSanitized(BaseHapiFhirDao.class, "transactionOperationWithMultipleMatchFailure", "PATCH", theConditionalUrl, match.size());
throw new PreconditionFailedException(msg);

@@ -1027,7 +1036,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
}

} else {
entityToUpdate = readEntityLatestVersion(theId, theRequest);
entityToUpdate = readEntityLatestVersion(theId, theRequest, theTransactionDetails);
if (theId.hasVersionIdPart()) {
if (theId.getVersionIdPartAsLong() != entityToUpdate.getVersion()) {
throw new ResourceVersionConflictException("Version " + theId.getVersionIdPart() + " is not the most recent version of this resource, unable to apply patch");

@@ -1064,7 +1073,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
@Override
public void start() {
assert getConfig() != null;

ourLog.debug("Starting resource DAO for type: {}", getResourceName());
myInstanceValidator = getApplicationContext().getBean(IInstanceValidatorModule.class);
myTxTemplate = new TransactionTemplate(myPlatformTransactionManager);

@@ -1252,15 +1261,19 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
}

@Nonnull
protected ResourceTable readEntityLatestVersion(IIdType theId, RequestDetails theRequestDetails) {
protected ResourceTable readEntityLatestVersion(IIdType theId, RequestDetails theRequestDetails, TransactionDetails theTransactionDetails) {
RequestPartitionId requestPartitionId = myRequestPartitionHelperService.determineReadPartitionForRequest(theRequestDetails, getResourceName());
return readEntityLatestVersion(theId, requestPartitionId);
return readEntityLatestVersion(theId, requestPartitionId, theTransactionDetails);
}

@Nonnull
private ResourceTable readEntityLatestVersion(IIdType theId, @Nullable RequestPartitionId theRequestPartitionId) {
private ResourceTable readEntityLatestVersion(IIdType theId, @Nullable RequestPartitionId theRequestPartitionId, TransactionDetails theTransactionDetails) {
validateResourceTypeAndThrowInvalidRequestException(theId);

if (theTransactionDetails.isResolvedResourceIdEmpty(theId.toUnqualifiedVersionless())) {
throw new ResourceNotFoundException(theId);
}

ResourcePersistentId persistentId = myIdHelperService.resolveResourcePersistentIds(theRequestPartitionId, getResourceName(), theId.getIdPart());
ResourceTable entity = myEntityManager.find(ResourceTable.class, persistentId.getId());
if (entity == null) {

@@ -1569,7 +1582,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B

IIdType resourceId;
if (isNotBlank(theMatchUrl)) {
Set<ResourcePersistentId> match = myMatchResourceUrlService.processMatchUrl(theMatchUrl, myResourceType, theRequest);
Set<ResourcePersistentId> match = myMatchResourceUrlService.processMatchUrl(theMatchUrl, myResourceType, theTransactionDetails, theRequest);
if (match.size() > 1) {
String msg = getContext().getLocalizer().getMessageSanitized(BaseHapiFhirDao.class, "transactionOperationWithMultipleMatchFailure", "UPDATE", theMatchUrl, match.size());
throw new PreconditionFailedException(msg);

@@ -1582,7 +1595,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B

// Pre-cache the match URL
if (outcome.getPersistentId() != null) {
myMatchResourceUrlService.matchUrlResolved(theMatchUrl, outcome.getPersistentId());
myMatchResourceUrlService.matchUrlResolved(theTransactionDetails, getResourceName(), theMatchUrl, outcome.getPersistentId());
}

return outcome;

@@ -1610,7 +1623,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B

if (!create) {
try {
entity = readEntityLatestVersion(resourceId, requestPartitionId);
entity = readEntityLatestVersion(resourceId, requestPartitionId, theTransactionDetails);
} catch (ResourceNotFoundException e) {
create = true;
}

@@ -1692,6 +1705,8 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
@Override
@Transactional(propagation = Propagation.SUPPORTS)
public MethodOutcome validate(T theResource, IIdType theId, String theRawResource, EncodingEnum theEncoding, ValidationModeEnum theMode, String theProfile, RequestDetails theRequest) {
TransactionDetails transactionDetails = new TransactionDetails();

if (theRequest != null) {
ActionRequestDetails requestDetails = new ActionRequestDetails(theRequest, theResource, null, theId);
notifyInterceptors(RestOperationTypeEnum.VALIDATE, requestDetails);

@@ -1701,7 +1716,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
if (theId == null || theId.hasIdPart() == false) {
throw new InvalidRequestException("No ID supplied. ID is required when validating with mode=DELETE");
}
final ResourceTable entity = readEntityLatestVersion(theId, theRequest);
final ResourceTable entity = readEntityLatestVersion(theId, theRequest, transactionDetails);

// Validate that there are no resources pointing to the candidate that
// would prevent deletion

@@ -1799,6 +1814,11 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
}
}

@VisibleForTesting
public void setIdHelperSvcForUnitTest(IdHelperService theIdHelperService) {
myIdHelperService = theIdHelperService;
}

private static class IdChecker implements IValidatorModule {

private final ValidationModeEnum myMode;

@@ -145,6 +145,20 @@ public abstract class BaseTransactionProcessor {
@Autowired
private InMemoryResourceMatcher myInMemoryResourceMatcher;

@VisibleForTesting
public void setDaoConfig(DaoConfig theDaoConfig) {
myDaoConfig = theDaoConfig;
}

public ITransactionProcessorVersionAdapter getVersionAdapter() {
return myVersionAdapter;
}

@VisibleForTesting
public void setVersionAdapter(ITransactionProcessorVersionAdapter theVersionAdapter) {
myVersionAdapter = theVersionAdapter;
}

@PostConstruct
public void start() {
ourLog.trace("Starting transaction processor");

@@ -287,11 +301,6 @@ public abstract class BaseTransactionProcessor {
}
}

@VisibleForTesting
public void setVersionAdapter(ITransactionProcessorVersionAdapter theVersionAdapter) {
myVersionAdapter = theVersionAdapter;
}

@VisibleForTesting
public void setTxManager(PlatformTransactionManager theTxManager) {
myTxManager = theTxManager;

@@ -582,8 +591,8 @@ public abstract class BaseTransactionProcessor {
myModelConfig = theModelConfig;
}

private Map<IBase, IIdType> doTransactionWriteOperations(final RequestDetails theRequest, String theActionName, TransactionDetails theTransactionDetails, Set<IIdType> theAllIds,
Map<IIdType, IIdType> theIdSubstitutions, Map<IIdType, DaoMethodOutcome> theIdToPersistedOutcome, IBaseBundle theResponse, IdentityHashMap<IBase, Integer> theOriginalRequestOrder, List<IBase> theEntries, StopWatch theTransactionStopWatch) {
protected Map<IBase, IIdType> doTransactionWriteOperations(final RequestDetails theRequest, String theActionName, TransactionDetails theTransactionDetails, Set<IIdType> theAllIds,
Map<IIdType, IIdType> theIdSubstitutions, Map<IIdType, DaoMethodOutcome> theIdToPersistedOutcome, IBaseBundle theResponse, IdentityHashMap<IBase, Integer> theOriginalRequestOrder, List<IBase> theEntries, StopWatch theTransactionStopWatch) {

theTransactionDetails.beginAcceptingDeferredInterceptorBroadcasts(
Pointcut.STORAGE_PRECOMMIT_RESOURCE_CREATED,

@@ -1067,7 +1076,7 @@ public abstract class BaseTransactionProcessor {
if (!nextId.hasIdPart()) {
if (resourceReference.getResource() != null) {
IIdType targetId = resourceReference.getResource().getIdElement();
if (targetId.getValue() == null) {
if (targetId.getValue() == null || targetId.getValue().startsWith("#")) {
// This means it's a contained resource
continue;
} else if (theIdSubstitutions.containsValue(targetId)) {

@@ -1136,10 +1145,7 @@ public abstract class BaseTransactionProcessor {

IBasePersistedResource updateOutcome = null;
if (updatedEntities.contains(nextOutcome.getEntity())) {
boolean forceUpdateVersion = false;
if (!theReferencesToAutoVersion.isEmpty()) {
forceUpdateVersion = true;
}
boolean forceUpdateVersion = !theReferencesToAutoVersion.isEmpty();

updateOutcome = jpaDao.updateInternal(theRequest, nextResource, true, forceUpdateVersion, nextOutcome.getEntity(), nextResource.getIdElement(), nextOutcome.getPreviousResource(), theTransactionDetails);
} else if (!nonUpdatedEntities.contains(nextOutcome.getId())) {

@@ -1258,7 +1264,6 @@ public abstract class BaseTransactionProcessor {
return dao;
}

private String toResourceName(Class<? extends IBaseResource> theResourceType) {
return myContext.getResourceType(theResourceType);
}

@@ -1318,11 +1323,6 @@ public abstract class BaseTransactionProcessor {
return null;
}

@VisibleForTesting
public void setDaoConfig(DaoConfig theDaoConfig) {
myDaoConfig = theDaoConfig;
}

public interface ITransactionProcessorVersionAdapter<BUNDLE extends IBaseBundle, BUNDLEENTRY extends IBase> {

void setResponseStatus(BUNDLEENTRY theBundleEntry, String theStatus);

@@ -25,9 +25,9 @@ import ca.uhn.fhir.jpa.dao.data.ISubscriptionTableDao;
import ca.uhn.fhir.jpa.entity.SubscriptionTable;
import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
import ca.uhn.fhir.model.dstu2.resource.Subscription;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.springframework.beans.factory.annotation.Autowired;

@@ -51,8 +51,8 @@ public class FhirResourceDaoSubscriptionDstu2 extends BaseHapiFhirResourceDao<Su
}

@Override
public Long getSubscriptionTablePidForSubscriptionResource(IIdType theId, RequestDetails theRequest) {
ResourceTable entity = readEntityLatestVersion(theId, theRequest);
public Long getSubscriptionTablePidForSubscriptionResource(IIdType theId, RequestDetails theRequest, TransactionDetails theTransactionDetails) {
ResourceTable entity = readEntityLatestVersion(theId, theRequest, theTransactionDetails);
SubscriptionTable table = mySubscriptionTableDao.findOneByResourcePid(entity.getId());
if (table == null) {
return null;

@@ -31,19 +31,21 @@ import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.model.search.StorageProcessingMessage;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
import ca.uhn.fhir.jpa.util.MemoryCacheService;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
import ca.uhn.fhir.util.StopWatch;
import org.apache.commons.lang3.Validate;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import javax.annotation.Nullable;
import java.util.Collections;
import java.util.Set;

@@ -65,31 +67,63 @@ public class MatchResourceUrlService {
/**
* Note that this will only return a maximum of 2 results!!
*/
public <R extends IBaseResource> Set<ResourcePersistentId> processMatchUrl(String theMatchUrl, Class<R> theResourceType, RequestDetails theRequest) {
if (myDaoConfig.getMatchUrlCache()) {
ResourcePersistentId existing = myMemoryCacheService.getIfPresent(MemoryCacheService.CacheEnum.MATCH_URL, theMatchUrl);
if (existing != null) {
return Collections.singleton(existing);
public <R extends IBaseResource> Set<ResourcePersistentId> processMatchUrl(String theMatchUrl, Class<R> theResourceType, TransactionDetails theTransactionDetails, RequestDetails theRequest) {
String resourceType = myContext.getResourceType(theResourceType);
String matchUrl = massageForStorage(resourceType, theMatchUrl);

ResourcePersistentId resolvedInTransaction = theTransactionDetails.getResolvedMatchUrls().get(matchUrl);
if (resolvedInTransaction != null) {
if (resolvedInTransaction == TransactionDetails.NOT_FOUND) {
return Collections.emptySet();
} else {
return Collections.singleton(resolvedInTransaction);
}
}

ResourcePersistentId resolvedInCache = processMatchUrlUsingCacheOnly(resourceType, matchUrl);
if (resolvedInCache != null) {
return Collections.singleton(resolvedInCache);
}

RuntimeResourceDefinition resourceDef = myContext.getResourceDefinition(theResourceType);
SearchParameterMap paramMap = myMatchUrlService.translateMatchUrl(theMatchUrl, resourceDef);
SearchParameterMap paramMap = myMatchUrlService.translateMatchUrl(matchUrl, resourceDef);
if (paramMap.isEmpty() && paramMap.getLastUpdated() == null) {
throw new InvalidRequestException("Invalid match URL[" + theMatchUrl + "] - URL has no search parameters");
throw new InvalidRequestException("Invalid match URL[" + matchUrl + "] - URL has no search parameters");
}
paramMap.setLoadSynchronousUpTo(2);

Set<ResourcePersistentId> retVal = search(paramMap, theResourceType, theRequest);

if (myDaoConfig.getMatchUrlCache() && retVal.size() == 1) {
if (myDaoConfig.isMatchUrlCacheEnabled() && retVal.size() == 1) {
ResourcePersistentId pid = retVal.iterator().next();
myMemoryCacheService.putAfterCommit(MemoryCacheService.CacheEnum.MATCH_URL, theMatchUrl, pid);
myMemoryCacheService.putAfterCommit(MemoryCacheService.CacheEnum.MATCH_URL, matchUrl, pid);
}

return retVal;
}

private String massageForStorage(String theResourceType, String theMatchUrl) {
Validate.notBlank(theMatchUrl, "theMatchUrl must not be null or blank");
int questionMarkIdx = theMatchUrl.indexOf("?");
if (questionMarkIdx > 0) {
return theMatchUrl;
}
if (questionMarkIdx == 0) {
return theResourceType + theMatchUrl;
}
return theResourceType + "?" + theMatchUrl;
}

@Nullable
public ResourcePersistentId processMatchUrlUsingCacheOnly(String theResourceType, String theMatchUrl) {
ResourcePersistentId existing = null;
if (myDaoConfig.getMatchUrlCache()) {
String matchUrl = massageForStorage(theResourceType, theMatchUrl);
existing = myMemoryCacheService.getIfPresent(MemoryCacheService.CacheEnum.MATCH_URL, matchUrl);
}
return existing;
}

public <R extends IBaseResource> Set<ResourcePersistentId> search(SearchParameterMap theParamMap, Class<R> theResourceType, RequestDetails theRequest) {
StopWatch sw = new StopWatch();
IFhirResourceDao<R> dao = myDaoRegistry.getResourceDao(theResourceType);

@@ -102,7 +136,7 @@ public class MatchResourceUrlService {
// Interceptor broadcast: JPA_PERFTRACE_INFO
if (CompositeInterceptorBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_INFO, myInterceptorBroadcaster, theRequest)) {
StorageProcessingMessage message = new StorageProcessingMessage();
message.setMessage("Processed conditional resource URL with " + retVal.size() + " result(s) in " + sw.toString());
message.setMessage("Processed conditional resource URL with " + retVal.size() + " result(s) in " + sw);
HookParams params = new HookParams()
.add(RequestDetails.class, theRequest)
.addIfMatchesType(ServletRequestDetails.class, theRequest)

@@ -113,11 +147,14 @@ public class MatchResourceUrlService {
}

public void matchUrlResolved(String theMatchUrl, ResourcePersistentId theResourcePersistentId) {
public void matchUrlResolved(TransactionDetails theTransactionDetails, String theResourceType, String theMatchUrl, ResourcePersistentId theResourcePersistentId) {
Validate.notBlank(theMatchUrl);
Validate.notNull(theResourcePersistentId);
if (myDaoConfig.getMatchUrlCache()) {
myMemoryCacheService.putAfterCommit(MemoryCacheService.CacheEnum.MATCH_URL, theMatchUrl, theResourcePersistentId);
String matchUrl = massageForStorage(theResourceType, theMatchUrl);
theTransactionDetails.addResolvedMatchUrl(matchUrl, theResourcePersistentId);
if (myDaoConfig.isMatchUrlCacheEnabled()) {
myMemoryCacheService.putAfterCommit(MemoryCacheService.CacheEnum.MATCH_URL, matchUrl, theResourcePersistentId);
}
}

}
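The TransactionDetails and cache lookups above only work if every caller keys match URLs the same way, which is what massageForStorage() guarantees: whatever form the client used, the stored key is always "ResourceType?params". The standalone snippet below restates that rule outside of the service so the three accepted input shapes are easy to see; it is an illustration written for this note, not the HAPI class itself.

public class MatchUrlNormalizationExample {

	// Re-statement of the normalization rule from massageForStorage() above
	static String normalize(String theResourceType, String theMatchUrl) {
		int questionMarkIdx = theMatchUrl.indexOf('?');
		if (questionMarkIdx > 0) {
			return theMatchUrl;                     // "Patient?identifier=foo" is already fully qualified
		}
		if (questionMarkIdx == 0) {
			return theResourceType + theMatchUrl;   // "?identifier=foo" becomes "Patient?identifier=foo"
		}
		return theResourceType + "?" + theMatchUrl; // "identifier=foo" becomes "Patient?identifier=foo"
	}

	public static void main(String[] args) {
		System.out.println(normalize("Patient", "identifier=foo|123"));
		System.out.println(normalize("Patient", "?identifier=foo|123"));
		System.out.println(normalize("Patient", "Patient?identifier=foo|123"));
		// All three print: Patient?identifier=foo|123
	}
}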

@@ -20,12 +20,31 @@ package ca.uhn.fhir.jpa.dao;
* #L%
*/

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
import ca.uhn.fhir.jpa.config.HapiFhirHibernateJpaDialect;
import ca.uhn.fhir.jpa.dao.index.IdHelperService;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamToken;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.model.api.IQueryParameterType;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.util.StopWatch;
import com.google.common.annotations.VisibleForTesting;
import org.apache.commons.lang3.Validate;
import org.hibernate.Session;
import org.hibernate.internal.SessionImpl;
import org.hl7.fhir.instance.model.api.IBase;
import org.hl7.fhir.instance.model.api.IBaseBundle;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@@ -35,17 +54,48 @@ import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.persistence.PersistenceContextType;
import javax.persistence.PersistenceException;
import javax.persistence.TypedQuery;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Predicate;
import javax.persistence.criteria.Root;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.IdentityHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.regex.Pattern;
import java.util.stream.Collectors;

import static ca.uhn.fhir.jpa.dao.index.IdHelperService.EMPTY_PREDICATE_ARRAY;
import static org.apache.commons.lang3.StringUtils.defaultString;
import static org.apache.commons.lang3.StringUtils.isNotBlank;

public class TransactionProcessor extends BaseTransactionProcessor {

public static final Pattern SINGLE_PARAMETER_MATCH_URL_PATTERN = Pattern.compile("^[^?]+[?][a-z0-9-]+=[^&,]+$");
private static final Logger ourLog = LoggerFactory.getLogger(TransactionProcessor.class);
@PersistenceContext(type = PersistenceContextType.TRANSACTION)
private EntityManager myEntityManager;
@Autowired(required = false)
private HapiFhirHibernateJpaDialect myHapiFhirHibernateJpaDialect;
@Autowired
private IdHelperService myIdHelperService;
@Autowired
private PartitionSettings myPartitionSettings;
@Autowired
private DaoConfig myDaoConfig;
@Autowired
private FhirContext myFhirContext;
@Autowired
private MatchResourceUrlService myMatchResourceUrlService;
@Autowired
private MatchUrlService myMatchUrlService;
@Autowired
private IRequestPartitionHelperSvc myRequestPartitionSvc;

public void setEntityManagerForUnitTest(EntityManager theEntityManager) {
myEntityManager = theEntityManager;

@@ -58,6 +108,225 @@ public class TransactionProcessor extends BaseTransactionProcessor {
Validate.notNull(myEntityManager);
}

@VisibleForTesting
public void setFhirContextForUnitTest(FhirContext theFhirContext) {
myFhirContext = theFhirContext;
}

@Override
protected Map<IBase, IIdType> doTransactionWriteOperations(final RequestDetails theRequest, String theActionName, TransactionDetails theTransactionDetails, Set<IIdType> theAllIds,
Map<IIdType, IIdType> theIdSubstitutions, Map<IIdType, DaoMethodOutcome> theIdToPersistedOutcome, IBaseBundle theResponse, IdentityHashMap<IBase, Integer> theOriginalRequestOrder, List<IBase> theEntries, StopWatch theTransactionStopWatch) {

ITransactionProcessorVersionAdapter versionAdapter = getVersionAdapter();
RequestPartitionId requestPartitionId = null;
if (!myPartitionSettings.isPartitioningEnabled()) {
requestPartitionId = RequestPartitionId.allPartitions();
} else {
// If all entries in the transaction point to the exact same partition, we'll try and do a pre-fetch
Set<RequestPartitionId> requestPartitionIdsForAllEntries = new HashSet<>();
for (IBase nextEntry : theEntries) {
IBaseResource resource = versionAdapter.getResource(nextEntry);
if (resource != null) {
RequestPartitionId requestPartition = myRequestPartitionSvc.determineReadPartitionForRequest(theRequest, myFhirContext.getResourceType(resource));
requestPartitionIdsForAllEntries.add(requestPartition);
}
}
if (requestPartitionIdsForAllEntries.size() == 1) {
requestPartitionId = requestPartitionIdsForAllEntries.iterator().next();
}
}

if (requestPartitionId != null) {

Set<String> foundIds = new HashSet<>();
List<Long> idsToPreFetch = new ArrayList<>();

/*
* Pre-Fetch any resources that are referred to normally by ID, e.g.
* regular FHIR updates within the transaction.
*/
List<IIdType> idsToPreResolve = new ArrayList<>();
for (IBase nextEntry : theEntries) {
IBaseResource resource = versionAdapter.getResource(nextEntry);
if (resource != null) {
String fullUrl = versionAdapter.getFullUrl(nextEntry);
boolean isPlaceholder = defaultString(fullUrl).startsWith("urn:");
if (!isPlaceholder) {
if (resource.getIdElement().hasIdPart() && resource.getIdElement().hasResourceType()) {
idsToPreResolve.add(resource.getIdElement());
}
}
}
}
List<ResourcePersistentId> outcome = myIdHelperService.resolveResourcePersistentIdsWithCache(requestPartitionId, idsToPreResolve);
for (ResourcePersistentId next : outcome) {
foundIds.add(next.getAssociatedResourceId().toUnqualifiedVersionless().getValue());
theTransactionDetails.addResolvedResourceId(next.getAssociatedResourceId(), next);
if (myDaoConfig.getResourceClientIdStrategy() != DaoConfig.ClientIdStrategyEnum.ANY || !next.getAssociatedResourceId().isIdPartValidLong()) {
idsToPreFetch.add(next.getIdAsLong());
}
}
for (IIdType next : idsToPreResolve) {
if (!foundIds.contains(next.toUnqualifiedVersionless().getValue())) {
theTransactionDetails.addResolvedResourceId(next.toUnqualifiedVersionless(), null);
}
}

/*
* Pre-resolve any conditional URLs we can
*/
List<MatchUrlToResolve> searchParameterMapsToResolve = new ArrayList<>();
for (IBase nextEntry : theEntries) {
IBaseResource resource = versionAdapter.getResource(nextEntry);
if (resource != null) {
String verb = versionAdapter.getEntryRequestVerb(myFhirContext, nextEntry);
String requestUrl = versionAdapter.getEntryRequestUrl(nextEntry);
String requestIfNoneExist = versionAdapter.getEntryIfNoneExist(nextEntry);
String resourceType = myFhirContext.getResourceType(resource);
if ("PUT".equals(verb) && requestUrl != null && requestUrl.contains("?")) {
ResourcePersistentId cachedId = myMatchResourceUrlService.processMatchUrlUsingCacheOnly(resourceType, requestUrl);
if (cachedId != null) {
idsToPreFetch.add(cachedId.getIdAsLong());
} else if (SINGLE_PARAMETER_MATCH_URL_PATTERN.matcher(requestUrl).matches()) {
RuntimeResourceDefinition resourceDefinition = myFhirContext.getResourceDefinition(resource);
SearchParameterMap matchUrlSearchMap = myMatchUrlService.translateMatchUrl(requestUrl, resourceDefinition);
searchParameterMapsToResolve.add(new MatchUrlToResolve(requestUrl, matchUrlSearchMap, resourceDefinition));
}
} else if ("POST".equals(verb) && requestIfNoneExist != null && requestIfNoneExist.contains("?")) {
ResourcePersistentId cachedId = myMatchResourceUrlService.processMatchUrlUsingCacheOnly(resourceType, requestIfNoneExist);
if (cachedId != null) {
idsToPreFetch.add(cachedId.getIdAsLong());
} else if (SINGLE_PARAMETER_MATCH_URL_PATTERN.matcher(requestIfNoneExist).matches()) {
RuntimeResourceDefinition resourceDefinition = myFhirContext.getResourceDefinition(resource);
SearchParameterMap matchUrlSearchMap = myMatchUrlService.translateMatchUrl(requestIfNoneExist, resourceDefinition);
searchParameterMapsToResolve.add(new MatchUrlToResolve(requestIfNoneExist, matchUrlSearchMap, resourceDefinition));
}
}

}
}
if (searchParameterMapsToResolve.size() > 0) {
CriteriaBuilder cb = myEntityManager.getCriteriaBuilder();
CriteriaQuery<ResourceIndexedSearchParamToken> cq = cb.createQuery(ResourceIndexedSearchParamToken.class);
Root<ResourceIndexedSearchParamToken> from = cq.from(ResourceIndexedSearchParamToken.class);
List<Predicate> orPredicates = new ArrayList<>();

for (MatchUrlToResolve next : searchParameterMapsToResolve) {
Collection<List<List<IQueryParameterType>>> values = next.myMatchUrlSearchMap.values();
if (values.size() == 1) {
List<List<IQueryParameterType>> andList = values.iterator().next();
IQueryParameterType param = andList.get(0).get(0);

if (param instanceof TokenParam) {
TokenParam tokenParam = (TokenParam) param;
Predicate hashPredicate = null;
if (isNotBlank(tokenParam.getValue()) && isNotBlank(tokenParam.getSystem())) {
next.myHashSystemAndValue = ResourceIndexedSearchParamToken.calculateHashSystemAndValue(myPartitionSettings, requestPartitionId, next.myResourceDefinition.getName(), next.myMatchUrlSearchMap.keySet().iterator().next(), tokenParam.getSystem(), tokenParam.getValue());
hashPredicate = cb.equal(from.get("myHashSystemAndValue").as(Long.class), next.myHashSystemAndValue);
} else if (isNotBlank(tokenParam.getValue())) {
next.myHashValue = ResourceIndexedSearchParamToken.calculateHashValue(myPartitionSettings, requestPartitionId, next.myResourceDefinition.getName(), next.myMatchUrlSearchMap.keySet().iterator().next(), tokenParam.getValue());
hashPredicate = cb.equal(from.get("myHashValue").as(Long.class), next.myHashValue);
|
||||
}
|
||||
|
||||
if (hashPredicate != null) {
|
||||
|
||||
if (myPartitionSettings.isPartitioningEnabled() && !myPartitionSettings.isIncludePartitionInSearchHashes()) {
|
||||
if (requestPartitionId.isDefaultPartition()) {
|
||||
Predicate partitionIdCriteria = cb.isNull(from.get("myPartitionIdValue").as(Integer.class));
|
||||
hashPredicate = cb.and(hashPredicate, partitionIdCriteria);
|
||||
} else if (!requestPartitionId.isAllPartitions()) {
|
||||
Predicate partitionIdCriteria = from.get("myPartitionIdValue").as(Integer.class).in(requestPartitionId.getPartitionIds());
|
||||
hashPredicate = cb.and(hashPredicate, partitionIdCriteria);
|
||||
}
|
||||
}
|
||||
|
||||
orPredicates.add(hashPredicate);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
if (orPredicates.size() > 1) {
|
||||
cq.where(cb.or(orPredicates.toArray(EMPTY_PREDICATE_ARRAY)));
|
||||
|
||||
TypedQuery<ResourceIndexedSearchParamToken> query = myEntityManager.createQuery(cq);
|
||||
List<ResourceIndexedSearchParamToken> results = query.getResultList();
|
||||
for (ResourceIndexedSearchParamToken nextResult : results) {
|
||||
|
||||
for (MatchUrlToResolve nextSearchParameterMap : searchParameterMapsToResolve) {
|
||||
if (nextSearchParameterMap.myHashSystemAndValue != null && nextSearchParameterMap.myHashSystemAndValue.equals(nextResult.getHashSystemAndValue())) {
|
||||
idsToPreFetch.add(nextResult.getResourcePid());
|
||||
myMatchResourceUrlService.matchUrlResolved(theTransactionDetails, nextSearchParameterMap.myResourceDefinition.getName(), nextSearchParameterMap.myRequestUrl, new ResourcePersistentId(nextResult.getResourcePid()));
|
||||
theTransactionDetails.addResolvedMatchUrl(nextSearchParameterMap.myRequestUrl, new ResourcePersistentId(nextResult.getResourcePid()));
|
||||
nextSearchParameterMap.myResolved = true;
|
||||
}
|
||||
if (nextSearchParameterMap.myHashValue != null && nextSearchParameterMap.myHashValue.equals(nextResult.getHashValue())) {
|
||||
idsToPreFetch.add(nextResult.getResourcePid());
|
||||
myMatchResourceUrlService.matchUrlResolved(theTransactionDetails, nextSearchParameterMap.myResourceDefinition.getName(), nextSearchParameterMap.myRequestUrl, new ResourcePersistentId(nextResult.getResourcePid()));
|
||||
theTransactionDetails.addResolvedMatchUrl(nextSearchParameterMap.myRequestUrl, new ResourcePersistentId(nextResult.getResourcePid()));
|
||||
nextSearchParameterMap.myResolved = true;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
for (MatchUrlToResolve nextSearchParameterMap : searchParameterMapsToResolve) {
|
||||
// No matches
|
||||
if (!nextSearchParameterMap.myResolved) {
|
||||
theTransactionDetails.addResolvedMatchUrl(nextSearchParameterMap.myRequestUrl, TransactionDetails.NOT_FOUND);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
			/*
			 * Pre-fetch the resources we're touching in this transaction en masse - this reduces the
			 * number of database round trips.
			 *
			 * The thresholds below are somewhat arbitrary. It's not
			 * actually guaranteed that this pre-fetching will help (e.g. if a Bundle contains
			 * nothing but NOP conditional creates, the pre-fetching actually loads
			 * more data than would otherwise be loaded).
			 *
			 * However, for realistic average workloads, this should reduce the number of round trips.
			 */
|
||||
if (idsToPreFetch.size() > 2) {
|
||||
List<ResourceTable> loadedResourceTableEntries = preFetchIndexes(idsToPreFetch, "forcedId", "myForcedId");
|
||||
|
||||
if (loadedResourceTableEntries.stream().filter(t -> t.isParamsStringPopulated()).count() > 1) {
|
||||
preFetchIndexes(idsToPreFetch, "string", "myParamsString");
|
||||
}
|
||||
if (loadedResourceTableEntries.stream().filter(t -> t.isParamsTokenPopulated()).count() > 1) {
|
||||
preFetchIndexes(idsToPreFetch, "token", "myParamsToken");
|
||||
}
|
||||
if (loadedResourceTableEntries.stream().filter(t -> t.isParamsDatePopulated()).count() > 1) {
|
||||
preFetchIndexes(idsToPreFetch, "date", "myParamsDate");
|
||||
}
|
||||
				if (loadedResourceTableEntries.stream().filter(t -> t.isParamsQuantityPopulated()).count() > 1) {
|
||||
preFetchIndexes(idsToPreFetch, "quantity", "myParamsQuantity");
|
||||
}
|
||||
if (loadedResourceTableEntries.stream().filter(t -> t.isHasLinks()).count() > 1) {
|
||||
preFetchIndexes(idsToPreFetch, "resourceLinks", "myResourceLinks");
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
return super.doTransactionWriteOperations(theRequest, theActionName, theTransactionDetails, theAllIds, theIdSubstitutions, theIdToPersistedOutcome, theResponse, theOriginalRequestOrder, theEntries, theTransactionStopWatch);
|
||||
}
|
||||
|
||||
private List<ResourceTable> preFetchIndexes(List<Long> ids, String typeDesc, String fieldName) {
|
||||
TypedQuery<ResourceTable> query = myEntityManager.createQuery("FROM ResourceTable r LEFT JOIN FETCH r." + fieldName + " WHERE r.myId IN ( :IDS )", ResourceTable.class);
|
||||
query.setParameter("IDS", ids);
|
||||
List<ResourceTable> indexFetchOutcome = query.getResultList();
|
||||
ourLog.debug("Pre-fetched {} {}} indexes", indexFetchOutcome.size(), typeDesc);
|
||||
return indexFetchOutcome;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void flushSession(Map<IIdType, DaoMethodOutcome> theIdToPersistedOutcome) {
|
||||
|
@ -86,5 +355,29 @@ public class TransactionProcessor extends BaseTransactionProcessor {
|
|||
	}
	}

	@VisibleForTesting
	public void setPartitionSettingsForUnitTest(PartitionSettings thePartitionSettings) {
		myPartitionSettings = thePartitionSettings;
	}

	@VisibleForTesting
	public void setIdHelperServiceForUnitTest(IdHelperService theIdHelperService) {
		myIdHelperService = theIdHelperService;
	}

	private static class MatchUrlToResolve {

		private final String myRequestUrl;
		private final SearchParameterMap myMatchUrlSearchMap;
		private final RuntimeResourceDefinition myResourceDefinition;
		public boolean myResolved;
		private Long myHashValue;
		private Long myHashSystemAndValue;

		public MatchUrlToResolve(String theRequestUrl, SearchParameterMap theMatchUrlSearchMap, RuntimeResourceDefinition theResourceDefinition) {
			myRequestUrl = theRequestUrl;
			myMatchUrlSearchMap = theMatchUrlSearchMap;
			myResourceDefinition = theResourceDefinition;
		}
	}
}
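The comment block earlier in this method describes the motivation; mechanically, the pre-fetch is just a fetch join issued once for a whole batch of primary keys, as preFetchIndexes() shows. A generic, hedged sketch of the same JPA pattern follows - it assumes an entity with an "id" field and a lazily-loaded collection, not the real ResourceTable mapping:

import javax.persistence.EntityManager;
import javax.persistence.TypedQuery;
import java.util.List;

public class BatchPreFetcher {

	private final EntityManager myEntityManager;

	public BatchPreFetcher(EntityManager theEntityManager) {
		myEntityManager = theEntityManager;
	}

	// One query loads the entities and eagerly fetches the named lazy collection,
	// instead of N+1 queries when each collection is touched later.
	public <T> List<T> preFetch(Class<T> theEntityType, String theCollectionField, List<Long> theIds) {
		String jpql = "SELECT DISTINCT e FROM " + theEntityType.getSimpleName()
			+ " e LEFT JOIN FETCH e." + theCollectionField + " WHERE e.id IN ( :IDS )";
		TypedQuery<T> query = myEntityManager.createQuery(jpql, theEntityType);
		query.setParameter("IDS", theIds);
		return query.getResultList();
	}
}

Whether the join fetch pays off depends on how often the prefetched associations are actually read afterwards, which is exactly the trade-off the thresholds above try to balance.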
|
||||
|
|
|
@ -26,8 +26,8 @@ import ca.uhn.fhir.jpa.dao.data.ISubscriptionTableDao;
|
|||
import ca.uhn.fhir.jpa.entity.SubscriptionTable;
import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
import org.hl7.fhir.dstu3.model.Subscription;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;

@ -48,8 +48,8 @@ public class FhirResourceDaoSubscriptionDstu3 extends BaseHapiFhirResourceDao<Su
	}

	@Override
	public Long getSubscriptionTablePidForSubscriptionResource(IIdType theId, RequestDetails theRequest) {
		ResourceTable entity = readEntityLatestVersion(theId, theRequest);
	public Long getSubscriptionTablePidForSubscriptionResource(IIdType theId, RequestDetails theRequest, TransactionDetails theTransactionDetails) {
		ResourceTable entity = readEntityLatestVersion(theId, theRequest, theTransactionDetails);
		SubscriptionTable table = mySubscriptionTableDao.findOneByResourcePid(entity.getId());
		if (table == null) {
			return null;
|
||||
|
|
|
@ -30,10 +30,10 @@ import ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao;
|
|||
import ca.uhn.fhir.jpa.dao.data.IResourceLinkDao;
|
||||
import ca.uhn.fhir.jpa.dao.index.IdHelperService;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceLink;
|
||||
import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
|
||||
import ca.uhn.fhir.rest.api.server.RequestDetails;
|
||||
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
|
||||
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
|
||||
import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
|
||||
import ca.uhn.fhir.util.OperationOutcomeUtil;
|
||||
import ca.uhn.fhir.util.StopWatch;
|
||||
import org.hl7.fhir.instance.model.api.IBaseOperationOutcome;
|
||||
|
@ -55,6 +55,10 @@ import java.util.concurrent.atomic.AtomicLong;
|
|||
import java.util.stream.Collectors;
|
||||
|
||||
@Service
|
||||
/**
|
||||
* DeleteExpunge is now performed using the {@link ca.uhn.fhir.jpa.delete.DeleteExpungeJobSubmitterImpl} Spring Batch job.
|
||||
*/
|
||||
@Deprecated
|
||||
public class DeleteExpungeService {
|
||||
private static final Logger ourLog = LoggerFactory.getLogger(DeleteExpungeService.class);
|
||||
|
||||
|
|
|
@ -50,11 +50,20 @@ import org.springframework.stereotype.Service;
|
|||
|
||||
import javax.annotation.Nonnull;
|
||||
import javax.annotation.Nullable;
|
||||
import javax.persistence.EntityManager;
|
||||
import javax.persistence.PersistenceContext;
|
||||
import javax.persistence.PersistenceContextType;
|
||||
import javax.persistence.TypedQuery;
|
||||
import javax.persistence.criteria.CriteriaBuilder;
|
||||
import javax.persistence.criteria.CriteriaQuery;
|
||||
import javax.persistence.criteria.Predicate;
|
||||
import javax.persistence.criteria.Root;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.Date;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
@ -62,7 +71,6 @@ import java.util.Optional;
|
|||
import java.util.Set;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import static org.apache.commons.lang3.StringUtils.isBlank;
|
||||
import static org.apache.commons.lang3.StringUtils.isNotBlank;
|
||||
|
||||
/**
|
||||
|
@ -86,6 +94,7 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank;
|
|||
public class IdHelperService {
|
||||
private static final String RESOURCE_PID = "RESOURCE_PID";
|
||||
private static final Logger ourLog = LoggerFactory.getLogger(IdHelperService.class);
|
||||
public static final Predicate[] EMPTY_PREDICATE_ARRAY = new Predicate[0];
|
||||
@Autowired
|
||||
protected IForcedIdDao myForcedIdDao;
|
||||
@Autowired
|
||||
|
@ -167,6 +176,9 @@ public class IdHelperService {
|
|||
return RequestPartitionId.stringifyForKey(theRequestPartitionId) + "/" + theResourceType + "/" + theId;
|
||||
}
|
||||
|
||||
@PersistenceContext(type = PersistenceContextType.TRANSACTION)
|
||||
private EntityManager myEntityManager;
|
||||
|
||||
/**
|
||||
* Given a collection of resource IDs (resource type + id), resolves the internal persistent IDs.
|
||||
* <p>
|
||||
|
@ -181,71 +193,80 @@ public class IdHelperService {
|
|||
return Collections.emptyList();
|
||||
}
|
||||
|
||||
List<ResourcePersistentId> retVal = new ArrayList<>();
|
||||
List<ResourcePersistentId> retVal = new ArrayList<>(theIds.size());
|
||||
|
||||
if (myDaoConfig.getResourceClientIdStrategy() != DaoConfig.ClientIdStrategyEnum.ANY) {
|
||||
theIds
|
||||
.stream()
|
||||
.filter(IdHelperService::isValidPid)
|
||||
.map(IIdType::getIdPartAsLong)
|
||||
.map(ResourcePersistentId::new)
|
||||
.forEach(retVal::add);
|
||||
Set<IIdType> idsToCheck = new HashSet<>(theIds.size());
|
||||
for (IIdType nextId : theIds) {
|
||||
if (myDaoConfig.getResourceClientIdStrategy() != DaoConfig.ClientIdStrategyEnum.ANY) {
|
||||
if (nextId.isIdPartValidLong()) {
|
||||
retVal.add(new ResourcePersistentId(nextId.getIdPartAsLong()).setAssociatedResourceId(nextId));
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
String key = toForcedIdToPidKey(theRequestPartitionId, nextId.getResourceType(), nextId.getIdPart());
|
||||
ResourcePersistentId cachedId = myMemoryCacheService.getIfPresent(MemoryCacheService.CacheEnum.FORCED_ID_TO_PID, key);
|
||||
if (cachedId != null) {
|
||||
retVal.add(cachedId);
|
||||
continue;
|
||||
}
|
||||
|
||||
idsToCheck.add(nextId);
|
||||
}
|
||||
|
||||
ListMultimap<String, String> typeToIds = organizeIdsByResourceType(theIds);
|
||||
if (idsToCheck.size() > 0) {
|
||||
CriteriaBuilder cb = myEntityManager.getCriteriaBuilder();
|
||||
CriteriaQuery<ForcedId> criteriaQuery = cb.createQuery(ForcedId.class);
|
||||
Root<ForcedId> from = criteriaQuery.from(ForcedId.class);
|
||||
|
||||
for (Map.Entry<String, Collection<String>> nextEntry : typeToIds.asMap().entrySet()) {
|
||||
String nextResourceType = nextEntry.getKey();
|
||||
Collection<String> nextIds = nextEntry.getValue();
|
||||
if (isBlank(nextResourceType)) {
|
||||
List<Predicate> predicates = new ArrayList<>(idsToCheck.size());
|
||||
for (IIdType next : idsToCheck) {
|
||||
|
||||
List<Long> views = myForcedIdDao.findByForcedId(nextIds);
|
||||
views.forEach(t -> retVal.add(new ResourcePersistentId(t)));
|
||||
List<Predicate> andPredicates = new ArrayList<>(3);
|
||||
|
||||
} else {
|
||||
|
||||
// String partitionIdStringForKey = RequestPartitionId.stringifyForKey(theRequestPartitionId);
|
||||
for (Iterator<String> idIterator = nextIds.iterator(); idIterator.hasNext(); ) {
|
||||
String nextId = idIterator.next();
|
||||
String key = toForcedIdToPidKey(theRequestPartitionId, nextResourceType, nextId);
|
||||
ResourcePersistentId nextCachedPid = myMemoryCacheService.getIfPresent(MemoryCacheService.CacheEnum.FORCED_ID_TO_PID, key);
|
||||
if (nextCachedPid != null) {
|
||||
idIterator.remove();
|
||||
retVal.add(nextCachedPid);
|
||||
}
|
||||
if (isNotBlank(next.getResourceType())) {
|
||||
Predicate typeCriteria = cb.equal(from.get("myResourceType").as(String.class), next.getResourceType());
|
||||
andPredicates.add(typeCriteria);
|
||||
}
|
||||
|
||||
if (nextIds.size() > 0) {
|
||||
Predicate idCriteria = cb.equal(from.get("myForcedId").as(String.class), next.getIdPart());
|
||||
andPredicates.add(idCriteria);
|
||||
|
||||
Collection<Object[]> views;
|
||||
if (theRequestPartitionId.isAllPartitions()) {
|
||||
views = myForcedIdDao.findByTypeAndForcedId(nextResourceType, nextIds);
|
||||
} else {
|
||||
if (theRequestPartitionId.isDefaultPartition()) {
|
||||
views = myForcedIdDao.findByTypeAndForcedIdInPartitionNull(nextResourceType, nextIds);
|
||||
} else if (theRequestPartitionId.hasDefaultPartitionId()) {
|
||||
views = myForcedIdDao.findByTypeAndForcedIdInPartitionIdsOrNullPartition(nextResourceType, nextIds, theRequestPartitionId.getPartitionIds());
|
||||
} else {
|
||||
views = myForcedIdDao.findByTypeAndForcedIdInPartitionIds(nextResourceType, nextIds, theRequestPartitionId.getPartitionIds());
|
||||
}
|
||||
}
|
||||
for (Object[] nextView : views) {
|
||||
String forcedId = (String) nextView[0];
|
||||
Long pid = (Long) nextView[1];
|
||||
ResourcePersistentId persistentId = new ResourcePersistentId(pid);
|
||||
retVal.add(persistentId);
|
||||
|
||||
String key = toForcedIdToPidKey(theRequestPartitionId, nextResourceType, forcedId);
|
||||
myMemoryCacheService.put(MemoryCacheService.CacheEnum.FORCED_ID_TO_PID, key, persistentId);
|
||||
}
|
||||
if (theRequestPartitionId.isDefaultPartition()) {
|
||||
Predicate partitionIdCriteria = cb.isNull(from.get("myPartitionIdValue").as(Integer.class));
|
||||
andPredicates.add(partitionIdCriteria);
|
||||
} else if (!theRequestPartitionId.isAllPartitions()) {
|
||||
Predicate partitionIdCriteria = from.get("myPartitionIdValue").as(Integer.class).in(theRequestPartitionId.getPartitionIds());
|
||||
andPredicates.add(partitionIdCriteria);
|
||||
}
|
||||
|
||||
predicates.add(cb.and(andPredicates.toArray(EMPTY_PREDICATE_ARRAY)));
|
||||
}
|
||||
|
||||
criteriaQuery.where(cb.or(predicates.toArray(EMPTY_PREDICATE_ARRAY)));
|
||||
|
||||
TypedQuery<ForcedId> query = myEntityManager.createQuery(criteriaQuery);
|
||||
List<ForcedId> results = query.getResultList();
|
||||
for (ForcedId nextId : results) {
|
||||
ResourcePersistentId persistentId = new ResourcePersistentId(nextId.getResourceId());
|
||||
populateAssociatedResourceId(nextId.getResourceType(), nextId.getForcedId(), persistentId);
|
||||
retVal.add(persistentId);
|
||||
|
||||
String key = toForcedIdToPidKey(theRequestPartitionId, nextId.getResourceType(), nextId.getForcedId());
|
||||
myMemoryCacheService.putAfterCommit(MemoryCacheService.CacheEnum.FORCED_ID_TO_PID, key, persistentId);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
return retVal;
|
||||
}
|
||||
|
||||
private void populateAssociatedResourceId(String nextResourceType, String forcedId, ResourcePersistentId persistentId) {
|
||||
IIdType resourceId = myFhirCtx.getVersion().newIdType();
|
||||
resourceId.setValue(nextResourceType + "/" + forcedId);
|
||||
persistentId.setAssociatedResourceId(resourceId);
|
||||
}
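The resolution method above combines two optimizations: a per-key cache lookup up front, and a single criteria query for all remaining misses, whose results are then written back to the cache. A self-contained sketch of that cache-first, batch-second shape - a plain HashMap stands in for MemoryCacheService and a caller-supplied function stands in for the ForcedId query:

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Function;

public class CacheFirstResolver {

	private final Map<String, Long> myCache = new HashMap<>();

	public List<Long> resolve(List<String> theKeys, Function<List<String>, Map<String, Long>> theBatchLookup) {
		List<Long> resolved = new ArrayList<>(theKeys.size());
		List<String> misses = new ArrayList<>();

		// First pass: satisfy as many keys as possible from the cache
		for (String key : theKeys) {
			Long hit = myCache.get(key);
			if (hit != null) {
				resolved.add(hit);
			} else {
				misses.add(key);
			}
		}

		// Second pass: one batched lookup for everything the cache did not know about,
		// back-filling the cache so later calls hit the first pass
		if (!misses.isEmpty()) {
			Map<String, Long> found = theBatchLookup.apply(misses);
			found.forEach((key, pid) -> {
				myCache.put(key, pid);
				resolved.add(pid);
			});
		}
		return resolved;
	}
}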
|
||||
|
||||
/**
|
||||
* Given a persistent ID, returns the associated resource ID
|
||||
*/
|
||||
|
@ -501,6 +522,10 @@ public class IdHelperService {
|
|||
*/
|
||||
public void addResolvedPidToForcedId(ResourcePersistentId theResourcePersistentId, @Nonnull RequestPartitionId theRequestPartitionId, String theResourceType, @Nullable String theForcedId) {
|
||||
if (theForcedId != null) {
|
||||
if (theResourcePersistentId.getAssociatedResourceId() == null) {
|
||||
populateAssociatedResourceId(theResourceType, theForcedId, theResourcePersistentId);
|
||||
}
|
||||
|
||||
myMemoryCacheService.putAfterCommit(MemoryCacheService.CacheEnum.PID_TO_FORCED_ID, theResourcePersistentId.getIdAsLong(), Optional.of(theForcedId));
|
||||
String key = toForcedIdToPidKey(theRequestPartitionId, theResourceType, theForcedId);
|
||||
myMemoryCacheService.putAfterCommit(MemoryCacheService.CacheEnum.FORCED_ID_TO_PID, key, theResourcePersistentId);
|
||||
|
|
|
@ -111,8 +111,8 @@ public class SearchParamWithInlineReferencesExtractor {
|
|||
mySearchParamRegistry = theSearchParamRegistry;
|
||||
}
|
||||
|
||||
public void populateFromResource(ResourceIndexedSearchParams theParams, TransactionDetails theTransactionDetails, ResourceTable theEntity, IBaseResource theResource, ResourceIndexedSearchParams theExistingParams, RequestDetails theRequest) {
|
||||
extractInlineReferences(theResource, theRequest);
|
||||
public void populateFromResource(ResourceIndexedSearchParams theParams, TransactionDetails theTransactionDetails, ResourceTable theEntity, IBaseResource theResource, ResourceIndexedSearchParams theExistingParams, RequestDetails theRequest, boolean theFailOnInvalidReference) {
|
||||
extractInlineReferences(theResource, theTransactionDetails, theRequest);
|
||||
|
||||
RequestPartitionId partitionId;
|
||||
if (myPartitionSettings.isPartitioningEnabled()) {
|
||||
|
@ -121,7 +121,7 @@ public class SearchParamWithInlineReferencesExtractor {
|
|||
partitionId = RequestPartitionId.allPartitions();
|
||||
}
|
||||
|
||||
mySearchParamExtractorService.extractFromResource(partitionId, theRequest, theParams, theEntity, theResource, theTransactionDetails, true);
|
||||
mySearchParamExtractorService.extractFromResource(partitionId, theRequest, theParams, theEntity, theResource, theTransactionDetails, theFailOnInvalidReference);
|
||||
|
||||
Set<Map.Entry<String, RuntimeSearchParam>> activeSearchParams = mySearchParamRegistry.getActiveSearchParams(theEntity.getResourceType()).entrySet();
|
||||
if (myDaoConfig.getIndexMissingFields() == DaoConfig.IndexEnabledEnum.ENABLED) {
|
||||
|
@ -245,7 +245,7 @@ public class SearchParamWithInlineReferencesExtractor {
|
|||
* Handle references within the resource that are match URLs, for example references like "Patient?identifier=foo". These match URLs are resolved and replaced with the ID of the
|
||||
* matching resource.
|
||||
*/
|
||||
public void extractInlineReferences(IBaseResource theResource, RequestDetails theRequest) {
|
||||
public void extractInlineReferences(IBaseResource theResource, TransactionDetails theTransactionDetails, RequestDetails theRequest) {
|
||||
if (!myDaoConfig.isAllowInlineMatchUrlReferences()) {
|
||||
return;
|
||||
}
|
||||
|
@ -277,7 +277,7 @@ public class SearchParamWithInlineReferencesExtractor {
|
|||
}
|
||||
Class<? extends IBaseResource> matchResourceType = matchResourceDef.getImplementingClass();
|
||||
//Attempt to find the target reference before creating a placeholder
|
||||
Set<ResourcePersistentId> matches = myMatchResourceUrlService.processMatchUrl(nextIdText, matchResourceType, theRequest);
|
||||
Set<ResourcePersistentId> matches = myMatchResourceUrlService.processMatchUrl(nextIdText, matchResourceType, theTransactionDetails, theRequest);
|
||||
|
||||
ResourcePersistentId match;
|
||||
if (matches.isEmpty()) {
|
||||
|
|
|
@ -26,8 +26,8 @@ import ca.uhn.fhir.jpa.dao.data.ISubscriptionTableDao;
|
|||
import ca.uhn.fhir.jpa.entity.SubscriptionTable;
|
||||
import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
|
||||
import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
|
||||
import ca.uhn.fhir.rest.api.server.RequestDetails;
|
||||
import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
|
||||
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||
import org.hl7.fhir.instance.model.api.IIdType;
|
||||
import org.hl7.fhir.r4.model.Subscription;
|
||||
|
@ -48,8 +48,8 @@ public class FhirResourceDaoSubscriptionR4 extends BaseHapiFhirResourceDao<Subsc
|
|||
}
|
||||
|
||||
@Override
|
||||
public Long getSubscriptionTablePidForSubscriptionResource(IIdType theId, RequestDetails theRequest) {
|
||||
ResourceTable entity = readEntityLatestVersion(theId, theRequest);
|
||||
public Long getSubscriptionTablePidForSubscriptionResource(IIdType theId, RequestDetails theRequest, TransactionDetails theTransactionDetails) {
|
||||
ResourceTable entity = readEntityLatestVersion(theId, theRequest, theTransactionDetails);
|
||||
SubscriptionTable table = mySubscriptionTableDao.findOneByResourcePid(entity.getId());
|
||||
if (table == null) {
|
||||
return null;
|
||||
|
@ -72,7 +72,7 @@ public class FhirResourceDaoSubscriptionR4 extends BaseHapiFhirResourceDao<Subsc
|
|||
ResourceTable retVal = super.updateEntity(theRequest, theResource, theEntity, theDeletedTimestampOrNull, thePerformIndexing, theUpdateVersion, theTransactionDetails, theForceUpdate, theCreateNewHistoryEntry);
|
||||
|
||||
if (theDeletedTimestampOrNull != null) {
|
||||
Long subscriptionId = getSubscriptionTablePidForSubscriptionResource(theEntity.getIdDt(), theRequest);
|
||||
Long subscriptionId = getSubscriptionTablePidForSubscriptionResource(theEntity.getIdDt(), theRequest, theTransactionDetails);
|
||||
if (subscriptionId != null) {
|
||||
mySubscriptionTableDao.deleteAllForSubscription(retVal);
|
||||
}
|
||||
|
|
|
@ -26,8 +26,8 @@ import ca.uhn.fhir.jpa.dao.data.ISubscriptionTableDao;
|
|||
import ca.uhn.fhir.jpa.entity.SubscriptionTable;
|
||||
import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
|
||||
import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
|
||||
import ca.uhn.fhir.rest.api.server.RequestDetails;
|
||||
import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
|
||||
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||
import org.hl7.fhir.instance.model.api.IIdType;
|
||||
import org.hl7.fhir.r5.model.Subscription;
|
||||
|
@ -48,8 +48,8 @@ public class FhirResourceDaoSubscriptionR5 extends BaseHapiFhirResourceDao<Subsc
|
|||
}
|
||||
|
||||
@Override
|
||||
public Long getSubscriptionTablePidForSubscriptionResource(IIdType theId, RequestDetails theRequest) {
|
||||
ResourceTable entity = readEntityLatestVersion(theId, theRequest);
|
||||
public Long getSubscriptionTablePidForSubscriptionResource(IIdType theId, RequestDetails theRequest, TransactionDetails theTransactionDetails) {
|
||||
ResourceTable entity = readEntityLatestVersion(theId, theRequest, theTransactionDetails);
|
||||
SubscriptionTable table = mySubscriptionTableDao.findOneByResourcePid(entity.getId());
|
||||
if (table == null) {
|
||||
return null;
|
||||
|
@ -72,7 +72,7 @@ public class FhirResourceDaoSubscriptionR5 extends BaseHapiFhirResourceDao<Subsc
|
|||
ResourceTable retVal = super.updateEntity(theRequest, theResource, theEntity, theDeletedTimestampOrNull, thePerformIndexing, theUpdateVersion, theTransactionDetails, theForceUpdate, theCreateNewHistoryEntry);
|
||||
|
||||
if (theDeletedTimestampOrNull != null) {
|
||||
Long subscriptionId = getSubscriptionTablePidForSubscriptionResource(theEntity.getIdDt(), theRequest);
|
||||
Long subscriptionId = getSubscriptionTablePidForSubscriptionResource(theEntity.getIdDt(), theRequest, theTransactionDetails);
|
||||
if (subscriptionId != null) {
|
||||
mySubscriptionTableDao.deleteAllForSubscription(retVal);
|
||||
}
|
||||
|
|
|
@ -0,0 +1,100 @@
|
|||
package ca.uhn.fhir.jpa.delete;
|
||||
|
||||
/*-
|
||||
* #%L
|
||||
* HAPI FHIR JPA Server
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
|
||||
* %%
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
* #L%
|
||||
*/
|
||||
|
||||
import ca.uhn.fhir.context.FhirContext;
|
||||
import ca.uhn.fhir.interceptor.api.HookParams;
|
||||
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
|
||||
import ca.uhn.fhir.interceptor.api.Pointcut;
|
||||
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
|
||||
import ca.uhn.fhir.jpa.api.config.DaoConfig;
|
||||
import ca.uhn.fhir.jpa.batch.BatchJobsConfig;
|
||||
import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter;
|
||||
import ca.uhn.fhir.jpa.delete.job.DeleteExpungeJobConfig;
|
||||
import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
|
||||
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
|
||||
import ca.uhn.fhir.jpa.searchparam.ResourceSearch;
|
||||
import ca.uhn.fhir.rest.api.server.RequestDetails;
|
||||
import ca.uhn.fhir.rest.api.server.storage.IDeleteExpungeJobSubmitter;
|
||||
import ca.uhn.fhir.rest.server.exceptions.ForbiddenOperationException;
|
||||
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
|
||||
import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
|
||||
import org.springframework.batch.core.Job;
|
||||
import org.springframework.batch.core.JobExecution;
|
||||
import org.springframework.batch.core.JobParameters;
|
||||
import org.springframework.batch.core.JobParametersInvalidException;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.beans.factory.annotation.Qualifier;
|
||||
|
||||
import javax.transaction.Transactional;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
public class DeleteExpungeJobSubmitterImpl implements IDeleteExpungeJobSubmitter {
|
||||
@Autowired
|
||||
private IBatchJobSubmitter myBatchJobSubmitter;
|
||||
@Autowired
|
||||
@Qualifier(BatchJobsConfig.DELETE_EXPUNGE_JOB_NAME)
|
||||
private Job myDeleteExpungeJob;
|
||||
@Autowired
|
||||
FhirContext myFhirContext;
|
||||
@Autowired
|
||||
MatchUrlService myMatchUrlService;
|
||||
@Autowired
|
||||
IRequestPartitionHelperSvc myRequestPartitionHelperSvc;
|
||||
@Autowired
|
||||
DaoConfig myDaoConfig;
|
||||
@Autowired
|
||||
IInterceptorBroadcaster myInterceptorBroadcaster;
|
||||
|
||||
@Override
|
||||
@Transactional(Transactional.TxType.NEVER)
|
||||
public JobExecution submitJob(Integer theBatchSize, RequestDetails theRequest, List<String> theUrlsToDeleteExpunge) throws JobParametersInvalidException {
|
||||
List<RequestPartitionId> requestPartitionIds = requestPartitionIdsFromRequestAndUrls(theRequest, theUrlsToDeleteExpunge);
|
||||
if (!myDaoConfig.canDeleteExpunge()) {
|
||||
throw new ForbiddenOperationException("Delete Expunge not allowed: " + myDaoConfig.cannotDeleteExpungeReason());
|
||||
}
|
||||
|
||||
for (String url : theUrlsToDeleteExpunge) {
|
||||
HookParams params = new HookParams()
|
||||
.add(RequestDetails.class, theRequest)
|
||||
.addIfMatchesType(ServletRequestDetails.class, theRequest)
|
||||
.add(String.class, url);
|
||||
CompositeInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequest, Pointcut.STORAGE_PRE_DELETE_EXPUNGE, params);
|
||||
}
|
||||
|
||||
JobParameters jobParameters = DeleteExpungeJobConfig.buildJobParameters(theBatchSize, theUrlsToDeleteExpunge, requestPartitionIds);
|
||||
return myBatchJobSubmitter.runJob(myDeleteExpungeJob, jobParameters);
|
||||
}
|
||||
|
||||
/**
|
||||
* This method will throw an exception if the user is not allowed to add the requested resource type on the partition determined by the request
|
||||
*/
|
||||
private List<RequestPartitionId> requestPartitionIdsFromRequestAndUrls(RequestDetails theRequest, List<String> theUrlsToDeleteExpunge) {
|
||||
List<RequestPartitionId> retval = new ArrayList<>();
|
||||
for (String url : theUrlsToDeleteExpunge) {
|
||||
ResourceSearch resourceSearch = myMatchUrlService.getResourceSearch(url);
|
||||
RequestPartitionId requestPartitionId = myRequestPartitionHelperSvc.determineReadPartitionForRequest(theRequest, resourceSearch.getResourceName());
|
||||
retval.add(requestPartitionId);
|
||||
}
|
||||
return retval;
|
||||
}
|
||||
}
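Callers hand the submitter a chunk size and a list of match URLs; partition resolution, the authorization check and the STORAGE_PRE_DELETE_EXPUNGE hook all run before the batch job is launched. A hedged sketch of such a call site - the surrounding class and the example URLs are made up, only the submitJob signature comes from the code above:

import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.storage.IDeleteExpungeJobSubmitter;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobParametersInvalidException;

import java.util.Arrays;
import java.util.List;

public class DeleteExpungeCaller {

	private final IDeleteExpungeJobSubmitter myJobSubmitter;

	public DeleteExpungeCaller(IDeleteExpungeJobSubmitter theJobSubmitter) {
		myJobSubmitter = theJobSubmitter;
	}

	// Submits one delete-expunge job covering two match URLs, 500 resources per chunk
	public JobExecution expungeCancelledResources(RequestDetails theRequestDetails) throws JobParametersInvalidException {
		List<String> urls = Arrays.asList(
			"Observation?status=cancelled",
			"DiagnosticReport?status=cancelled");
		return myJobSubmitter.submitJob(500, theRequestDetails, urls);
	}
}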
|
|
@ -0,0 +1,139 @@
|
|||
package ca.uhn.fhir.jpa.delete.job;
|
||||
|
||||
/*-
|
||||
* #%L
|
||||
* HAPI FHIR JPA Server
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
|
||||
* %%
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
* #L%
|
||||
*/
|
||||
|
||||
import ca.uhn.fhir.context.FhirContext;
|
||||
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
|
||||
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
|
||||
import ca.uhn.fhir.jpa.batch.listener.PidReaderCounterListener;
|
||||
import ca.uhn.fhir.jpa.batch.reader.ReverseCronologicalBatchResourcePidReader;
|
||||
import ca.uhn.fhir.jpa.batch.writer.SqlExecutorWriter;
|
||||
import ca.uhn.fhir.jpa.delete.model.RequestListJson;
|
||||
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
|
||||
import org.apache.commons.lang3.time.DateUtils;
|
||||
import org.springframework.batch.core.Job;
|
||||
import org.springframework.batch.core.JobParameter;
|
||||
import org.springframework.batch.core.JobParameters;
|
||||
import org.springframework.batch.core.JobParametersValidator;
|
||||
import org.springframework.batch.core.Step;
|
||||
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
|
||||
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
|
||||
import org.springframework.batch.core.configuration.annotation.StepScope;
|
||||
import org.springframework.batch.core.listener.ExecutionContextPromotionListener;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
import org.springframework.context.annotation.Lazy;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
import java.util.Date;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import static ca.uhn.fhir.jpa.batch.BatchJobsConfig.DELETE_EXPUNGE_JOB_NAME;
|
||||
|
||||
/**
|
||||
 * Spring Batch job configuration. Contains all the necessary plumbing to run a
|
||||
* Delete Expunge job.
|
||||
*/
|
||||
@Configuration
|
||||
public class DeleteExpungeJobConfig {
|
||||
public static final String DELETE_EXPUNGE_URL_LIST_STEP_NAME = "delete-expunge-url-list-step";
|
||||
private static final int MINUTES_IN_FUTURE_TO_DELETE_FROM = 1;
|
||||
|
||||
@Autowired
|
||||
private StepBuilderFactory myStepBuilderFactory;
|
||||
@Autowired
|
||||
private JobBuilderFactory myJobBuilderFactory;
|
||||
|
||||
@Bean(name = DELETE_EXPUNGE_JOB_NAME)
|
||||
@Lazy
|
||||
public Job deleteExpungeJob(FhirContext theFhirContext, MatchUrlService theMatchUrlService, DaoRegistry theDaoRegistry) throws Exception {
|
||||
return myJobBuilderFactory.get(DELETE_EXPUNGE_JOB_NAME)
|
||||
.validator(deleteExpungeJobParameterValidator(theFhirContext, theMatchUrlService, theDaoRegistry))
|
||||
.start(deleteExpungeUrlListStep())
|
||||
.build();
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
public static JobParameters buildJobParameters(Integer theBatchSize, List<String> theUrlList, List<RequestPartitionId> theRequestPartitionIds) {
|
||||
Map<String, JobParameter> map = new HashMap<>();
|
||||
RequestListJson requestListJson = RequestListJson.fromUrlStringsAndRequestPartitionIds(theUrlList, theRequestPartitionIds);
|
||||
map.put(ReverseCronologicalBatchResourcePidReader.JOB_PARAM_REQUEST_LIST, new JobParameter(requestListJson.toString()));
|
||||
map.put(ReverseCronologicalBatchResourcePidReader.JOB_PARAM_START_TIME, new JobParameter(DateUtils.addMinutes(new Date(), MINUTES_IN_FUTURE_TO_DELETE_FROM)));
|
||||
if (theBatchSize != null) {
|
||||
map.put(ReverseCronologicalBatchResourcePidReader.JOB_PARAM_BATCH_SIZE, new JobParameter(theBatchSize.longValue()));
|
||||
}
|
||||
JobParameters parameters = new JobParameters(map);
|
||||
return parameters;
|
||||
}
|
||||
|
||||
@Bean
|
||||
public Step deleteExpungeUrlListStep() {
|
||||
return myStepBuilderFactory.get(DELETE_EXPUNGE_URL_LIST_STEP_NAME)
|
||||
.<List<Long>, List<String>>chunk(1)
|
||||
.reader(reverseCronologicalBatchResourcePidReader())
|
||||
.processor(deleteExpungeProcessor())
|
||||
.writer(sqlExecutorWriter())
|
||||
.listener(pidCountRecorderListener())
|
||||
.listener(promotionListener())
|
||||
.build();
|
||||
}
|
||||
|
||||
@Bean
|
||||
@StepScope
|
||||
public PidReaderCounterListener pidCountRecorderListener() {
|
||||
return new PidReaderCounterListener();
|
||||
}
|
||||
|
||||
@Bean
|
||||
@StepScope
|
||||
public ReverseCronologicalBatchResourcePidReader reverseCronologicalBatchResourcePidReader() {
|
||||
return new ReverseCronologicalBatchResourcePidReader();
|
||||
}
|
||||
|
||||
@Bean
|
||||
@StepScope
|
||||
public DeleteExpungeProcessor deleteExpungeProcessor() {
|
||||
return new DeleteExpungeProcessor();
|
||||
}
|
||||
|
||||
@Bean
|
||||
@StepScope
|
||||
public SqlExecutorWriter sqlExecutorWriter() {
|
||||
return new SqlExecutorWriter();
|
||||
}
|
||||
|
||||
@Bean
|
||||
public JobParametersValidator deleteExpungeJobParameterValidator(FhirContext theFhirContext, MatchUrlService theMatchUrlService, DaoRegistry theDaoRegistry) {
|
||||
return new DeleteExpungeJobParameterValidator(theMatchUrlService, theDaoRegistry);
|
||||
}
|
||||
|
||||
@Bean
|
||||
public ExecutionContextPromotionListener promotionListener() {
|
||||
ExecutionContextPromotionListener listener = new ExecutionContextPromotionListener();
|
||||
|
||||
listener.setKeys(new String[]{SqlExecutorWriter.ENTITY_TOTAL_UPDATED_OR_DELETED, PidReaderCounterListener.RESOURCE_TOTAL_PROCESSED});
|
||||
|
||||
return listener;
|
||||
}
|
||||
}
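buildJobParameters() is the bridge between the submitter and the reader: it packs the serialized URL/partition list, a start timestamp one minute in the future, and the optional batch size into Spring Batch JobParameters. A small illustration of calling it directly - the URL and partition values are made up:

import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.delete.job.DeleteExpungeJobConfig;
import org.springframework.batch.core.JobParameters;

import java.util.Collections;
import java.util.List;

public class JobParameterExample {
	public static void main(String[] args) {
		List<String> urls = Collections.singletonList("Patient?active=false");
		List<RequestPartitionId> partitions = Collections.singletonList(RequestPartitionId.allPartitions());

		JobParameters parameters = DeleteExpungeJobConfig.buildJobParameters(100, urls, partitions);

		// Shows the keys the reader consumes: the request list, the start time and the batch size
		parameters.getParameters().forEach((key, value) -> System.out.println(key + " = " + value));
	}
}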
|
|
@ -0,0 +1,67 @@
|
|||
package ca.uhn.fhir.jpa.delete.job;
|
||||
|
||||
/*-
|
||||
* #%L
|
||||
* HAPI FHIR JPA Server
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
|
||||
* %%
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
* #L%
|
||||
*/
|
||||
|
||||
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
|
||||
import ca.uhn.fhir.jpa.delete.model.PartitionedUrl;
|
||||
import ca.uhn.fhir.jpa.delete.model.RequestListJson;
|
||||
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
|
||||
import ca.uhn.fhir.jpa.searchparam.ResourceSearch;
|
||||
import ca.uhn.fhir.rest.server.provider.ProviderConstants;
|
||||
import org.springframework.batch.core.JobParameters;
|
||||
import org.springframework.batch.core.JobParametersInvalidException;
|
||||
import org.springframework.batch.core.JobParametersValidator;
|
||||
|
||||
import static ca.uhn.fhir.jpa.batch.reader.ReverseCronologicalBatchResourcePidReader.JOB_PARAM_REQUEST_LIST;
|
||||
|
||||
/**
|
||||
 * This class will prevent a job from running if any of the provided URLs are not valid on this server.
|
||||
*/
|
||||
public class DeleteExpungeJobParameterValidator implements JobParametersValidator {
|
||||
private final MatchUrlService myMatchUrlService;
|
||||
private final DaoRegistry myDaoRegistry;
|
||||
|
||||
public DeleteExpungeJobParameterValidator(MatchUrlService theMatchUrlService, DaoRegistry theDaoRegistry) {
|
||||
myMatchUrlService = theMatchUrlService;
|
||||
myDaoRegistry = theDaoRegistry;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void validate(JobParameters theJobParameters) throws JobParametersInvalidException {
|
||||
if (theJobParameters == null) {
|
||||
throw new JobParametersInvalidException("This job requires Parameters: [urlList]");
|
||||
}
|
||||
|
||||
RequestListJson requestListJson = RequestListJson.fromJson(theJobParameters.getString(JOB_PARAM_REQUEST_LIST));
|
||||
for (PartitionedUrl partitionedUrl : requestListJson.getPartitionedUrls()) {
|
||||
String url = partitionedUrl.getUrl();
|
||||
try {
|
||||
ResourceSearch resourceSearch = myMatchUrlService.getResourceSearch(url);
|
||||
String resourceName = resourceSearch.getResourceName();
|
||||
if (!myDaoRegistry.isResourceTypeSupported(resourceName)) {
|
||||
throw new JobParametersInvalidException("The resource type " + resourceName + " is not supported on this server.");
|
||||
}
|
||||
} catch (UnsupportedOperationException e) {
|
||||
throw new JobParametersInvalidException("Failed to parse " + ProviderConstants.OPERATION_DELETE_EXPUNGE + " " + JOB_PARAM_REQUEST_LIST + " item " + url + ": " + e.getMessage());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,123 @@
|
|||
package ca.uhn.fhir.jpa.delete.job;
|
||||
|
||||
/*-
|
||||
* #%L
|
||||
* HAPI FHIR JPA Server
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
|
||||
* %%
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
* #L%
|
||||
*/
|
||||
|
||||
import ca.uhn.fhir.jpa.api.config.DaoConfig;
|
||||
import ca.uhn.fhir.jpa.dao.data.IResourceLinkDao;
|
||||
import ca.uhn.fhir.jpa.dao.expunge.PartitionRunner;
|
||||
import ca.uhn.fhir.jpa.dao.expunge.ResourceForeignKey;
|
||||
import ca.uhn.fhir.jpa.dao.expunge.ResourceTableFKProvider;
|
||||
import ca.uhn.fhir.jpa.dao.index.IdHelperService;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceLink;
|
||||
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.batch.item.ItemProcessor;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.data.domain.Slice;
|
||||
import org.springframework.data.domain.SliceImpl;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
/**
|
||||
* Input: list of pids of resources to be deleted and expunged
|
||||
* Output: list of sql statements to be executed
|
||||
*/
|
||||
public class DeleteExpungeProcessor implements ItemProcessor<List<Long>, List<String>> {
|
||||
private static final Logger ourLog = LoggerFactory.getLogger(DeleteExpungeProcessor.class);
|
||||
|
||||
@Autowired
|
||||
ResourceTableFKProvider myResourceTableFKProvider;
|
||||
@Autowired
|
||||
DaoConfig myDaoConfig;
|
||||
@Autowired
|
||||
IdHelperService myIdHelper;
|
||||
@Autowired
|
||||
IResourceLinkDao myResourceLinkDao;
|
||||
@Autowired
|
||||
PartitionRunner myPartitionRunner;
|
||||
|
||||
@Override
|
||||
public List<String> process(List<Long> thePids) throws Exception {
|
||||
validateOkToDeleteAndExpunge(new SliceImpl<>(thePids));
|
||||
|
||||
List<String> retval = new ArrayList<>();
|
||||
|
||||
String pidListString = thePids.toString().replace("[", "(").replace("]", ")");
|
||||
List<ResourceForeignKey> resourceForeignKeys = myResourceTableFKProvider.getResourceForeignKeys();
|
||||
|
||||
for (ResourceForeignKey resourceForeignKey : resourceForeignKeys) {
|
||||
retval.add(deleteRecordsByColumnSql(pidListString, resourceForeignKey));
|
||||
}
|
||||
|
||||
// Lastly we need to delete records from the resource table all of these other tables link to:
|
||||
ResourceForeignKey resourceTablePk = new ResourceForeignKey("HFJ_RESOURCE", "RES_ID");
|
||||
retval.add(deleteRecordsByColumnSql(pidListString, resourceTablePk));
|
||||
return retval;
|
||||
}
|
||||
|
||||
public void validateOkToDeleteAndExpunge(Slice<Long> thePids) {
|
||||
if (!myDaoConfig.isEnforceReferentialIntegrityOnDelete()) {
|
||||
ourLog.info("Referential integrity on delete disabled. Skipping referential integrity check.");
|
||||
return;
|
||||
}
|
||||
|
||||
List<ResourceLink> conflictResourceLinks = Collections.synchronizedList(new ArrayList<>());
|
||||
myPartitionRunner.runInPartitionedThreads(thePids, someTargetPids -> findResourceLinksWithTargetPidIn(thePids.getContent(), someTargetPids, conflictResourceLinks));
|
||||
|
||||
if (conflictResourceLinks.isEmpty()) {
|
||||
return;
|
||||
}
|
||||
|
||||
ResourceLink firstConflict = conflictResourceLinks.get(0);
|
||||
|
||||
//NB-GGG: We previously instantiated these ID values from firstConflict.getSourceResource().getIdDt(), but in a situation where we
|
||||
//actually had to run delete conflict checks in multiple partitions, the executor service starts its own sessions on a per thread basis, and by the time
|
||||
//we arrive here, those sessions are closed. So instead, we resolve them from PIDs, which are eagerly loaded.
|
||||
String sourceResourceId = myIdHelper.resourceIdFromPidOrThrowException(firstConflict.getSourceResourcePid()).toVersionless().getValue();
|
||||
String targetResourceId = myIdHelper.resourceIdFromPidOrThrowException(firstConflict.getTargetResourcePid()).toVersionless().getValue();
|
||||
|
||||
throw new InvalidRequestException("DELETE with _expunge=true failed. Unable to delete " +
|
||||
targetResourceId + " because " + sourceResourceId + " refers to it via the path " + firstConflict.getSourcePath());
|
||||
}
|
||||
|
||||
public void findResourceLinksWithTargetPidIn(List<Long> theAllTargetPids, List<Long> theSomeTargetPids, List<ResourceLink> theConflictResourceLinks) {
|
||||
// We only need to find one conflict, so if we found one already in an earlier partition run, we can skip the rest of the searches
|
||||
if (theConflictResourceLinks.isEmpty()) {
|
||||
List<ResourceLink> conflictResourceLinks = myResourceLinkDao.findWithTargetPidIn(theSomeTargetPids).stream()
|
||||
// Filter out resource links for which we are planning to delete the source.
|
||||
// theAllTargetPids contains a list of all the pids we are planning to delete. So we only want
|
||||
// to consider a link to be a conflict if the source of that link is not in theAllTargetPids.
|
||||
.filter(link -> !theAllTargetPids.contains(link.getSourceResourcePid()))
|
||||
.collect(Collectors.toList());
|
||||
|
||||
// We do this in two steps to avoid lock contention on this synchronized list
|
||||
theConflictResourceLinks.addAll(conflictResourceLinks);
|
||||
}
|
||||
}
|
||||
|
||||
private String deleteRecordsByColumnSql(String thePidListString, ResourceForeignKey theResourceForeignKey) {
|
||||
return "DELETE FROM " + theResourceForeignKey.table + " WHERE " + theResourceForeignKey.key + " IN " + thePidListString;
|
||||
}
|
||||
}
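For a concrete sense of the output, a pid list of [101, 102] run through this processor yields one DELETE per dependent table followed by the delete from HFJ_RESOURCE itself. Only the HFJ_RESOURCE/RES_ID pair appears in the code above; the dependent table and column in this sketch are illustrative stand-ins for whatever ResourceTableFKProvider returns:

public class DeleteSqlExample {

	// Mirrors deleteRecordsByColumnSql(): table and key are concatenated around the pid list
	static String deleteSql(String theTable, String theKey, String thePidList) {
		return "DELETE FROM " + theTable + " WHERE " + theKey + " IN " + thePidList;
	}

	public static void main(String[] args) {
		String pids = "(101, 102)";
		// Hypothetical dependent table first, then the resource table itself (as in the processor above)
		System.out.println(deleteSql("HFJ_RES_LINK", "SRC_RESOURCE_ID", pids));
		System.out.println(deleteSql("HFJ_RESOURCE", "RES_ID", pids));
		// Prints:
		// DELETE FROM HFJ_RES_LINK WHERE SRC_RESOURCE_ID IN (101, 102)
		// DELETE FROM HFJ_RESOURCE WHERE RES_ID IN (101, 102)
	}
}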
@ -0,0 +1,57 @@
|
|||
package ca.uhn.fhir.jpa.delete.model;
|
||||
|
||||
/*-
|
||||
* #%L
|
||||
* HAPI FHIR JPA Server
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
|
||||
* %%
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
* #L%
|
||||
*/
|
||||
|
||||
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
|
||||
import ca.uhn.fhir.model.api.IModelJson;
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
|
||||
public class PartitionedUrl implements IModelJson {
|
||||
@JsonProperty("url")
|
||||
private String myUrl;
|
||||
|
||||
@JsonProperty("requestPartitionId")
|
||||
private RequestPartitionId myRequestPartitionId;
|
||||
|
||||
public PartitionedUrl() {
|
||||
}
|
||||
|
||||
public PartitionedUrl(String theUrl, RequestPartitionId theRequestPartitionId) {
|
||||
myUrl = theUrl;
|
||||
myRequestPartitionId = theRequestPartitionId;
|
||||
}
|
||||
|
||||
public String getUrl() {
|
||||
return myUrl;
|
||||
}
|
||||
|
||||
public void setUrl(String theUrl) {
|
||||
myUrl = theUrl;
|
||||
}
|
||||
|
||||
public RequestPartitionId getRequestPartitionId() {
|
||||
return myRequestPartitionId;
|
||||
}
|
||||
|
||||
public void setRequestPartitionId(RequestPartitionId theRequestPartitionId) {
|
||||
myRequestPartitionId = theRequestPartitionId;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,79 @@
|
|||
package ca.uhn.fhir.jpa.delete.model;
|
||||
|
||||
/*-
|
||||
* #%L
|
||||
* HAPI FHIR JPA Server
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
|
||||
* %%
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
* #L%
|
||||
*/
|
||||
|
||||
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
|
||||
import ca.uhn.fhir.model.api.IModelJson;
|
||||
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
|
||||
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* Serialize a list of URLs and partition ids so Spring Batch can store it as a String
|
||||
*/
|
||||
public class RequestListJson implements IModelJson {
|
||||
static final ObjectMapper ourObjectMapper = new ObjectMapper();
|
||||
|
||||
@JsonProperty("partitionedUrls")
|
||||
private List<PartitionedUrl> myPartitionedUrls;
|
||||
|
||||
public static RequestListJson fromUrlStringsAndRequestPartitionIds(List<String> theUrls, List<RequestPartitionId> theRequestPartitionIds) {
|
||||
assert theUrls.size() == theRequestPartitionIds.size();
|
||||
|
||||
RequestListJson retval = new RequestListJson();
|
||||
List<PartitionedUrl> partitionedUrls = new ArrayList<>();
|
||||
for (int i = 0; i < theUrls.size(); ++i) {
|
||||
partitionedUrls.add(new PartitionedUrl(theUrls.get(i), theRequestPartitionIds.get(i)));
|
||||
}
|
||||
retval.setPartitionedUrls(partitionedUrls);
|
||||
return retval;
|
||||
}
|
||||
|
||||
public static RequestListJson fromJson(String theJson) {
|
||||
try {
|
||||
return ourObjectMapper.readValue(theJson, RequestListJson.class);
|
||||
} catch (JsonProcessingException e) {
|
||||
throw new InternalErrorException("Failed to decode " + RequestListJson.class);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
try {
|
||||
return ourObjectMapper.writeValueAsString(this);
|
||||
} catch (JsonProcessingException e) {
|
||||
throw new InvalidRequestException("Failed to encode " + RequestListJson.class, e);
|
||||
}
|
||||
}
|
||||
|
||||
public List<PartitionedUrl> getPartitionedUrls() {
|
||||
return myPartitionedUrls;
|
||||
}
|
||||
|
||||
public void setPartitionedUrls(List<PartitionedUrl> thePartitionedUrls) {
|
||||
myPartitionedUrls = thePartitionedUrls;
|
||||
}
|
||||
}
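Since Spring Batch job parameters are stored as plain strings, the request list is round-tripped through Jackson: toString() serializes it when the job is submitted and fromJson() rebuilds it inside the job. A short sketch of that round trip using the classes defined above (the URLs are made up, and it assumes RequestPartitionId serializes cleanly with the default ObjectMapper):

import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.delete.model.PartitionedUrl;
import ca.uhn.fhir.jpa.delete.model.RequestListJson;

import java.util.Arrays;
import java.util.List;

public class RequestListJsonExample {
	public static void main(String[] args) {
		List<String> urls = Arrays.asList("Patient?active=false", "Encounter?status=cancelled");
		List<RequestPartitionId> partitions = Arrays.asList(
			RequestPartitionId.allPartitions(), RequestPartitionId.allPartitions());

		// Serialize for storage as a Spring Batch job parameter...
		RequestListJson requestList = RequestListJson.fromUrlStringsAndRequestPartitionIds(urls, partitions);
		String asJson = requestList.toString();

		// ...and rebuild it on the other side of the job launch
		RequestListJson restored = RequestListJson.fromJson(asJson);
		for (PartitionedUrl partitionedUrl : restored.getPartitionedUrls()) {
			System.out.println(partitionedUrl.getUrl());
		}
	}
}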
|
|
@ -26,6 +26,8 @@ import org.hibernate.search.mapper.pojo.bridge.PropertyBridge;
|
|||
import org.hibernate.search.mapper.pojo.bridge.binding.PropertyBindingContext;
import org.hibernate.search.mapper.pojo.bridge.mapping.programmatic.PropertyBinder;
import org.hibernate.search.mapper.pojo.bridge.runtime.PropertyBridgeWriteContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.Collection;


@ -38,6 +40,7 @@ public class TermConceptPropertyBinder implements PropertyBinder {


	public static final String CONCEPT_FIELD_PROPERTY_PREFIX = "PROP";
	private static final Logger ourLog = LoggerFactory.getLogger(TermConceptPropertyBinder.class);

	@Override
	public void bind(PropertyBindingContext thePropertyBindingContext) {

@ -65,10 +68,10 @@ public class TermConceptPropertyBinder implements PropertyBinder {
		if (properties != null) {
			for (TermConceptProperty next : properties) {
				theDocument.addValue(CONCEPT_FIELD_PROPERTY_PREFIX + next.getKey(), next.getValue());
				System.out.println("Adding Prop: " + CONCEPT_FIELD_PROPERTY_PREFIX + next.getKey() + " -- " + next.getValue());
				ourLog.trace("Adding Prop: {}{} -- {}", CONCEPT_FIELD_PROPERTY_PREFIX, next.getKey(), next.getValue());
				if (next.getType() == TermConceptPropertyTypeEnum.CODING && isNotBlank(next.getDisplay())) {
					theDocument.addValue(CONCEPT_FIELD_PROPERTY_PREFIX + next.getKey(), next.getDisplay());
					System.out.println("Adding multivalue Prop: " + CONCEPT_FIELD_PROPERTY_PREFIX + next.getKey() + " -- " + next.getDisplay());
					theDocument.addValue(CONCEPT_FIELD_PROPERTY_PREFIX + next.getKey(), next.getDisplay());
					ourLog.trace("Adding multivalue Prop: {}{} -- {}", CONCEPT_FIELD_PROPERTY_PREFIX, next.getKey(), next.getDisplay());
				}
			}
		}
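The change above swaps raw System.out.println calls for SLF4J trace logging with {} placeholders, so the message is only assembled when trace logging is actually enabled. A minimal, self-contained illustration of the same idiom:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LoggingExample {
	private static final Logger ourLog = LoggerFactory.getLogger(LoggingExample.class);

	public static void main(String[] args) {
		String key = "PROPstatus";
		String value = "active";

		// Placeholders are substituted lazily; no string concatenation happens unless trace is on
		ourLog.trace("Adding Prop: {} -- {}", key, value);
	}
}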
|
||||
|
|
|
@ -21,6 +21,7 @@ package ca.uhn.fhir.jpa.partition;
*/

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.api.Pointcut;

@ -109,7 +110,7 @@ public class RequestPartitionHelperSvc implements IRequestPartitionHelperSvc {
}

if (theRequest instanceof SystemRequestDetails) {
requestPartitionId = getSystemRequestPartitionId(theRequest, nonPartitionableResource);
requestPartitionId = getSystemRequestPartitionId((SystemRequestDetails) theRequest, nonPartitionableResource);
// Interceptor call: STORAGE_PARTITION_IDENTIFY_READ
} else if (hasHooks(Pointcut.STORAGE_PARTITION_IDENTIFY_READ, myInterceptorBroadcaster, theRequest)) {
HookParams params = new HookParams()

@ -122,22 +123,18 @@ public class RequestPartitionHelperSvc implements IRequestPartitionHelperSvc {

validateRequestPartitionNotNull(requestPartitionId, Pointcut.STORAGE_PARTITION_IDENTIFY_READ);

return validateNormalizeAndNotifyHooksForRead(requestPartitionId, theRequest);
return validateNormalizeAndNotifyHooksForRead(requestPartitionId, theRequest, theResourceType);
}

return RequestPartitionId.allPartitions();
}

/**
*
* For system requests, read partition from tenant ID if present, otherwise set to DEFAULT. If the resource they are attempting to partition
* is non-partitionable scream in the logs and set the partition to DEFAULT.
*
* @param theRequest
* @param theNonPartitionableResource
* @return
*/
private RequestPartitionId getSystemRequestPartitionId(RequestDetails theRequest, boolean theNonPartitionableResource) {
private RequestPartitionId getSystemRequestPartitionId(SystemRequestDetails theRequest, boolean theNonPartitionableResource) {
RequestPartitionId requestPartitionId;
requestPartitionId = getSystemRequestPartitionId(theRequest);
if (theNonPartitionableResource && !requestPartitionId.isDefaultPartition()) {

@ -148,7 +145,7 @@ public class RequestPartitionHelperSvc implements IRequestPartitionHelperSvc {

/**
* Determine the partition for a System Call (defined by the fact that the request is of type SystemRequestDetails)
*
* <p>
* 1. If the tenant ID is set to the constant for all partitions, return all partitions
* 2. If there is a tenant ID set in the request, use it.
* 3. Otherwise, return the Default Partition.

@ -157,7 +154,10 @@ public class RequestPartitionHelperSvc implements IRequestPartitionHelperSvc {
* @return the {@link RequestPartitionId} to be used for this request.
*/
@Nonnull
private RequestPartitionId getSystemRequestPartitionId(@Nonnull RequestDetails theRequest) {
private RequestPartitionId getSystemRequestPartitionId(@Nonnull SystemRequestDetails theRequest) {
if (theRequest.getRequestPartitionId() != null) {
return theRequest.getRequestPartitionId();
}
if (theRequest.getTenantId() != null) {
if (theRequest.getTenantId().equals(ALL_PARTITIONS_NAME)) {
return RequestPartitionId.allPartitions();

@ -186,7 +186,7 @@ public class RequestPartitionHelperSvc implements IRequestPartitionHelperSvc {
}

if (theRequest instanceof SystemRequestDetails) {
requestPartitionId = getSystemRequestPartitionId(theRequest, nonPartitionableResource);
requestPartitionId = getSystemRequestPartitionId((SystemRequestDetails) theRequest, nonPartitionableResource);
} else {
//This is an external Request (e.g. ServletRequestDetails) so we want to figure out the partition via interceptor.
// Interceptor call: STORAGE_PARTITION_IDENTIFY_CREATE
HookParams params = new HookParams()

@ -204,7 +204,7 @@ public class RequestPartitionHelperSvc implements IRequestPartitionHelperSvc {
String resourceName = myFhirContext.getResourceType(theResource);
validateSinglePartitionForCreate(requestPartitionId, resourceName, Pointcut.STORAGE_PARTITION_IDENTIFY_CREATE);

return validateNormalizeAndNotifyHooksForRead(requestPartitionId, theRequest);
return validateNormalizeAndNotifyHooksForRead(requestPartitionId, theRequest, theResourceType);
}

return RequestPartitionId.allPartitions();

@ -218,7 +218,7 @@ public class RequestPartitionHelperSvc implements IRequestPartitionHelperSvc {
* If the partition has both, they are validated to ensure that they correspond.
*/
@Nonnull
private RequestPartitionId validateNormalizeAndNotifyHooksForRead(@Nonnull RequestPartitionId theRequestPartitionId, RequestDetails theRequest) {
private RequestPartitionId validateNormalizeAndNotifyHooksForRead(@Nonnull RequestPartitionId theRequestPartitionId, RequestDetails theRequest, String theResourceType) {
RequestPartitionId retVal = theRequestPartitionId;

if (retVal.getPartitionNames() != null) {

@ -229,11 +229,15 @@ public class RequestPartitionHelperSvc implements IRequestPartitionHelperSvc {

// Note: It's still possible that the partition only has a date but no name/id

HookParams params = new HookParams()
.add(RequestPartitionId.class, retVal)
.add(RequestDetails.class, theRequest)
.addIfMatchesType(ServletRequestDetails.class, theRequest);
doCallHooks(myInterceptorBroadcaster, theRequest, Pointcut.STORAGE_PARTITION_SELECTED, params);
if (myInterceptorBroadcaster.hasHooks(Pointcut.STORAGE_PARTITION_SELECTED)) {
RuntimeResourceDefinition runtimeResourceDefinition = myFhirContext.getResourceDefinition(theResourceType);
HookParams params = new HookParams()
.add(RequestPartitionId.class, retVal)
.add(RequestDetails.class, theRequest)
.addIfMatchesType(ServletRequestDetails.class, theRequest)
.add(RuntimeResourceDefinition.class, runtimeResourceDefinition);
doCallHooks(myInterceptorBroadcaster, theRequest, Pointcut.STORAGE_PARTITION_SELECTED, params);
}

return retVal;
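The javadoc above spells out the resolution order for system requests; a condensed, illustrative restatement follows. It is not the literal method body: the tenant-name branch is not shown in this hunk, so the fromPartitionName factory and the theAllPartitionsName parameter are assumptions.

// Illustrative only: precedence applied when resolving the partition for a SystemRequestDetails.
RequestPartitionId resolveSystemPartition(SystemRequestDetails theRequest, String theAllPartitionsName) {
	if (theRequest.getRequestPartitionId() != null) {
		return theRequest.getRequestPartitionId();                              // an explicit RequestPartitionId always wins
	}
	if (theRequest.getTenantId() != null) {
		if (theRequest.getTenantId().equals(theAllPartitionsName)) {
			return RequestPartitionId.allPartitions();                          // 1. the all-partitions constant
		}
		return RequestPartitionId.fromPartitionName(theRequest.getTenantId()); // 2. tenant ID (assumed factory)
	}
	return RequestPartitionId.defaultPartition();                               // 3. fall back to the default partition
}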
@ -26,6 +26,7 @@ import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.api.IInterceptorService;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.rest.api.EncodingEnum;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.ETagSupportEnum;

@ -58,10 +59,23 @@ public class SystemRequestDetails extends RequestDetails {

private ListMultimap<String, String> myHeaders;

/**
* If a SystemRequestDetails has a RequestPartitionId, it will take precedence over the tenantId
*/
private RequestPartitionId myRequestPartitionId;

public SystemRequestDetails(IInterceptorBroadcaster theInterceptorBroadcaster) {
super(theInterceptorBroadcaster);
}

public RequestPartitionId getRequestPartitionId() {
return myRequestPartitionId;
}

public void setRequestPartitionId(RequestPartitionId theRequestPartitionId) {
myRequestPartitionId = theRequestPartitionId;
}

@Override
protected byte[] getByteStreamRequestContents() {
return new byte[0];
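A short usage sketch for the new field (the broadcaster variable and tenant name are illustrative; an IInterceptorBroadcaster is assumed to be available from the application context):

// Illustrative: a system request pinned to an explicit partition.
// Per the comment above, the RequestPartitionId takes precedence over any tenant ID.
SystemRequestDetails requestDetails = new SystemRequestDetails(myInterceptorBroadcaster);
requestDetails.setTenantId("TENANT-A");
requestDetails.setRequestPartitionId(RequestPartitionId.defaultPartition());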
@ -49,6 +49,7 @@ import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.param.DateRangeParam;
import ca.uhn.fhir.rest.server.IResourceProvider;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.provider.ProviderConstants;
import ca.uhn.fhir.util.CoverageIgnore;
import ca.uhn.fhir.util.ParametersUtil;
import org.hl7.fhir.instance.model.api.IBaseMetaType;

@ -61,9 +62,9 @@ import org.springframework.beans.factory.annotation.Required;
import javax.servlet.http.HttpServletRequest;
import java.util.Date;

import static ca.uhn.fhir.rest.server.provider.ProviderConstants.OPERATION_META;
import static ca.uhn.fhir.jpa.model.util.JpaConstants.OPERATION_META_ADD;
import static ca.uhn.fhir.jpa.model.util.JpaConstants.OPERATION_META_DELETE;
import static ca.uhn.fhir.rest.server.provider.ProviderConstants.OPERATION_META;

public abstract class BaseJpaResourceProvider<T extends IBaseResource> extends BaseJpaProvider implements IResourceProvider {

@ -188,25 +189,25 @@ public abstract class BaseJpaResourceProvider<T extends IBaseResource> extends B
}
}

@Operation(name = JpaConstants.OPERATION_EXPUNGE, idempotent = false, returnParameters = {
@Operation(name = ProviderConstants.OPERATION_EXPUNGE, idempotent = false, returnParameters = {
@OperationParam(name = JpaConstants.OPERATION_EXPUNGE_OUT_PARAM_EXPUNGE_COUNT, typeName = "integer")
})
public IBaseParameters expunge(
@IdParam IIdType theIdParam,
@OperationParam(name = JpaConstants.OPERATION_EXPUNGE_PARAM_LIMIT, typeName = "integer") IPrimitiveType<Integer> theLimit,
@OperationParam(name = JpaConstants.OPERATION_EXPUNGE_PARAM_EXPUNGE_DELETED_RESOURCES, typeName = "boolean") IPrimitiveType<Boolean> theExpungeDeletedResources,
@OperationParam(name = JpaConstants.OPERATION_EXPUNGE_PARAM_EXPUNGE_PREVIOUS_VERSIONS, typeName = "boolean") IPrimitiveType<Boolean> theExpungeOldVersions,
@OperationParam(name = ProviderConstants.OPERATION_EXPUNGE_PARAM_LIMIT, typeName = "integer") IPrimitiveType<Integer> theLimit,
@OperationParam(name = ProviderConstants.OPERATION_EXPUNGE_PARAM_EXPUNGE_DELETED_RESOURCES, typeName = "boolean") IPrimitiveType<Boolean> theExpungeDeletedResources,
@OperationParam(name = ProviderConstants.OPERATION_EXPUNGE_PARAM_EXPUNGE_PREVIOUS_VERSIONS, typeName = "boolean") IPrimitiveType<Boolean> theExpungeOldVersions,
RequestDetails theRequest) {
return doExpunge(theIdParam, theLimit, theExpungeDeletedResources, theExpungeOldVersions, null, theRequest);
}

@Operation(name = JpaConstants.OPERATION_EXPUNGE, idempotent = false, returnParameters = {
@Operation(name = ProviderConstants.OPERATION_EXPUNGE, idempotent = false, returnParameters = {
@OperationParam(name = JpaConstants.OPERATION_EXPUNGE_OUT_PARAM_EXPUNGE_COUNT, typeName = "integer")
})
public IBaseParameters expunge(
@OperationParam(name = JpaConstants.OPERATION_EXPUNGE_PARAM_LIMIT, typeName = "integer") IPrimitiveType<Integer> theLimit,
@OperationParam(name = JpaConstants.OPERATION_EXPUNGE_PARAM_EXPUNGE_DELETED_RESOURCES, typeName = "boolean") IPrimitiveType<Boolean> theExpungeDeletedResources,
@OperationParam(name = JpaConstants.OPERATION_EXPUNGE_PARAM_EXPUNGE_PREVIOUS_VERSIONS, typeName = "boolean") IPrimitiveType<Boolean> theExpungeOldVersions,
@OperationParam(name = ProviderConstants.OPERATION_EXPUNGE_PARAM_LIMIT, typeName = "integer") IPrimitiveType<Integer> theLimit,
@OperationParam(name = ProviderConstants.OPERATION_EXPUNGE_PARAM_EXPUNGE_DELETED_RESOURCES, typeName = "boolean") IPrimitiveType<Boolean> theExpungeDeletedResources,
@OperationParam(name = ProviderConstants.OPERATION_EXPUNGE_PARAM_EXPUNGE_PREVIOUS_VERSIONS, typeName = "boolean") IPrimitiveType<Boolean> theExpungeOldVersions,
RequestDetails theRequest) {
return doExpunge(null, theLimit, theExpungeDeletedResources, theExpungeOldVersions, null, theRequest);
}
@ -33,6 +33,7 @@ import ca.uhn.fhir.rest.annotation.Since;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.param.DateRangeParam;
import ca.uhn.fhir.rest.server.provider.ProviderConstants;
import org.hl7.fhir.instance.model.api.IBaseParameters;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
import org.springframework.beans.factory.annotation.Autowired;

@ -58,14 +59,14 @@ public class BaseJpaSystemProvider<T, MT> extends BaseJpaProvider implements IJp
return myResourceReindexingSvc;
}

@Operation(name = JpaConstants.OPERATION_EXPUNGE, idempotent = false, returnParameters = {
@Operation(name = ProviderConstants.OPERATION_EXPUNGE, idempotent = false, returnParameters = {
@OperationParam(name = JpaConstants.OPERATION_EXPUNGE_OUT_PARAM_EXPUNGE_COUNT, typeName = "integer")
})
public IBaseParameters expunge(
@OperationParam(name = JpaConstants.OPERATION_EXPUNGE_PARAM_LIMIT, typeName = "integer") IPrimitiveType<Integer> theLimit,
@OperationParam(name = JpaConstants.OPERATION_EXPUNGE_PARAM_EXPUNGE_DELETED_RESOURCES, typeName = "boolean") IPrimitiveType<Boolean> theExpungeDeletedResources,
@OperationParam(name = JpaConstants.OPERATION_EXPUNGE_PARAM_EXPUNGE_PREVIOUS_VERSIONS, typeName = "boolean") IPrimitiveType<Boolean> theExpungeOldVersions,
@OperationParam(name = JpaConstants.OPERATION_EXPUNGE_PARAM_EXPUNGE_EVERYTHING, typeName = "boolean") IPrimitiveType<Boolean> theExpungeEverything,
@OperationParam(name = ProviderConstants.OPERATION_EXPUNGE_PARAM_LIMIT, typeName = "integer") IPrimitiveType<Integer> theLimit,
@OperationParam(name = ProviderConstants.OPERATION_EXPUNGE_PARAM_EXPUNGE_DELETED_RESOURCES, typeName = "boolean") IPrimitiveType<Boolean> theExpungeDeletedResources,
@OperationParam(name = ProviderConstants.OPERATION_EXPUNGE_PARAM_EXPUNGE_PREVIOUS_VERSIONS, typeName = "boolean") IPrimitiveType<Boolean> theExpungeOldVersions,
@OperationParam(name = ProviderConstants.OPERATION_EXPUNGE_PARAM_EXPUNGE_EVERYTHING, typeName = "boolean") IPrimitiveType<Boolean> theExpungeEverything,
RequestDetails theRequestDetails
) {
ExpungeOptions options = createExpungeOptions(theLimit, theExpungeDeletedResources, theExpungeOldVersions, theExpungeEverything);
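For context, the operation these annotations declare can be invoked with the HAPI generic client roughly as below. This is a sketch only; the literal parameter names are assumed to match the ProviderConstants values ("limit", "expungeDeletedResources", "expungePreviousVersions"), and client is assumed to be an IGenericClient for an R4 server.

// Illustrative: calling $expunge at the server level with a limit of 1000.
Parameters inParams = new Parameters();
inParams.addParameter().setName("limit").setValue(new IntegerType(1000));
inParams.addParameter().setName("expungeDeletedResources").setValue(new BooleanType(true));
inParams.addParameter().setName("expungePreviousVersions").setValue(new BooleanType(true));

Parameters outParams = client
	.operation()
	.onServer()
	.named("$expunge")
	.withParameters(inParams)
	.execute();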
@ -796,7 +796,7 @@ public class SearchBuilder implements ISearchBuilder {
// Account for _include=[resourceType]:*
String wantResourceType = null;
if (!matchAll) {
if (nextInclude.getParamName().equals("*")) {
if ("*".equals(nextInclude.getParamName())) {
wantResourceType = nextInclude.getParamType();
matchAll = true;
}
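The wildcard include form this block accounts for can be requested through the generic client as in the sketch below (the resource type and client variable are illustrative; client is assumed to be an IGenericClient):

// Illustrative: ask the server to include every resource referenced by the matched resources.
Bundle results = client
	.search()
	.forResource(MedicationRequest.class)
	.include(new Include("MedicationRequest:*"))
	.returnBundle(Bundle.class)
	.execute();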
@ -54,7 +54,8 @@ public class SearchParamPresenceSvcImpl implements ISearchParamPresenceSvc {
}

@Override
public AddRemoveCount updatePresence(ResourceTable theResource, Map<String, Boolean> theParamNameToPresence) {
public AddRemoveCount
updatePresence(ResourceTable theResource, Map<String, Boolean> theParamNameToPresence) {
AddRemoveCount retVal = new AddRemoveCount();
if (myDaoConfig.getIndexMissingFields() == DaoConfig.IndexEnabledEnum.DISABLED) {
return retVal;
@ -132,6 +132,7 @@ import org.springframework.transaction.interceptor.NoRollbackRuleAttribute;
import org.springframework.transaction.interceptor.RuleBasedTransactionAttribute;
import org.springframework.transaction.support.TransactionSynchronizationManager;
import org.springframework.transaction.support.TransactionTemplate;
import org.springframework.util.comparator.Comparators;

import javax.annotation.Nonnull;
import javax.annotation.Nullable;

@ -938,7 +939,15 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc {
}
});

PredicateFinalStep expansionStep = buildExpansionPredicate(theIncludeOrExclude, predicate);
List<String> codes = theIncludeOrExclude
.getConcept()
.stream()
.filter(Objects::nonNull)
.map(ValueSet.ConceptReferenceComponent::getCode)
.filter(StringUtils::isNotBlank)
.collect(Collectors.toList());

PredicateFinalStep expansionStep = buildExpansionPredicate(codes, predicate);
final PredicateFinalStep finishedQuery;
if (expansionStep == null) {
finishedQuery = step;

@ -973,9 +982,6 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc {
}
}

// jpaQuery.setMaxResults(maxResultsPerBatch);
// jpaQuery.setFirstResult(theQueryIndex * maxResultsPerBatch);

ourLog.debug("Beginning batch expansion for {} with max results per batch: {}", (theAdd ? "inclusion" : "exclusion"), maxResultsPerBatch);

StopWatch swForBatch = new StopWatch();

@ -984,9 +990,22 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc {
SearchQuery<TermConcept> termConceptsQuery = searchSession.search(TermConcept.class)
.where(f -> finishedQuery).toQuery();

System.out.println("About to query:" + termConceptsQuery.queryString());
ourLog.trace("About to query: {}", termConceptsQuery.queryString());
List<TermConcept> termConcepts = termConceptsQuery.fetchHits(theQueryIndex * maxResultsPerBatch, maxResultsPerBatch);

// If the include section had multiple codes, return the codes in the same order
if (codes.size() > 1) {
termConcepts = new ArrayList<>(termConcepts);
Map<String, Integer> codeToIndex = new HashMap<>(codes.size());
for (int i = 0; i < codes.size(); i++) {
codeToIndex.put(codes.get(i), i);
}
termConcepts.sort(((o1, o2) -> {
Integer idx1 = codeToIndex.get(o1.getCode());
Integer idx2 = codeToIndex.get(o2.getCode());
return Comparators.nullsHigh().compare(idx1, idx2);
}));
}

int resultsInBatch = termConcepts.size();
int firstResult = theQueryIndex * maxResultsPerBatch;// TODO GGG HS we lose the ability to check the index of the first result, so just best-guessing it here.

@ -1027,17 +1046,13 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc {
/**
* Helper method which builds a predicate for the expansion
*/
private PredicateFinalStep buildExpansionPredicate(ValueSet.ConceptSetComponent theTheIncludeOrExclude, SearchPredicateFactory thePredicate) {
private PredicateFinalStep buildExpansionPredicate(List<String> theCodes, SearchPredicateFactory thePredicate) {
PredicateFinalStep expansionStep;
/*
* Include/Exclude Concepts
*/
List<Term> codes = theTheIncludeOrExclude
.getConcept()
List<Term> codes = theCodes
.stream()
.filter(Objects::nonNull)
.map(ValueSet.ConceptReferenceComponent::getCode)
.filter(StringUtils::isNotBlank)
.map(t -> new Term("myCode", t))
.collect(Collectors.toList());
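The re-ordering step introduced above can be read in isolation as the following sketch, a generic restatement of the same idea; org.springframework.util.comparator.Comparators and the java.util collections are assumed, and the method name is illustrative.

// Illustrative: order fetched items to follow the order of the requested keys;
// items whose key was not requested sort to the end, which is what nullsHigh() provides.
static List<String> orderLikeRequested(List<String> theRequestedCodes, List<String> theFetchedCodes) {
	Map<String, Integer> codeToIndex = new HashMap<>(theRequestedCodes.size());
	for (int i = 0; i < theRequestedCodes.size(); i++) {
		codeToIndex.put(theRequestedCodes.get(i), i);
	}
	List<String> retVal = new ArrayList<>(theFetchedCodes);
	retVal.sort((o1, o2) -> Comparators.nullsHigh().compare(codeToIndex.get(o1), codeToIndex.get(o2)));
	return retVal;
}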
@ -378,6 +378,9 @@ public class CircularQueueCaptureQueriesListener extends BaseCaptureQueriesListe
b.append(new InstantType(new Date(theQuery.getQueryTimestamp())).getValueAsString());
b.append(" took ").append(StopWatch.formatMillis(theQuery.getElapsedTime()));
b.append(" on Thread: ").append(theQuery.getThreadName());
if (theQuery.getSize() > 1) {
b.append("\nExecution Count: ").append(theQuery.getSize()).append(" (parameters shown are for first execution)");
}
b.append("\nSQL:\n").append(formattedSql);
if (theQuery.getStackTrace() != null) {
b.append("\nStack:\n ");
@ -26,7 +26,6 @@ import org.hibernate.engine.jdbc.internal.BasicFormatterImpl;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.regex.Pattern;

import static org.apache.commons.lang3.StringUtils.trim;

@ -93,11 +92,7 @@ public class SqlQuery {
}
}

if (mySize > 1) {
retVal += "\nsize: " + mySize + "\n";
}
return trim(retVal);

}

public StackTraceElement[] getStackTrace() {
@ -0,0 +1,145 @@
package ca.uhn.fhir.jpa.batch.reader;

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.delete.model.PartitionedUrl;
import ca.uhn.fhir.jpa.delete.model.RequestListJson;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import ca.uhn.fhir.jpa.searchparam.ResourceSearch;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.github.jsonldjava.shaded.com.google.common.collect.Lists;
import org.hl7.fhir.r4.model.Patient;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;

import java.util.Arrays;
import java.util.Calendar;
import java.util.Collections;
import java.util.GregorianCalendar;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.hasSize;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.when;

@ExtendWith(MockitoExtension.class)
class ReverseCronologicalBatchResourcePidReaderTest {
static FhirContext ourFhirContext = FhirContext.forR4Cached();
static String URL_A = "a";
static String URL_B = "b";
static String URL_C = "c";
static Set<ResourcePersistentId> emptySet = Collections.emptySet();
static RequestPartitionId partId = RequestPartitionId.defaultPartition();

Patient myPatient;

@Mock
MatchUrlService myMatchUrlService;
@Mock
DaoRegistry myDaoRegistry;
@Mock
IFhirResourceDao<Patient> myPatientDao;

@InjectMocks
ReverseCronologicalBatchResourcePidReader myReader = new ReverseCronologicalBatchResourcePidReader();

@BeforeEach
public void before() throws JsonProcessingException {
RequestListJson requestListJson = new RequestListJson();
requestListJson.setPartitionedUrls(Lists.newArrayList(new PartitionedUrl(URL_A, partId), new PartitionedUrl(URL_B, partId), new PartitionedUrl(URL_C, partId)));
ObjectMapper mapper = new ObjectMapper();
String requestListJsonString = mapper.writeValueAsString(requestListJson);
myReader.setRequestListJson(requestListJsonString);

SearchParameterMap map = new SearchParameterMap();
RuntimeResourceDefinition patientResDef = ourFhirContext.getResourceDefinition("Patient");
when(myMatchUrlService.getResourceSearch(URL_A)).thenReturn(new ResourceSearch(patientResDef, map));
when(myMatchUrlService.getResourceSearch(URL_B)).thenReturn(new ResourceSearch(patientResDef, map));
when(myMatchUrlService.getResourceSearch(URL_C)).thenReturn(new ResourceSearch(patientResDef, map));
when(myDaoRegistry.getResourceDao("Patient")).thenReturn(myPatientDao);
myPatient = new Patient();
when(myPatientDao.readByPid(any())).thenReturn(myPatient);
Calendar cal = new GregorianCalendar(2021, 1, 1);
myPatient.getMeta().setLastUpdated(cal.getTime());
}

private Set<ResourcePersistentId> buildPidSet(Integer... thePids) {
return Arrays.stream(thePids)
.map(Long::new)
.map(ResourcePersistentId::new)
.collect(Collectors.toSet());
}

@Test
public void test3x1() throws Exception {
when(myPatientDao.searchForIds(any(), any()))
.thenReturn(buildPidSet(1, 2, 3))
.thenReturn(emptySet)
.thenReturn(buildPidSet(4, 5, 6))
.thenReturn(emptySet)
.thenReturn(buildPidSet(7, 8))
.thenReturn(emptySet);

assertListEquals(myReader.read(), 1, 2, 3);
assertListEquals(myReader.read(), 4, 5, 6);
assertListEquals(myReader.read(), 7, 8);
assertNull(myReader.read());
}

@Test
public void test1x3start() throws Exception {
when(myPatientDao.searchForIds(any(), any()))
.thenReturn(buildPidSet(1, 2, 3))
.thenReturn(buildPidSet(4, 5, 6))
.thenReturn(buildPidSet(7, 8))
.thenReturn(emptySet)
.thenReturn(emptySet)
.thenReturn(emptySet);

assertListEquals(myReader.read(), 1, 2, 3);
assertListEquals(myReader.read(), 4, 5, 6);
assertListEquals(myReader.read(), 7, 8);
assertNull(myReader.read());
}

@Test
public void test1x3end() throws Exception {
when(myPatientDao.searchForIds(any(), any()))
.thenReturn(emptySet)
.thenReturn(emptySet)
.thenReturn(buildPidSet(1, 2, 3))
.thenReturn(buildPidSet(4, 5, 6))
.thenReturn(buildPidSet(7, 8))
.thenReturn(emptySet);

assertListEquals(myReader.read(), 1, 2, 3);
assertListEquals(myReader.read(), 4, 5, 6);
assertListEquals(myReader.read(), 7, 8);
assertNull(myReader.read());
}

private void assertListEquals(List<Long> theList, Integer... theValues) {
assertThat(theList, hasSize(theValues.length));
for (int i = 0; i < theList.size(); ++i) {
assertEquals(theList.get(i), Long.valueOf(theValues[i]));
}
}

}
@ -1,75 +0,0 @@
package ca.uhn.fhir.jpa.bulk;

import ca.uhn.fhir.jpa.dao.r4.BaseJpaR4Test;
import org.junit.jupiter.api.AfterEach;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.core.BatchStatus;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobInstance;
import org.springframework.batch.core.explore.JobExplorer;
import org.springframework.batch.core.repository.dao.JobExecutionDao;
import org.springframework.batch.core.repository.dao.JobInstanceDao;
import org.springframework.batch.core.repository.dao.MapJobExecutionDao;
import org.springframework.batch.core.repository.dao.MapJobInstanceDao;
import org.springframework.beans.factory.annotation.Autowired;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;

import static org.awaitility.Awaitility.await;
import static org.junit.jupiter.api.Assertions.fail;

public class BaseBatchJobR4Test extends BaseJpaR4Test {

private static final Logger ourLog = LoggerFactory.getLogger(BaseBatchJobR4Test.class);
@Autowired
private JobExplorer myJobExplorer;
// @Autowired
// private JobExecutionDao myMapJobExecutionDao;
// @Autowired
// private JobInstanceDao myMapJobInstanceDao;
//
// @AfterEach
// public void after() {
// ((MapJobExecutionDao)myMapJobExecutionDao).clear();
// ((MapJobInstanceDao)myMapJobInstanceDao).clear();
// }

protected List<JobExecution> awaitAllBulkJobCompletions(String... theJobNames) {
assert theJobNames.length > 0;

List<JobInstance> bulkExport = new ArrayList<>();
for (String nextName : theJobNames) {
bulkExport.addAll(myJobExplorer.findJobInstancesByJobName(nextName, 0, 100));
}
if (bulkExport.isEmpty()) {
List<String> wantNames = Arrays.asList(theJobNames);
List<String> haveNames = myJobExplorer.getJobNames();
fail("There are no jobs running - Want names " + wantNames + " and have names " + haveNames);
}
List<JobExecution> bulkExportExecutions = bulkExport.stream().flatMap(jobInstance -> myJobExplorer.getJobExecutions(jobInstance).stream()).collect(Collectors.toList());
awaitJobCompletions(bulkExportExecutions);

// Return the final state
bulkExportExecutions = bulkExport.stream().flatMap(jobInstance -> myJobExplorer.getJobExecutions(jobInstance).stream()).collect(Collectors.toList());
return bulkExportExecutions;
}

protected void awaitJobCompletions(Collection<JobExecution> theJobs) {
theJobs.forEach(jobExecution -> awaitJobCompletion(jobExecution));
}

protected void awaitJobCompletion(JobExecution theJobExecution) {
await().atMost(120, TimeUnit.SECONDS).until(() -> {
JobExecution jobExecution = myJobExplorer.getJobExecution(theJobExecution.getId());
ourLog.info("JobExecution {} currently has status: {}- Failures if any: {}", theJobExecution.getId(), jobExecution.getStatus(), jobExecution.getFailureExceptions());
return jobExecution.getStatus() == BatchStatus.COMPLETED || jobExecution.getStatus() == BatchStatus.FAILED;
});
}

}
@ -15,6 +15,7 @@ import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
import ca.uhn.fhir.jpa.dao.data.IBulkExportCollectionDao;
import ca.uhn.fhir.jpa.dao.data.IBulkExportCollectionFileDao;
import ca.uhn.fhir.jpa.dao.data.IBulkExportJobDao;
import ca.uhn.fhir.jpa.dao.r4.BaseJpaR4Test;
import ca.uhn.fhir.jpa.entity.BulkExportCollectionEntity;
import ca.uhn.fhir.jpa.entity.BulkExportCollectionFileEntity;
import ca.uhn.fhir.jpa.entity.BulkExportJobEntity;

@ -26,6 +27,7 @@ import ca.uhn.fhir.parser.IParser;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.test.utilities.BatchJobHelper;
import ca.uhn.fhir.util.HapiExtensions;
import ca.uhn.fhir.util.UrlUtil;
import com.google.common.base.Charsets;

@ -80,7 +82,7 @@ import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;

public class BulkDataExportSvcImplR4Test extends BaseBatchJobR4Test {
public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {

public static final String TEST_FILTER = "Patient?gender=female";
private static final Logger ourLog = LoggerFactory.getLogger(BulkDataExportSvcImplR4Test.class);

@ -94,6 +96,8 @@ public class BulkDataExportSvcImplR4Test extends BaseBatchJobR4Test {
private IBulkDataExportSvc myBulkDataExportSvc;
@Autowired
private IBatchJobSubmitter myBatchJobSubmitter;
@Autowired
private BatchJobHelper myBatchJobHelper;

@Autowired
@Qualifier(BatchJobsConfig.BULK_EXPORT_JOB_NAME)

@ -321,10 +325,11 @@ public class BulkDataExportSvcImplR4Test extends BaseBatchJobR4Test {
}

private void awaitAllBulkJobCompletions() {
awaitAllBulkJobCompletions(
myBatchJobHelper.awaitAllBulkJobCompletions(
BatchJobsConfig.BULK_EXPORT_JOB_NAME,
BatchJobsConfig.PATIENT_BULK_EXPORT_JOB_NAME,
BatchJobsConfig.GROUP_BULK_EXPORT_JOB_NAME
BatchJobsConfig.GROUP_BULK_EXPORT_JOB_NAME,
BatchJobsConfig.DELETE_EXPUNGE_JOB_NAME
);
}

@ -589,7 +594,7 @@ public class BulkDataExportSvcImplR4Test extends BaseBatchJobR4Test {

JobExecution jobExecution = myBatchJobSubmitter.runJob(myBulkJob, paramBuilder.toJobParameters());

awaitJobCompletion(jobExecution);
myBatchJobHelper.awaitJobCompletion(jobExecution);
String jobUUID = (String) jobExecution.getExecutionContext().get("jobUUID");
IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobUUID);

@ -615,7 +620,7 @@ public class BulkDataExportSvcImplR4Test extends BaseBatchJobR4Test {

JobExecution jobExecution = myBatchJobSubmitter.runJob(myBulkJob, paramBuilder.toJobParameters());

awaitJobCompletion(jobExecution);
myBatchJobHelper.awaitJobCompletion(jobExecution);
IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId());

assertThat(jobInfo.getStatus(), equalTo(BulkExportJobStatusEnum.COMPLETE));

@ -733,7 +738,7 @@ public class BulkDataExportSvcImplR4Test extends BaseBatchJobR4Test {

JobExecution jobExecution = myBatchJobSubmitter.runJob(myPatientBulkJob, paramBuilder.toJobParameters());

awaitJobCompletion(jobExecution);
myBatchJobHelper.awaitJobCompletion(jobExecution);
IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId());

assertThat(jobInfo.getStatus(), equalTo(BulkExportJobStatusEnum.COMPLETE));
@ -5,7 +5,7 @@ import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.IAnonymousInterceptor;
import ca.uhn.fhir.interceptor.api.Interceptor;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.jpa.bulk.BaseBatchJobR4Test;
import ca.uhn.fhir.jpa.batch.BatchJobsConfig;
import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc;
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobFileJson;

@ -13,12 +13,14 @@ import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobJson;
import ca.uhn.fhir.jpa.bulk.imprt.model.JobFileRowProcessingModeEnum;
import ca.uhn.fhir.jpa.dao.data.IBulkImportJobDao;
import ca.uhn.fhir.jpa.dao.data.IBulkImportJobFileDao;
import ca.uhn.fhir.jpa.dao.r4.BaseJpaR4Test;
import ca.uhn.fhir.jpa.entity.BulkImportJobEntity;
import ca.uhn.fhir.jpa.entity.BulkImportJobFileEntity;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.test.utilities.BatchJobHelper;
import ca.uhn.fhir.test.utilities.ITestDataBuilder;
import ca.uhn.fhir.util.BundleBuilder;
import org.hl7.fhir.instance.model.api.IBaseResource;

@ -54,7 +56,7 @@ import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;

public class BulkDataImportR4Test extends BaseBatchJobR4Test implements ITestDataBuilder {
public class BulkDataImportR4Test extends BaseJpaR4Test implements ITestDataBuilder {

private static final Logger ourLog = LoggerFactory.getLogger(BulkDataImportR4Test.class);
@Autowired

@ -67,6 +69,8 @@ public class BulkDataImportR4Test extends BaseBatchJobR4Test implements ITestDat
private JobExplorer myJobExplorer;
@Autowired
private JobRegistry myJobRegistry;
@Autowired
private BatchJobHelper myBatchJobHelper;

@AfterEach
public void after() {

@ -90,7 +94,7 @@ public class BulkDataImportR4Test extends BaseBatchJobR4Test implements ITestDat
boolean activateJobOutcome = mySvc.activateNextReadyJob();
assertTrue(activateJobOutcome);

List<JobExecution> executions = awaitAllBulkJobCompletions();
List<JobExecution> executions = awaitAllBulkImportJobCompletion();
assertEquals("testFlow_TransactionRows", executions.get(0).getJobParameters().getString(BulkExportJobConfig.JOB_DESCRIPTION));

runInTransaction(() -> {

@ -127,7 +131,7 @@ public class BulkDataImportR4Test extends BaseBatchJobR4Test implements ITestDat
boolean activateJobOutcome = mySvc.activateNextReadyJob();
assertTrue(activateJobOutcome);

awaitAllBulkJobCompletions();
awaitAllBulkImportJobCompletion();

ArgumentCaptor<HookParams> paramsCaptor = ArgumentCaptor.forClass(HookParams.class);
verify(interceptor, times(50)).invoke(any(), paramsCaptor.capture());

@ -207,8 +211,8 @@ public class BulkDataImportR4Test extends BaseBatchJobR4Test implements ITestDat
assertEquals(true, job.isRestartable());
}

protected List<JobExecution> awaitAllBulkJobCompletions() {
return awaitAllBulkJobCompletions(BULK_IMPORT_JOB_NAME);
protected List<JobExecution> awaitAllBulkImportJobCompletion() {
return myBatchJobHelper.awaitAllBulkJobCompletions(BatchJobsConfig.BULK_IMPORT_JOB_NAME);
}

@Interceptor

@ -223,7 +227,5 @@ public class BulkDataImportR4Test extends BaseBatchJobR4Test implements ITestDat
throw new InternalErrorException(ERROR_MESSAGE);
}
}

}

}
@ -8,6 +8,8 @@ import ca.uhn.fhir.jpa.subscription.channel.config.SubscriptionChannelConfig;
import ca.uhn.fhir.jpa.subscription.match.config.SubscriptionProcessorConfig;
import ca.uhn.fhir.jpa.subscription.match.deliver.resthook.SubscriptionDeliveringRestHookSubscriber;
import ca.uhn.fhir.jpa.subscription.submit.config.SubscriptionSubmitterConfig;
import ca.uhn.fhir.test.utilities.BatchJobHelper;
import org.springframework.batch.core.explore.JobExplorer;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;

@ -62,4 +64,9 @@ public class TestJPAConfig {
public SubscriptionDeliveringRestHookSubscriber stoppableSubscriptionDeliveringRestHookSubscriber() {
return new StoppableSubscriptionDeliveringRestHookSubscriber();
}

@Bean
public BatchJobHelper batchJobHelper(JobExplorer theJobExplorer) {
return new BatchJobHelper(theJobExplorer);
}
}