New batch framework / Bulk Import (#3387)
* Work on new batch framework * Work on new batch framework * Work on new batch framework * Work on batch * Compile working * Work on bulk import * Adjust import * Work * Work on batch * Bump version * Work on new batch processes * Work on batch * Bump to PRE4 * Build fixes * Cleanup * Small tweak * Add exception code * Test fixes * Test fixes * Test fix * Additional synchronization * Add license headers * Test fixes * Test fixes * Add changelogs * Address PG * Test fix * Test fix * Test fixes * Review notes * Work on tests * Test fixes * Test fix * Work on tests * Test fix * Test fixes * Test fixes * Add missing exception codes * Test fix * Test fixes * More test fixing * License headers * Test fix * Add new test logging * Work on tests * Test fixes * Test fix * Resolve fixme * Try to avoid test failure * Add import command * Work on storage * Fix error codes * Fixes * License header * Build fix * Build fixes * Fix dep
This commit is contained in:
parent 65776bbab3
commit b7d1ae217d
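The headline feature of this commit is a new bulk-import CLI command added under hapi-fhir-cli. As a quick orientation before the diff, here is a minimal sketch of driving it the same way the new BulkImportCommandTest does, via App.main(); the FHIR version flag spelling, the source directory, and the target URL are assumptions/placeholders rather than values taken from this commit:

import ca.uhn.fhir.cli.App;
import ca.uhn.fhir.cli.BulkImportCommand;

public class BulkImportExample {
	public static void main(String[] theArgs) {
		// Serves the .json/.ndjson files found in the source directory over a local
		// HTTP server, then kicks off the $import operation against the target server.
		App.main(new String[]{
			BulkImportCommand.BULK_IMPORT,
			"--fhir-version", "r4",                                   // assumed spelling of the standard CLI version flag
			"--" + BulkImportCommand.PORT, "0",                       // 0 = pick any free local port
			"--" + BulkImportCommand.SOURCE_DIRECTORY, "/tmp/ndjson", // placeholder directory of NDJSON files
			"--" + BulkImportCommand.TARGET_BASE, "http://localhost:8000/fhir" // placeholder target server
		});
	}
}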
@ -4,7 +4,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-fhir</artifactId>
|
||||
<version>6.0.0-PRE3-SNAPSHOT</version>
|
||||
<version>6.0.0-PRE4-SNAPSHOT</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>6.0.0-PRE3-SNAPSHOT</version>
|
||||
<version>6.0.0-PRE4-SNAPSHOT</version>
|
||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>6.0.0-PRE3-SNAPSHOT</version>
|
||||
<version>6.0.0-PRE4-SNAPSHOT</version>
|
||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
|
|
@ -47,6 +47,8 @@ public enum FhirVersionEnum {
|
|||
|
||||
R5("org.hl7.fhir.r5.hapi.ctx.FhirR5", null, true, new R5Version());
|
||||
|
||||
// If you add new constants, add to the various methods below too!
|
||||
|
||||
private final FhirVersionEnum myEquivalent;
|
||||
private final boolean myIsRi;
|
||||
private final String myVersionClass;
|
||||
|
@ -147,6 +149,29 @@ public enum FhirVersionEnum {
|
|||
String provideVersion();
|
||||
}
|
||||
|
||||
/**
|
||||
* Given a FHIR model object type, determine which version of FHIR it is for
|
||||
*/
|
||||
public static FhirVersionEnum determineVersionForType(Class<?> theFhirType) {
|
||||
switch (theFhirType.getName()) {
|
||||
case "ca.uhn.fhir.model.api.BaseElement":
|
||||
return DSTU2;
|
||||
case "org.hl7.fhir.dstu2.model.Base":
|
||||
return DSTU2_HL7ORG;
|
||||
case "org.hl7.fhir.dstu3.model.Base":
|
||||
return DSTU3;
|
||||
case "org.hl7.fhir.r4.model.Base":
|
||||
return R4;
|
||||
case "org.hl7.fhir.r5.model.Base":
|
||||
return R5;
|
||||
case "java.lang.Object":
|
||||
return null;
|
||||
default:
|
||||
return determineVersionForType(theFhirType.getSuperclass());
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private static class Version implements IVersionProvider {
|
||||
|
||||
private String myVersion;
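The new determineVersionForType() helper walks a model class's superclass chain until it reaches one of the per-version base classes listed above. A small usage sketch (Patient is simply a convenient R4 structure class; any model type behaves the same way):

import ca.uhn.fhir.context.FhirVersionEnum;
import org.hl7.fhir.r4.model.Patient;

public class DetermineVersionExample {
	public static void main(String[] theArgs) {
		// Patient extends org.hl7.fhir.r4.model.Base through its superclass chain,
		// so the recursive lookup resolves to R4.
		System.out.println(FhirVersionEnum.determineVersionForType(Patient.class)); // R4

		// A type with no FHIR base class recurses up to java.lang.Object and yields null.
		System.out.println(FhirVersionEnum.determineVersionForType(String.class)); // null
	}
}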
@ -25,7 +25,7 @@ public final class Msg {
|
|||
|
||||
/**
|
||||
* IMPORTANT: Please update the following comment after you add a new code
|
||||
* Last code value: 2038
|
||||
* Last code value: 2065
|
||||
*/
|
||||
|
||||
private Msg() {}
|
||||
|
|
|
@ -0,0 +1,41 @@
|
|||
package ca.uhn.fhir.model.api.annotation;
|
||||
|
||||
/*-
|
||||
* #%L
|
||||
* HAPI FHIR - Core Library
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
|
||||
* %%
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
* #L%
|
||||
*/
|
||||
|
||||
import java.lang.annotation.ElementType;
|
||||
import java.lang.annotation.Retention;
|
||||
import java.lang.annotation.RetentionPolicy;
|
||||
import java.lang.annotation.Target;
|
||||
|
||||
|
||||
/**
|
||||
* This annotation should be added to any {@link ca.uhn.fhir.model.api.IModelJson}
|
||||
* model fields
|
||||
* that contain a password or other credentials. Data in such a field should not be
|
||||
* serialized back to users.
|
||||
*
|
||||
* Note that there is not yet any global automatic processing for this annotation.
|
||||
* Perhaps in the future.
|
||||
*/
|
||||
@Retention(RetentionPolicy.RUNTIME)
|
||||
@Target(ElementType.FIELD)
|
||||
public @interface PasswordField {
|
||||
}
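A sketch of how the new annotation is meant to be applied; the model class and field names below are purely illustrative and are not part of this commit:

import ca.uhn.fhir.model.api.IModelJson;
import ca.uhn.fhir.model.api.annotation.PasswordField;
import com.fasterxml.jackson.annotation.JsonProperty;

// Hypothetical IModelJson model carrying credentials
public class ConnectionDetails implements IModelJson {

	@JsonProperty("username")
	private String myUsername;

	// Flags the field as containing credentials so that (future) serialization
	// logic knows not to echo the value back to clients.
	@PasswordField
	@JsonProperty("password")
	private String myPassword;
}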
@ -22,6 +22,7 @@ package ca.uhn.fhir.util;
|
|||
|
||||
import ca.uhn.fhir.i18n.Msg;
|
||||
import ca.uhn.fhir.model.api.IModelJson;
|
||||
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
|
||||
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
|
||||
import com.fasterxml.jackson.annotation.JsonInclude;
|
||||
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||
|
@ -32,6 +33,7 @@ import javax.annotation.Nonnull;
|
|||
import java.io.IOException;
|
||||
import java.io.StringWriter;
|
||||
import java.io.Writer;
|
||||
import java.util.List;
|
||||
|
||||
public class JsonUtil {
|
||||
|
||||
|
@ -51,28 +53,45 @@ public class JsonUtil {
|
|||
/**
|
||||
* Parse JSON
|
||||
*/
|
||||
public static <T> T deserialize(@Nonnull String theInput, @Nonnull Class<T> theType) throws IOException {
|
||||
return ourMapperPrettyPrint.readerFor(theType).readValue(theInput);
|
||||
public static <T> T deserialize(@Nonnull String theInput, @Nonnull Class<T> theType) {
|
||||
try {
|
||||
return ourMapperPrettyPrint.readerFor(theType).readValue(theInput);
|
||||
} catch (IOException e) {
|
||||
// Should not happen
|
||||
throw new InternalErrorException(Msg.code(2060) + e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse JSON
|
||||
*/
|
||||
public static <T> List<T> deserializeList(@Nonnull String theInput, @Nonnull Class<T> theType) throws IOException {
|
||||
return ourMapperPrettyPrint.readerForListOf(theType).readValue(theInput);
|
||||
}
|
||||
|
||||
/**
|
||||
* Encode JSON
|
||||
*/
|
||||
public static String serialize(@Nonnull Object theInput) throws IOException {
|
||||
public static String serialize(@Nonnull Object theInput) {
|
||||
return serialize(theInput, true);
|
||||
}
|
||||
|
||||
/**
|
||||
* Encode JSON
|
||||
*/
|
||||
public static String serialize(@Nonnull Object theInput, boolean thePrettyPrint) throws IOException {
|
||||
StringWriter sw = new StringWriter();
|
||||
if (thePrettyPrint) {
|
||||
ourMapperPrettyPrint.writeValue(sw, theInput);
|
||||
} else {
|
||||
ourMapperNonPrettyPrint.writeValue(sw, theInput);
|
||||
public static String serialize(@Nonnull Object theInput, boolean thePrettyPrint) {
|
||||
try {
|
||||
StringWriter sw = new StringWriter();
|
||||
if (thePrettyPrint) {
|
||||
ourMapperPrettyPrint.writeValue(sw, theInput);
|
||||
} else {
|
||||
ourMapperNonPrettyPrint.writeValue(sw, theInput);
|
||||
}
|
||||
return sw.toString();
|
||||
} catch (IOException e) {
|
||||
// Should not happen
|
||||
throw new InternalErrorException(Msg.code(2061) + e);
|
||||
}
|
||||
return sw.toString();
|
||||
}
|
||||
|
||||
/**
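Because serialize() and deserialize() no longer declare a checked IOException (failures now surface as InternalErrorException with the new Msg codes), callers can use them inline. A minimal round-trip sketch, assuming a simple illustrative bean that is not part of this commit:

import ca.uhn.fhir.util.JsonUtil;

public class JsonUtilExample {

	// Illustrative bean only
	public static class Person {
		public String name;
		public int age;
	}

	public static void main(String[] theArgs) {
		Person person = new Person();
		person.name = "Example";
		person.age = 40;

		// No try/catch needed any more
		String json = JsonUtil.serialize(person, false);
		Person copy = JsonUtil.deserialize(json, Person.class);
		System.out.println(json + " -> " + copy.name);
	}
}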
@ -56,6 +56,11 @@ import static org.apache.commons.lang3.StringUtils.isBlank;
|
|||
*/
|
||||
public class ParametersUtil {
|
||||
|
||||
public static Optional<String> getNamedParameterValueAsString(FhirContext theCtx, IBaseParameters theParameters, String theParameterName) {
|
||||
Function<IPrimitiveType<?>, String> mapper = t -> defaultIfBlank(t.getValueAsString(), null);
|
||||
return extractNamedParameters(theCtx, theParameters, theParameterName, mapper).stream().findFirst();
|
||||
}
|
||||
|
||||
public static List<String> getNamedParameterValuesAsString(FhirContext theCtx, IBaseParameters theParameters, String theParameterName) {
|
||||
Function<IPrimitiveType<?>, String> mapper = t -> defaultIfBlank(t.getValueAsString(), null);
|
||||
return extractNamedParameters(theCtx, theParameters, theParameterName, mapper);
|
||||
|
@ -70,6 +75,10 @@ public class ParametersUtil {
|
|||
return getNamedParameterValuesAsInteger(theCtx, theParameters, theParameterName).stream().findFirst();
|
||||
}
|
||||
|
||||
public static Optional<IBase> getNamedParameter(FhirContext theCtx, IBaseResource theParameters, String theParameterName) {
|
||||
return getNamedParameters(theCtx, theParameters, theParameterName).stream().findFirst();
|
||||
}
|
||||
|
||||
public static List<IBase> getNamedParameters(FhirContext theCtx, IBaseResource theParameters, String theParameterName) {
|
||||
Validate.notNull(theParameters, "theParameters must not be null");
|
||||
RuntimeResourceDefinition resDef = theCtx.getResourceDefinition(theParameters.getClass());
|
||||
|
@ -311,6 +320,13 @@ public class ParametersUtil {
|
|||
addPart(theContext, theParameter, theName, value);
|
||||
}
|
||||
|
||||
public static void addPartUrl(FhirContext theContext, IBase theParameter, String theName, String theCode) {
|
||||
IPrimitiveType<String> value = (IPrimitiveType<String>) theContext.getElementDefinition("url").newInstance();
|
||||
value.setValue(theCode);
|
||||
|
||||
addPart(theContext, theParameter, theName, value);
|
||||
}
|
||||
|
||||
public static void addPartBoolean(FhirContext theContext, IBase theParameter, String theName, Boolean theValue) {
|
||||
addPart(theContext, theParameter, theName, theContext.getPrimitiveBoolean(theValue));
|
||||
}
|
||||
|
|
|
@ -179,7 +179,7 @@ public class StopWatch {
|
|||
*/
|
||||
public String formatThroughput(long theNumOperations, TimeUnit theUnit) {
|
||||
double throughput = getThroughput(theNumOperations, theUnit);
|
||||
return new DecimalFormat("0.0").format(throughput);
|
||||
return formatThroughput(throughput);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -191,7 +191,18 @@ public class StopWatch {
|
|||
*/
|
||||
public String getEstimatedTimeRemaining(double theCompleteToDate, double theTotal) {
|
||||
double millis = getMillis();
|
||||
long millisRemaining = (long) (((theTotal / theCompleteToDate) * millis) - (millis));
|
||||
return formatEstimatedTimeRemaining(theCompleteToDate, theTotal, millis);
|
||||
}
|
||||
|
||||
/**
|
||||
* Given an amount of something completed so far, and a total amount, calculates how long it will take for something to complete
|
||||
*
|
||||
* @param theCompleteToDate The amount so far
|
||||
* @param theTotal          The total (must be higher than theCompleteToDate)
|
||||
* @return A formatted amount of time
|
||||
*/
|
||||
public static String formatEstimatedTimeRemaining(double theCompleteToDate, double theTotal, double millis) {
|
||||
long millisRemaining = (long) (((theTotal / theCompleteToDate) * millis) - millis);
|
||||
return formatMillis(millisRemaining);
|
||||
}
|
||||
|
||||
|
@ -234,21 +245,8 @@ public class StopWatch {
|
|||
* @see #formatThroughput(long, TimeUnit)
|
||||
*/
|
||||
public double getThroughput(long theNumOperations, TimeUnit theUnit) {
|
||||
if (theNumOperations <= 0) {
|
||||
return 0.0f;
|
||||
}
|
||||
|
||||
long millisElapsed = Math.max(1, getMillis());
|
||||
long periodMillis = theUnit.toMillis(1);
|
||||
|
||||
double denominator = ((double) millisElapsed) / ((double) periodMillis);
|
||||
|
||||
double throughput = (double) theNumOperations / denominator;
|
||||
if (throughput > theNumOperations) {
|
||||
throughput = theNumOperations;
|
||||
}
|
||||
|
||||
return throughput;
|
||||
long millis = getMillis();
|
||||
return getThroughput(theNumOperations, millis, theUnit);
|
||||
}
|
||||
|
||||
public void restart() {
|
||||
|
@ -284,44 +282,35 @@ public class StopWatch {
|
|||
return formatMillis(getMillis());
|
||||
}
|
||||
|
||||
private static class TaskTiming {
|
||||
private long myStart;
|
||||
private long myEnd;
|
||||
private String myTaskName;
|
||||
/**
|
||||
* Format a throughput number (output does not include units)
|
||||
*/
|
||||
public static String formatThroughput(double throughput) {
|
||||
return new DecimalFormat("0.0").format(throughput);
|
||||
}
|
||||
|
||||
public long getEnd() {
|
||||
if (myEnd == 0) {
|
||||
return now();
|
||||
}
|
||||
return myEnd;
|
||||
/**
|
||||
* Calculate throughput
|
||||
*
|
||||
* @param theNumOperations The number of operations completed
|
||||
* @param theMillisElapsed The time elapsed
|
||||
* @param theUnit The unit for the throughput
|
||||
*/
|
||||
public static double getThroughput(long theNumOperations, long theMillisElapsed, TimeUnit theUnit) {
|
||||
if (theNumOperations <= 0) {
|
||||
return 0.0f;
|
||||
}
|
||||
long millisElapsed = Math.max(1, theMillisElapsed);
|
||||
long periodMillis = theUnit.toMillis(1);
|
||||
|
||||
double denominator = ((double) millisElapsed) / ((double) periodMillis);
|
||||
|
||||
double throughput = (double) theNumOperations / denominator;
|
||||
if (throughput > theNumOperations) {
|
||||
throughput = theNumOperations;
|
||||
}
|
||||
|
||||
public TaskTiming setEnd(long theEnd) {
|
||||
myEnd = theEnd;
|
||||
return this;
|
||||
}
|
||||
|
||||
public long getMillis() {
|
||||
return getEnd() - getStart();
|
||||
}
|
||||
|
||||
public long getStart() {
|
||||
return myStart;
|
||||
}
|
||||
|
||||
public TaskTiming setStart(long theStart) {
|
||||
myStart = theStart;
|
||||
return this;
|
||||
}
|
||||
|
||||
public String getTaskName() {
|
||||
return myTaskName;
|
||||
}
|
||||
|
||||
public TaskTiming setTaskName(String theTaskName) {
|
||||
myTaskName = theTaskName;
|
||||
return this;
|
||||
}
|
||||
return throughput;
|
||||
}
|
||||
|
||||
private static NumberFormat getDayFormat() {
|
||||
|
@ -429,4 +418,44 @@ public class StopWatch {
|
|||
ourNowForUnitTest = theNowForUnitTest;
|
||||
}
|
||||
|
||||
private static class TaskTiming {
|
||||
private long myStart;
|
||||
private long myEnd;
|
||||
private String myTaskName;
|
||||
|
||||
public long getEnd() {
|
||||
if (myEnd == 0) {
|
||||
return now();
|
||||
}
|
||||
return myEnd;
|
||||
}
|
||||
|
||||
public TaskTiming setEnd(long theEnd) {
|
||||
myEnd = theEnd;
|
||||
return this;
|
||||
}
|
||||
|
||||
public long getMillis() {
|
||||
return getEnd() - getStart();
|
||||
}
|
||||
|
||||
public long getStart() {
|
||||
return myStart;
|
||||
}
|
||||
|
||||
public TaskTiming setStart(long theStart) {
|
||||
myStart = theStart;
|
||||
return this;
|
||||
}
|
||||
|
||||
public String getTaskName() {
|
||||
return myTaskName;
|
||||
}
|
||||
|
||||
public TaskTiming setTaskName(String theTaskName) {
|
||||
myTaskName = theTaskName;
|
||||
return this;
|
||||
}
|
||||
}
|
||||
|
||||
}
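The StopWatch refactor extracts static overloads so the throughput and time-remaining arithmetic can be reused without a live StopWatch instance. A worked sketch of the formulas with illustrative numbers:

import ca.uhn.fhir.util.StopWatch;
import java.util.concurrent.TimeUnit;

public class StopWatchMathExample {
	public static void main(String[] theArgs) {
		// 250 operations in 5000 ms -> 250 / (5000 / 1000) = 50.0 operations per second
		double perSecond = StopWatch.getThroughput(250, 5000, TimeUnit.SECONDS);
		System.out.println(StopWatch.formatThroughput(perSecond)); // roughly "50.0"

		// 25 of 100 units done after 5000 ms -> ((100 / 25) * 5000) - 5000 = 15000 ms remaining
		System.out.println(StopWatch.formatEstimatedTimeRemaining(25, 100, 5000));
	}
}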
@ -6,7 +6,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>6.0.0-PRE3-SNAPSHOT</version>
|
||||
<version>6.0.0-PRE4-SNAPSHOT</version>
|
||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
|
|
@ -3,14 +3,14 @@
|
|||
<modelVersion>4.0.0</modelVersion>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-fhir-bom</artifactId>
|
||||
<version>6.0.0-PRE3-SNAPSHOT</version>
|
||||
<version>6.0.0-PRE4-SNAPSHOT</version>
|
||||
<packaging>pom</packaging>
|
||||
<name>HAPI FHIR BOM</name>
|
||||
|
||||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>6.0.0-PRE3-SNAPSHOT</version>
|
||||
<version>6.0.0-PRE4-SNAPSHOT</version>
|
||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-fhir</artifactId>
|
||||
<version>6.0.0-PRE3-SNAPSHOT</version>
|
||||
<version>6.0.0-PRE4-SNAPSHOT</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
@ -38,13 +38,6 @@
|
|||
|
||||
<build>
|
||||
<plugins>
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-deploy-plugin</artifactId>
|
||||
<configuration>
|
||||
<skip>true</skip>
|
||||
</configuration>
|
||||
</plugin>
|
||||
</plugins>
|
||||
</build>
|
||||
</project>
|
||||
|
|
|
@ -4,7 +4,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>6.0.0-PRE3-SNAPSHOT</version>
|
||||
<version>6.0.0-PRE4-SNAPSHOT</version>
|
||||
<relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
|
|
@ -187,6 +187,7 @@ public abstract class BaseApp {
|
|||
commands.add(new ImportCsvToConceptMapCommand());
|
||||
commands.add(new HapiFlywayMigrateDatabaseCommand());
|
||||
commands.add(new CreatePackageCommand());
|
||||
commands.add(new BulkImportCommand());
|
||||
return commands;
|
||||
}
|
||||
|
||||
|
|
|
@ -46,6 +46,7 @@ import org.apache.http.client.methods.HttpGet;
|
|||
import org.apache.http.impl.client.CloseableHttpClient;
|
||||
import org.apache.http.impl.client.HttpClientBuilder;
|
||||
import org.hl7.fhir.instance.model.api.IBaseBundle;
|
||||
import org.jetbrains.annotations.Nullable;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.util.Base64Utils;
|
||||
|
@ -343,7 +344,18 @@ public abstract class BaseCommand implements Comparable<BaseCommand> {
|
|||
}
|
||||
}
|
||||
|
||||
public Integer getAndParseNonNegativeIntegerParam(CommandLine theCommandLine, String theName) throws ParseException {
|
||||
int minimum = 0;
|
||||
return doGetAndParseIntegerParam(theCommandLine, theName, minimum);
|
||||
}
|
||||
|
||||
public Integer getAndParsePositiveIntegerParam(CommandLine theCommandLine, String theName) throws ParseException {
|
||||
int minimum = 1;
|
||||
return doGetAndParseIntegerParam(theCommandLine, theName, minimum);
|
||||
}
|
||||
|
||||
@Nullable
|
||||
private Integer doGetAndParseIntegerParam(CommandLine theCommandLine, String theName, int minimum) throws ParseException {
|
||||
String value = theCommandLine.getOptionValue(theName);
|
||||
value = trim(value);
|
||||
if (isBlank(value)) {
|
||||
|
@ -352,12 +364,12 @@ public abstract class BaseCommand implements Comparable<BaseCommand> {
|
|||
|
||||
try {
|
||||
int valueInt = Integer.parseInt(value);
|
||||
if (valueInt < 1) {
|
||||
throw new ParseException(Msg.code(1576) + "Value for argument " + theName + " must be a positive integer, got: " + value);
|
||||
if (valueInt < minimum) {
|
||||
throw new ParseException(Msg.code(1576) + "Value for argument " + theName + " must be an integer >= " + minimum + ", got: " + value);
|
||||
}
|
||||
return valueInt;
|
||||
} catch (NumberFormatException e) {
|
||||
throw new ParseException(Msg.code(1577) + "Value for argument " + theName + " must be a positive integer, got: " + value);
|
||||
throw new ParseException(Msg.code(1577) + "Value for argument " + theName + " must be an integer >= " + minimum + ", got: " + value);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -0,0 +1,227 @@
|
|||
package ca.uhn.fhir.cli;
|
||||
|
||||
/*-
|
||||
* #%L
|
||||
* HAPI FHIR - Command Line Client - API
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
|
||||
* %%
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
* #L%
|
||||
*/
|
||||
|
||||
import ca.uhn.fhir.batch2.jobs.imprt.BulkImportFileServlet;
|
||||
import ca.uhn.fhir.batch2.jobs.imprt.BulkDataImportProvider;
|
||||
import ca.uhn.fhir.context.FhirContext;
|
||||
import ca.uhn.fhir.i18n.Msg;
|
||||
import ca.uhn.fhir.jpa.model.util.JpaConstants;
|
||||
import ca.uhn.fhir.rest.api.Constants;
|
||||
import ca.uhn.fhir.rest.client.api.IGenericClient;
|
||||
import ca.uhn.fhir.rest.client.interceptor.LoggingInterceptor;
|
||||
import ca.uhn.fhir.util.ParametersUtil;
|
||||
import org.apache.commons.cli.CommandLine;
|
||||
import org.apache.commons.cli.Options;
|
||||
import org.apache.commons.cli.ParseException;
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.apache.commons.io.LineIterator;
|
||||
import org.eclipse.jetty.server.Connector;
|
||||
import org.eclipse.jetty.server.Server;
|
||||
import org.eclipse.jetty.server.ServerConnector;
|
||||
import org.eclipse.jetty.servlet.ServletContextHandler;
|
||||
import org.eclipse.jetty.servlet.ServletHolder;
|
||||
import org.hl7.fhir.instance.model.api.IBase;
|
||||
import org.hl7.fhir.instance.model.api.IBaseParameters;
|
||||
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
import java.io.File;
|
||||
import java.io.FileReader;
|
||||
import java.io.IOException;
|
||||
import java.io.Reader;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.ExecutionException;
|
||||
|
||||
public class BulkImportCommand extends BaseCommand {
|
||||
|
||||
public static final String BULK_IMPORT = "bulk-import";
|
||||
public static final String SOURCE_BASE = "source-base";
|
||||
public static final String SOURCE_DIRECTORY = "source-directory";
|
||||
public static final String TARGET_BASE = "target-base";
|
||||
public static final String PORT = "port";
|
||||
private static final Logger ourLog = LoggerFactory.getLogger(BulkImportCommand.class);
|
||||
private static volatile boolean ourEndNow;
|
||||
private BulkImportFileServlet myServlet;
|
||||
private Server myServer;
|
||||
private Integer myPort;
|
||||
|
||||
@Override
|
||||
public String getCommandDescription() {
|
||||
return "Initiates a bulk import against a FHIR server using the $import " +
|
||||
"operation, and creates a local HTTP server to serve the contents. " +
|
||||
"This command does not currently support HTTPS so it is only intended " +
|
||||
"for testing scenarios.";
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getCommandName() {
|
||||
return BULK_IMPORT;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Options getOptions() {
|
||||
Options options = new Options();
|
||||
addFhirVersionOption(options);
|
||||
addRequiredOption(options, null, PORT, PORT, "The port to listen on. If set to 0, an available free port will be selected.");
|
||||
addOptionalOption(options, null, SOURCE_BASE, "base url", "The URL to advertise as the base URL for accessing the files (i.e. this is the address that this command will declare that it is listening on). If not present, the server will default to \"http://localhost:[port]\" which will only work if the server is on the same host.");
|
||||
addRequiredOption(options, null, SOURCE_DIRECTORY, "directory", "The source directory. This directory will be scanned for files with an extension of .json or .ndjson and any files in this directory will be assumed to be NDJSON and uploaded. This command will read the first resource from each file to verify its resource type, and will assume that all resources in the file are of the same type.");
|
||||
addRequiredOption(options, null, TARGET_BASE, "base url", "The base URL of the target FHIR server.");
|
||||
addBasicAuthOption(options);
|
||||
return options;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void run(CommandLine theCommandLine) throws ParseException, ExecutionException {
|
||||
ourEndNow = false;
|
||||
|
||||
parseFhirContext(theCommandLine);
|
||||
|
||||
String baseDirectory = theCommandLine.getOptionValue(SOURCE_DIRECTORY);
|
||||
myPort = getAndParseNonNegativeIntegerParam(theCommandLine, PORT);
|
||||
|
||||
ourLog.info("Scanning directory for NDJSON files: {}", baseDirectory);
|
||||
List<String> resourceTypes = new ArrayList<>();
|
||||
List<File> files = new ArrayList<>();
|
||||
scanDirectoryForJsonFiles(baseDirectory, resourceTypes, files);
|
||||
ourLog.info("Found {} files", files.size());
|
||||
|
||||
ourLog.info("Starting server on port: {}", myPort);
|
||||
List<String> indexes = startServer(myPort, files);
|
||||
String sourceBaseUrl = "http://localhost:" + myPort;
|
||||
if (theCommandLine.hasOption(SOURCE_BASE)) {
|
||||
sourceBaseUrl = theCommandLine.getOptionValue(SOURCE_BASE);
|
||||
}
|
||||
ourLog.info("Server has been started in port: {}", myPort);
|
||||
|
||||
String targetBaseUrl = theCommandLine.getOptionValue(TARGET_BASE);
|
||||
ourLog.info("Initiating bulk import against server: {}", targetBaseUrl);
|
||||
IGenericClient client = newClient(theCommandLine, TARGET_BASE, BASIC_AUTH_PARAM, BEARER_TOKEN_PARAM_LONGOPT);
|
||||
client.registerInterceptor(new LoggingInterceptor(false));
|
||||
|
||||
IBaseParameters request = createRequest(sourceBaseUrl, indexes, resourceTypes);
|
||||
IBaseResource outcome = client
|
||||
.operation()
|
||||
.onServer()
|
||||
.named(JpaConstants.OPERATION_IMPORT)
|
||||
.withParameters(request)
|
||||
.returnResourceType(myFhirCtx.getResourceDefinition("OperationOutcome").getImplementingClass())
|
||||
.withAdditionalHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC)
|
||||
.execute();
|
||||
|
||||
ourLog.info("Got response: {}", myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome));
|
||||
ourLog.info("Bulk import is now running. Do not terminate this command until all files have been downloaded.");
|
||||
|
||||
while (true) {
|
||||
if (ourEndNow) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
private IBaseParameters createRequest(String theBaseUrl, List<String> theIndexes, List<String> theResourceTypes) {
|
||||
|
||||
FhirContext ctx = getFhirContext();
|
||||
IBaseParameters retVal = ParametersUtil.newInstance(ctx);
|
||||
|
||||
ParametersUtil.addParameterToParameters(ctx, retVal, BulkDataImportProvider.PARAM_INPUT_FORMAT, "code", Constants.CT_FHIR_NDJSON);
|
||||
ParametersUtil.addParameterToParameters(ctx, retVal, BulkDataImportProvider.PARAM_INPUT_SOURCE, "code", theBaseUrl);
|
||||
|
||||
IBase storageDetail = ParametersUtil.addParameterToParameters(ctx, retVal, BulkDataImportProvider.PARAM_STORAGE_DETAIL);
|
||||
ParametersUtil.addPartString(ctx, storageDetail, BulkDataImportProvider.PARAM_STORAGE_DETAIL_TYPE, BulkDataImportProvider.PARAM_STORAGE_DETAIL_TYPE_VAL_HTTPS);
|
||||
|
||||
for (int i = 0; i < theIndexes.size(); i++) {
|
||||
IBase input = ParametersUtil.addParameterToParameters(ctx, retVal, BulkDataImportProvider.PARAM_INPUT);
|
||||
ParametersUtil.addPartCode(ctx, input, BulkDataImportProvider.PARAM_INPUT_TYPE, theResourceTypes.get(i));
|
||||
String nextUrl = theBaseUrl + "/download?index=" + theIndexes.get(i);
|
||||
ParametersUtil.addPartUrl(ctx, input, BulkDataImportProvider.PARAM_INPUT_URL, nextUrl);
|
||||
}
|
||||
|
||||
return retVal;
|
||||
}
|
||||
|
||||
private List<String> startServer(int thePort, List<File> files) {
|
||||
List<String> indexes = new ArrayList<>();
|
||||
myServer = new Server(thePort);
|
||||
|
||||
myServlet = new BulkImportFileServlet();
|
||||
for (File t : files) {
|
||||
BulkImportFileServlet.IFileSupplier fileSupplier = () -> new FileReader(t);
|
||||
indexes.add(myServlet.registerFile(fileSupplier));
|
||||
}
|
||||
|
||||
ServletHolder servletHolder = new ServletHolder(myServlet);
|
||||
|
||||
ServletContextHandler contextHandler = new ServletContextHandler();
|
||||
contextHandler.setContextPath("/");
|
||||
contextHandler.addServlet(servletHolder, "/*");
|
||||
|
||||
myServer.setHandler(contextHandler);
|
||||
try {
|
||||
myServer.start();
|
||||
} catch (Exception e) {
|
||||
throw new CommandFailureException(Msg.code(2057) + e.getMessage(), e);
|
||||
}
|
||||
|
||||
Connector[] connectors = myServer.getConnectors();
|
||||
myPort = ((ServerConnector) (connectors[0])).getLocalPort();
|
||||
|
||||
return indexes;
|
||||
}
|
||||
|
||||
private void scanDirectoryForJsonFiles(String baseDirectory, List<String> types, List<File> files) {
|
||||
try {
|
||||
File directory = new File(baseDirectory);
|
||||
FileUtils
|
||||
.streamFiles(directory, false, "json", "ndjson", "JSON", "NDJSON")
|
||||
.filter(t -> t.isFile())
|
||||
.filter(t -> t.exists())
|
||||
.forEach(t -> files.add(t));
|
||||
if (files.isEmpty()) {
|
||||
throw new CommandFailureException(Msg.code(2058) + "No .json/.ndjson files found in directory: " + directory.getAbsolutePath());
|
||||
}
|
||||
|
||||
FhirContext ctx = getFhirContext();
|
||||
for (File next : files) {
|
||||
try (Reader reader = new FileReader(next)) {
|
||||
LineIterator lineIterator = new LineIterator(reader);
|
||||
String firstLine = lineIterator.next();
|
||||
IBaseResource resource = ctx.newJsonParser().parseResource(firstLine);
|
||||
types.add(myFhirCtx.getResourceType(resource));
|
||||
}
|
||||
}
|
||||
|
||||
} catch (IOException e) {
|
||||
throw new CommandFailureException(Msg.code(2059) + e.getMessage(), e);
|
||||
}
|
||||
}
|
||||
|
||||
public static void setEndNowForUnitTest(boolean theEndNow) {
|
||||
ourEndNow = theEndNow;
|
||||
}
|
||||
|
||||
}
|
||||
|
|
@ -0,0 +1,130 @@
|
|||
package ca.uhn.fhir.cli;
|
||||
|
||||
import ca.uhn.fhir.batch2.api.IJobCoordinator;
|
||||
import ca.uhn.fhir.batch2.jobs.imprt.BulkImportJobParameters;
|
||||
import ca.uhn.fhir.batch2.jobs.imprt.BulkDataImportProvider;
|
||||
import ca.uhn.fhir.batch2.model.JobInstanceStartRequest;
|
||||
import ca.uhn.fhir.context.FhirContext;
|
||||
import ca.uhn.fhir.rest.server.interceptor.LoggingInterceptor;
|
||||
import ca.uhn.fhir.test.utilities.HttpClientExtension;
|
||||
import ca.uhn.fhir.test.utilities.server.RestfulServerExtension;
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.apache.http.client.methods.CloseableHttpResponse;
|
||||
import org.apache.http.client.methods.HttpGet;
|
||||
import org.junit.jupiter.api.AfterEach;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.extension.ExtendWith;
|
||||
import org.junit.jupiter.api.extension.RegisterExtension;
|
||||
import org.mockito.ArgumentCaptor;
|
||||
import org.mockito.Captor;
|
||||
import org.mockito.Mock;
|
||||
import org.mockito.junit.jupiter.MockitoExtension;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.FileWriter;
|
||||
import java.io.IOException;
|
||||
import java.io.Writer;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
|
||||
import static org.awaitility.Awaitility.await;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.mockito.ArgumentMatchers.any;
|
||||
import static org.mockito.Mockito.timeout;
|
||||
import static org.mockito.Mockito.verify;
|
||||
import static org.mockito.Mockito.when;
|
||||
|
||||
@ExtendWith(MockitoExtension.class)
|
||||
public class BulkImportCommandTest {
|
||||
|
||||
private static final Logger ourLog = LoggerFactory.getLogger(BulkImportCommandTest.class);
|
||||
|
||||
static {
|
||||
System.setProperty("test", "true");
|
||||
}
|
||||
|
||||
@RegisterExtension
|
||||
public HttpClientExtension myHttpClientExtension = new HttpClientExtension();
|
||||
@Mock
|
||||
private IJobCoordinator myJobCoordinator;
|
||||
private final BulkDataImportProvider myProvider = new BulkDataImportProvider();
|
||||
private final FhirContext myCtx = FhirContext.forR4Cached();
|
||||
@RegisterExtension
|
||||
public RestfulServerExtension myRestfulServerExtension = new RestfulServerExtension(myCtx, myProvider)
|
||||
.registerInterceptor(new LoggingInterceptor());
|
||||
private Path myTempDir;
|
||||
@Captor
|
||||
private ArgumentCaptor<JobInstanceStartRequest> myStartCaptor;
|
||||
|
||||
@BeforeEach
|
||||
public void beforeEach() throws IOException {
|
||||
myProvider.setFhirContext(myCtx);
|
||||
myProvider.setJobCoordinator(myJobCoordinator);
|
||||
myTempDir = Files.createTempDirectory("hapifhir");
|
||||
ourLog.info("Created temp directory: {}", myTempDir);
|
||||
}
|
||||
|
||||
@AfterEach
|
||||
public void afterEach() throws IOException {
|
||||
ourLog.info("Deleting temp directory: {}", myTempDir);
|
||||
FileUtils.deleteDirectory(myTempDir.toFile());
|
||||
BulkImportCommand.setEndNowForUnitTest(true);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testBulkImport() throws IOException {
|
||||
|
||||
String fileContents1 = "{\"resourceType\":\"Observation\"}\n{\"resourceType\":\"Observation\"}";
|
||||
String fileContents2 = "{\"resourceType\":\"Patient\"}\n{\"resourceType\":\"Patient\"}";
|
||||
writeNdJsonFileToTempDirectory(fileContents1, "file1.json");
|
||||
writeNdJsonFileToTempDirectory(fileContents2, "file2.json");
|
||||
|
||||
when(myJobCoordinator.startInstance(any())).thenReturn("THE-JOB-ID");
|
||||
|
||||
// Start the command in a separate thread
|
||||
new Thread(() -> App.main(new String[]{
|
||||
BulkImportCommand.BULK_IMPORT,
|
||||
"--" + BaseCommand.FHIR_VERSION_PARAM_LONGOPT, "r4",
|
||||
"--" + BulkImportCommand.PORT, "0",
|
||||
"--" + BulkImportCommand.SOURCE_DIRECTORY, myTempDir.toAbsolutePath().toString(),
|
||||
"--" + BulkImportCommand.TARGET_BASE, myRestfulServerExtension.getBaseUrl()
|
||||
})).start();
|
||||
|
||||
ourLog.info("Waiting for initiation requests");
|
||||
await().until(() -> myRestfulServerExtension.getRequestContentTypes().size(), equalTo(2));
|
||||
ourLog.info("Initiation requests complete");
|
||||
|
||||
verify(myJobCoordinator, timeout(10000).times(1)).startInstance(myStartCaptor.capture());
|
||||
|
||||
JobInstanceStartRequest startRequest = myStartCaptor.getValue();
|
||||
BulkImportJobParameters jobParameters = startRequest.getParameters(BulkImportJobParameters.class);
|
||||
|
||||
// Reverse order because Patient should be first
|
||||
assertEquals(2, jobParameters.getNdJsonUrls().size());
|
||||
assertEquals(fileContents2, fetchFile(jobParameters.getNdJsonUrls().get(0)));
|
||||
assertEquals(fileContents1, fetchFile(jobParameters.getNdJsonUrls().get(1)));
|
||||
}
|
||||
|
||||
private String fetchFile(String url) throws IOException {
|
||||
String outcome;
|
||||
try (CloseableHttpResponse response = myHttpClientExtension.getClient().execute(new HttpGet(url))) {
|
||||
assertEquals(200, response.getStatusLine().getStatusCode());
|
||||
outcome = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
|
||||
}
|
||||
return outcome;
|
||||
}
|
||||
|
||||
private void writeNdJsonFileToTempDirectory(String fileContents1, String fileName) throws IOException {
|
||||
try (Writer w = new FileWriter(new File(myTempDir.toFile(), fileName))) {
|
||||
w.append(fileContents1);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
}
|
|
@ -6,7 +6,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-fhir-cli</artifactId>
|
||||
<version>6.0.0-PRE3-SNAPSHOT</version>
|
||||
<version>6.0.0-PRE4-SNAPSHOT</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
|
|
@ -6,7 +6,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>6.0.0-PRE3-SNAPSHOT</version>
|
||||
<version>6.0.0-PRE4-SNAPSHOT</version>
|
||||
<relativePath>../../hapi-deployable-pom</relativePath>
|
||||
</parent>
|
||||
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-fhir</artifactId>
|
||||
<version>6.0.0-PRE3-SNAPSHOT</version>
|
||||
<version>6.0.0-PRE4-SNAPSHOT</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
|
|
@ -4,7 +4,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>6.0.0-PRE3-SNAPSHOT</version>
|
||||
<version>6.0.0-PRE4-SNAPSHOT</version>
|
||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
|
|
@ -4,7 +4,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>6.0.0-PRE3-SNAPSHOT</version>
|
||||
<version>6.0.0-PRE4-SNAPSHOT</version>
|
||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>6.0.0-PRE3-SNAPSHOT</version>
|
||||
<version>6.0.0-PRE4-SNAPSHOT</version>
|
||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-fhir</artifactId>
|
||||
<version>6.0.0-PRE3-SNAPSHOT</version>
|
||||
<version>6.0.0-PRE4-SNAPSHOT</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>6.0.0-PRE3-SNAPSHOT</version>
|
||||
<version>6.0.0-PRE4-SNAPSHOT</version>
|
||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
|
|
@ -0,0 +1,6 @@
---
type: add
issue: 3387
title: "A new batch operation framework for executing long running background jobs has been created. This new
  framework is called 'Batch2', and will eventually replace Spring Batch. This framework is intended to be
  much more resilient to failures as well as much more parallelized than Spring Batch jobs."

@ -0,0 +1,5 @@
---
type: add
issue: 3387
title: "Support has now (finally!) been added for the FHIR Bulk Import ($import) operation. This operation
  is the first operation to leverage the new Batch2 framework."

@ -0,0 +1,7 @@
---
type: fix
issue: 3387
title: "A race condition in the Subscription processor meant that a Subscription could fail to
  register right away if it was modified immediately after being created. Note that this issue only
  affects rapid changes, and only caused the subscription to be unregistered for a maximum of one minute
  after its initial creation, so the impact of this issue is expected to be low."

@ -0,0 +1,11 @@
---
- item:
    type: "add"
    title: "The versions of a few dependencies have been bumped to the latest versions
      (dependent HAPI modules listed in brackets):
      <ul>
      <li>FlywayDB (JPA): 8.4.4 -> 8.5.0</li>
      <li>Postgresql (JPA): 42.3.2 -> 42.3.3 (Addresses WS-2022-0080)</li>
      </ul>
      "

@ -11,7 +11,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>6.0.0-PRE3-SNAPSHOT</version>
|
||||
<version>6.0.0-PRE4-SNAPSHOT</version>
|
||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
|
|
@ -4,7 +4,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>6.0.0-PRE3-SNAPSHOT</version>
|
||||
<version>6.0.0-PRE4-SNAPSHOT</version>
|
||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>6.0.0-PRE3-SNAPSHOT</version>
|
||||
<version>6.0.0-PRE4-SNAPSHOT</version>
|
||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
|
|
|
@ -201,6 +201,10 @@ public abstract class BaseSchedulerServiceImpl implements ISchedulerService, Sma
|
|||
if (isSchedulingDisabled()) {
|
||||
return;
|
||||
}
|
||||
|
||||
assert theJobDefinition.getId() != null;
|
||||
assert theJobDefinition.getJobClass() != null;
|
||||
|
||||
ourLog.info("Scheduling {} job {} with interval {}", theInstanceName, theJobDefinition.getId(), StopWatch.formatMillis(theIntervalMillis));
|
||||
if (theJobDefinition.getGroup() == null) {
|
||||
theJobDefinition.setGroup(myDefaultGroup);
|
||||
|
|
|
@ -313,23 +313,35 @@ public class CircularQueueCaptureQueriesListener extends BaseCaptureQueriesListe
|
|||
/**
|
||||
* Log all captured INSERT queries
|
||||
*/
|
||||
public void logInsertQueries() {
|
||||
List<String> queries = getInsertQueries()
|
||||
public int logInsertQueries() {
|
||||
List<SqlQuery> insertQueries = getInsertQueries();
|
||||
List<String> queries = insertQueries
|
||||
.stream()
|
||||
.map(CircularQueueCaptureQueriesListener::formatQueryAsSql)
|
||||
.collect(Collectors.toList());
|
||||
ourLog.info("Insert Queries:\n{}", String.join("\n", queries));
|
||||
|
||||
return insertQueries
|
||||
.stream()
|
||||
.map(t -> t.getSize())
|
||||
.reduce(0, Integer::sum);
|
||||
}
|
||||
|
||||
/**
|
||||
* Log all captured INSERT queries
|
||||
*/
|
||||
public void logUpdateQueries() {
|
||||
List<String> queries = getUpdateQueries()
|
||||
public int logUpdateQueries() {
|
||||
List<SqlQuery> updateQueries = getUpdateQueries();
|
||||
List<String> queries = updateQueries
|
||||
.stream()
|
||||
.map(CircularQueueCaptureQueriesListener::formatQueryAsSql)
|
||||
.collect(Collectors.toList());
|
||||
ourLog.info("Update Queries:\n{}", String.join("\n", queries));
|
||||
|
||||
return updateQueries
|
||||
.stream()
|
||||
.map(t -> t.getSize())
|
||||
.reduce(0, Integer::sum);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -349,12 +361,18 @@ public class CircularQueueCaptureQueriesListener extends BaseCaptureQueriesListe
|
|||
/**
|
||||
* Log all captured DELETE queries
|
||||
*/
|
||||
public void logDeleteQueries() {
|
||||
List<String> queries = getDeleteQueries()
|
||||
public int logDeleteQueries() {
|
||||
List<SqlQuery> deleteQueries = getDeleteQueries();
|
||||
List<String> queries = deleteQueries
|
||||
.stream()
|
||||
.map(CircularQueueCaptureQueriesListener::formatQueryAsSql)
|
||||
.collect(Collectors.toList());
|
||||
ourLog.info("Delete Queries:\n{}", String.join("\n", queries));
|
||||
|
||||
return deleteQueries
|
||||
.stream()
|
||||
.map(t -> t.getSize())
|
||||
.reduce(0, Integer::sum);
|
||||
}
|
||||
|
||||
public int countSelectQueries() {
|
||||
|
@ -362,15 +380,24 @@ public class CircularQueueCaptureQueriesListener extends BaseCaptureQueriesListe
|
|||
}
|
||||
|
||||
public int countInsertQueries() {
|
||||
return getInsertQueries().size();
|
||||
return getInsertQueries()
|
||||
.stream()
|
||||
.map(t->t.getSize())
|
||||
.reduce(0, Integer::sum);
|
||||
}
|
||||
|
||||
public int countUpdateQueries() {
|
||||
return getUpdateQueries().size();
|
||||
return getUpdateQueries()
|
||||
.stream()
|
||||
.map(t->t.getSize())
|
||||
.reduce(0, Integer::sum);
|
||||
}
|
||||
|
||||
public int countDeleteQueries() {
|
||||
return getDeleteQueries().size();
|
||||
return getDeleteQueries()
|
||||
.stream()
|
||||
.map(t->t.getSize())
|
||||
.reduce(0, Integer::sum);
|
||||
}
|
||||
|
||||
public int countSelectQueriesForCurrentThread() {
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>6.0.0-PRE3-SNAPSHOT</version>
|
||||
<version>6.0.0-PRE4-SNAPSHOT</version>
|
||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
@ -132,6 +132,16 @@
|
|||
<artifactId>hapi-fhir-batch</artifactId>
|
||||
<version>${project.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-fhir-storage-batch2</artifactId>
|
||||
<version>${project.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-fhir-storage-batch2-jobs</artifactId>
|
||||
<version>${project.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-fhir-storage</artifactId>
|
||||
|
|
|
@ -0,0 +1,53 @@
|
|||
package ca.uhn.fhir.jpa.batch2;
|
||||
|
||||
/*-
|
||||
* #%L
|
||||
* HAPI FHIR JPA Server
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
|
||||
* %%
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
* #L%
|
||||
*/
|
||||
|
||||
import ca.uhn.fhir.batch2.api.IJobPersistence;
|
||||
import ca.uhn.fhir.batch2.config.BaseBatch2Config;
|
||||
import ca.uhn.fhir.batch2.impl.SynchronizedJobPersistenceWrapper;
|
||||
import ca.uhn.fhir.jpa.dao.data.IBatch2JobInstanceRepository;
|
||||
import ca.uhn.fhir.jpa.dao.data.IBatch2WorkChunkRepository;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
import org.springframework.context.annotation.Primary;
|
||||
|
||||
@Configuration
|
||||
public class JpaBatch2Config extends BaseBatch2Config {
|
||||
|
||||
@Bean
|
||||
public IJobPersistence batch2JobInstancePersister(IBatch2JobInstanceRepository theJobInstanceRepository, IBatch2WorkChunkRepository theWorkChunkRepository) {
|
||||
return new JpaJobPersistenceImpl(theJobInstanceRepository, theWorkChunkRepository);
|
||||
}
|
||||
|
||||
@Primary
|
||||
@Bean
|
||||
public IJobPersistence batch2JobInstancePersisterWrapper(IBatch2JobInstanceRepository theJobInstanceRepository, IBatch2WorkChunkRepository theWorkChunkRepository) {
|
||||
IJobPersistence retVal = batch2JobInstancePersister(theJobInstanceRepository, theWorkChunkRepository);
|
||||
// Avoid H2 synchronization issues caused by
|
||||
// https://github.com/h2database/h2database/issues/1808
|
||||
if ("true".equals(System.getProperty("unit_test_mode"))) {
|
||||
retVal = new SynchronizedJobPersistenceWrapper(retVal);
|
||||
}
|
||||
return retVal;
|
||||
}
|
||||
|
||||
|
||||
}
|
|
@ -0,0 +1,224 @@
|
|||
package ca.uhn.fhir.jpa.batch2;
|
||||
|
||||
/*-
|
||||
* #%L
|
||||
* HAPI FHIR JPA Server
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
|
||||
* %%
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
* #L%
|
||||
*/
|
||||
|
||||
import ca.uhn.fhir.batch2.api.IJobPersistence;
|
||||
import ca.uhn.fhir.batch2.model.JobInstance;
|
||||
import ca.uhn.fhir.batch2.model.StatusEnum;
|
||||
import ca.uhn.fhir.batch2.model.WorkChunk;
|
||||
import ca.uhn.fhir.jpa.dao.data.IBatch2JobInstanceRepository;
|
||||
import ca.uhn.fhir.jpa.dao.data.IBatch2WorkChunkRepository;
|
||||
import ca.uhn.fhir.jpa.entity.Batch2JobInstanceEntity;
|
||||
import ca.uhn.fhir.jpa.entity.Batch2WorkChunkEntity;
|
||||
import ca.uhn.fhir.util.JsonUtil;
|
||||
import org.apache.commons.lang3.Validate;
|
||||
import org.springframework.data.domain.PageRequest;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
import javax.transaction.Transactional;
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
import java.util.UUID;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import static org.apache.commons.lang3.StringUtils.isBlank;
|
||||
|
||||
@Transactional
|
||||
public class JpaJobPersistenceImpl implements IJobPersistence {
|
||||
|
||||
private final IBatch2JobInstanceRepository myJobInstanceRepository;
|
||||
private final IBatch2WorkChunkRepository myWorkChunkRepository;
|
||||
|
||||
/**
|
||||
* Constructor
|
||||
*/
|
||||
public JpaJobPersistenceImpl(IBatch2JobInstanceRepository theJobInstanceRepository, IBatch2WorkChunkRepository theWorkChunkRepository) {
|
||||
Validate.notNull(theJobInstanceRepository);
|
||||
Validate.notNull(theWorkChunkRepository);
|
||||
myJobInstanceRepository = theJobInstanceRepository;
|
||||
myWorkChunkRepository = theWorkChunkRepository;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String storeWorkChunk(String theJobDefinitionId, int theJobDefinitionVersion, String theTargetStepId, String theInstanceId, int theSequence, String theDataSerialized) {
|
||||
Batch2WorkChunkEntity entity = new Batch2WorkChunkEntity();
|
||||
entity.setId(UUID.randomUUID().toString());
|
||||
entity.setSequence(theSequence);
|
||||
entity.setJobDefinitionId(theJobDefinitionId);
|
||||
entity.setJobDefinitionVersion(theJobDefinitionVersion);
|
||||
entity.setTargetStepId(theTargetStepId);
|
||||
entity.setInstanceId(theInstanceId);
|
||||
entity.setSerializedData(theDataSerialized);
|
||||
entity.setCreateTime(new Date());
|
||||
entity.setStatus(StatusEnum.QUEUED);
|
||||
myWorkChunkRepository.save(entity);
|
||||
return entity.getId();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Optional<WorkChunk> fetchWorkChunkSetStartTimeAndMarkInProgress(String theChunkId) {
|
||||
myWorkChunkRepository.updateChunkStatusForStart(theChunkId, new Date(), StatusEnum.IN_PROGRESS);
|
||||
Optional<Batch2WorkChunkEntity> chunk = myWorkChunkRepository.findById(theChunkId);
|
||||
return chunk.map(t -> toChunk(t, true));
|
||||
}
|
||||
|
||||
@Override
|
||||
public String storeNewInstance(JobInstance theInstance) {
|
||||
Validate.isTrue(isBlank(theInstance.getInstanceId()));
|
||||
|
||||
Batch2JobInstanceEntity entity = new Batch2JobInstanceEntity();
|
||||
entity.setId(UUID.randomUUID().toString());
|
||||
entity.setDefinitionId(theInstance.getJobDefinitionId());
|
||||
entity.setDefinitionVersion(theInstance.getJobDefinitionVersion());
|
||||
entity.setStatus(theInstance.getStatus());
|
||||
entity.setParams(theInstance.getParameters());
|
||||
entity.setCreateTime(new Date());
|
||||
|
||||
entity = myJobInstanceRepository.save(entity);
|
||||
return entity.getId();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Optional<JobInstance> fetchInstanceAndMarkInProgress(String theInstanceId) {
|
||||
myJobInstanceRepository.updateInstanceStatus(theInstanceId, StatusEnum.IN_PROGRESS);
|
||||
return fetchInstance(theInstanceId);
|
||||
}
|
||||
|
||||
@Override
|
||||
@Nonnull
|
||||
public Optional<JobInstance> fetchInstance(String theInstanceId) {
|
||||
return myJobInstanceRepository.findById(theInstanceId).map(t -> toInstance(t));
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<JobInstance> fetchInstances(int thePageSize, int thePageIndex) {
|
||||
return myJobInstanceRepository.fetchAll(PageRequest.of(thePageIndex, thePageSize)).stream().map(t -> toInstance(t)).collect(Collectors.toList());
|
||||
}
|
||||
|
||||
private WorkChunk toChunk(Batch2WorkChunkEntity theEntity, boolean theIncludeData) {
|
||||
WorkChunk retVal = new WorkChunk();
|
||||
retVal.setId(theEntity.getId());
|
||||
retVal.setSequence(theEntity.getSequence());
|
||||
retVal.setJobDefinitionId(theEntity.getJobDefinitionId());
|
||||
retVal.setJobDefinitionVersion(theEntity.getJobDefinitionVersion());
|
||||
retVal.setInstanceId(theEntity.getInstanceId());
|
||||
retVal.setTargetStepId(theEntity.getTargetStepId());
|
||||
retVal.setStatus(theEntity.getStatus());
|
||||
retVal.setCreateTime(theEntity.getCreateTime());
|
||||
retVal.setStartTime(theEntity.getStartTime());
|
||||
retVal.setEndTime(theEntity.getEndTime());
|
||||
retVal.setErrorMessage(theEntity.getErrorMessage());
|
||||
		retVal.setErrorCount(theEntity.getErrorCount());
		retVal.setRecordsProcessed(theEntity.getRecordsProcessed());
		if (theIncludeData) {
			if (theEntity.getSerializedData() != null) {
				retVal.setData(theEntity.getSerializedData());
			}
		}
		return retVal;
	}

	private JobInstance toInstance(Batch2JobInstanceEntity theEntity) {
		JobInstance retVal = new JobInstance();
		retVal.setInstanceId(theEntity.getId());
		retVal.setJobDefinitionId(theEntity.getDefinitionId());
		retVal.setJobDefinitionVersion(theEntity.getDefinitionVersion());
		retVal.setStatus(theEntity.getStatus());
		retVal.setCancelled(theEntity.isCancelled());
		retVal.setStartTime(theEntity.getStartTime());
		retVal.setCreateTime(theEntity.getCreateTime());
		retVal.setEndTime(theEntity.getEndTime());
		retVal.setCombinedRecordsProcessed(theEntity.getCombinedRecordsProcessed());
		retVal.setCombinedRecordsProcessedPerSecond(theEntity.getCombinedRecordsProcessedPerSecond());
		retVal.setTotalElapsedMillis(theEntity.getTotalElapsedMillis());
		retVal.setWorkChunksPurged(theEntity.getWorkChunksPurged());
		retVal.setProgress(theEntity.getProgress());
		retVal.setErrorMessage(theEntity.getErrorMessage());
		retVal.setErrorCount(theEntity.getErrorCount());
		retVal.setEstimatedTimeRemaining(theEntity.getEstimatedTimeRemaining());
		retVal.setParameters(theEntity.getParams());
		return retVal;
	}

	@Override
	public void markWorkChunkAsErroredAndIncrementErrorCount(String theChunkId, String theErrorMessage) {
		myWorkChunkRepository.updateChunkStatusAndIncrementErrorCountForEndError(theChunkId, new Date(), theErrorMessage, StatusEnum.ERRORED);
	}

	@Override
	public void markWorkChunkAsFailed(String theChunkId, String theErrorMessage) {
		myWorkChunkRepository.updateChunkStatusAndIncrementErrorCountForEndError(theChunkId, new Date(), theErrorMessage, StatusEnum.FAILED);
	}

	@Override
	public void markWorkChunkAsCompletedAndClearData(String theChunkId, int theRecordsProcessed) {
		myWorkChunkRepository.updateChunkStatusAndClearDataForEndSuccess(theChunkId, new Date(), theRecordsProcessed, StatusEnum.COMPLETED);
	}

	@Override
	public List<WorkChunk> fetchWorkChunksWithoutData(String theInstanceId, int thePageSize, int thePageIndex) {
		List<Batch2WorkChunkEntity> chunks = myWorkChunkRepository.fetchChunks(PageRequest.of(thePageIndex, thePageSize), theInstanceId);
		return chunks.stream().map(t -> toChunk(t, false)).collect(Collectors.toList());
	}

	@Override
	public void updateInstance(JobInstance theInstance) {
		Optional<Batch2JobInstanceEntity> instanceOpt = myJobInstanceRepository.findById(theInstance.getInstanceId());
		Batch2JobInstanceEntity instance = instanceOpt.orElseThrow(() -> new IllegalArgumentException("Unknown instance ID: " + theInstance.getInstanceId()));

		instance.setStartTime(theInstance.getStartTime());
		instance.setEndTime(theInstance.getEndTime());
		instance.setStatus(theInstance.getStatus());
		instance.setCancelled(theInstance.isCancelled());
		instance.setCombinedRecordsProcessed(theInstance.getCombinedRecordsProcessed());
		instance.setCombinedRecordsProcessedPerSecond(theInstance.getCombinedRecordsProcessedPerSecond());
		instance.setTotalElapsedMillis(theInstance.getTotalElapsedMillis());
		instance.setWorkChunksPurged(theInstance.isWorkChunksPurged());
		instance.setProgress(theInstance.getProgress());
		instance.setErrorMessage(theInstance.getErrorMessage());
		instance.setErrorCount(theInstance.getErrorCount());
		instance.setEstimatedTimeRemaining(theInstance.getEstimatedTimeRemaining());

		myJobInstanceRepository.save(instance);
	}

	@Override
	public void deleteInstanceAndChunks(String theInstanceId) {
		myWorkChunkRepository.deleteAllForInstance(theInstanceId);
		myJobInstanceRepository.deleteById(theInstanceId);
	}

	@Override
	public void deleteChunks(String theInstanceId) {
		myWorkChunkRepository.deleteAllForInstance(theInstanceId);
	}

	@Override
	public void markInstanceAsCompleted(String theInstanceId) {
		myJobInstanceRepository.updateInstanceStatus(theInstanceId, StatusEnum.COMPLETED);
	}

	@Override
	public void cancelInstance(String theInstanceId) {
		myJobInstanceRepository.updateInstanceCancelled(theInstanceId, true);
	}
}
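For illustration only (not part of the diff above): a minimal sketch of how a caller might drive the work-chunk persistence methods shown here, paging through a job instance's chunks and marking each one completed or failed. The IJobPersistence handle, the processChunk() helper, the chunk.getId() accessor and the page size of 100 are assumptions made for the sketch, not confirmed API.

	// Hypothetical driver loop over the persistence methods shown above.
	public void drainInstance(IJobPersistence thePersistence, String theInstanceId) {
		int pageIndex = 0;
		List<WorkChunk> chunks;
		do {
			chunks = thePersistence.fetchWorkChunksWithoutData(theInstanceId, 100, pageIndex++);
			for (WorkChunk chunk : chunks) {
				try {
					int recordsProcessed = processChunk(chunk); // hypothetical worker call
					thePersistence.markWorkChunkAsCompletedAndClearData(chunk.getId(), recordsProcessed);
				} catch (Exception e) {
					thePersistence.markWorkChunkAsFailed(chunk.getId(), e.getMessage());
				}
			}
		} while (!chunks.isEmpty());
		thePersistence.markInstanceAsCompleted(theInstanceId);
	}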
@@ -23,12 +23,13 @@ package ca.uhn.fhir.jpa.bulk.export.job;

import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.context.RuntimeSearchParam;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.batch.log.Logs;
import ca.uhn.fhir.jpa.dao.IResultIterator;
import ca.uhn.fhir.jpa.dao.ISearchBuilder;
import ca.uhn.fhir.jpa.dao.data.IMdmLinkDao;
import ca.uhn.fhir.jpa.dao.index.IdHelperService;
import ca.uhn.fhir.jpa.dao.index.IJpaIdHelperService;
import ca.uhn.fhir.jpa.dao.mdm.MdmExpansionCacheSvc;
import ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails;
import ca.uhn.fhir.jpa.partition.SystemRequestDetails;

@@ -76,11 +77,13 @@ public class GroupBulkItemReader extends BaseJpaBulkItemReader implements ItemRe
	private boolean myMdmEnabled;

	@Autowired
	private IdHelperService myIdHelperService;
	private IIdHelperService myIdHelperService;
	@Autowired
	private IMdmLinkDao myMdmLinkDao;
	@Autowired
	private MdmExpansionCacheSvc myMdmExpansionCacheSvc;
	@Autowired
	private IJpaIdHelperService myJpaIdHelperService;

	@Override
	protected Iterator<ResourcePersistentId> getResourcePidIterator() {

@@ -117,13 +120,13 @@ public class GroupBulkItemReader extends BaseJpaBulkItemReader implements ItemRe
	private Iterator<ResourcePersistentId> getExpandedPatientIterator() {
		List<String> members = getMembers();
		List<IIdType> ids = members.stream().map(member -> new IdDt("Patient/" + member)).collect(Collectors.toList());
		List<Long> pidsOrThrowException = myIdHelperService.getPidsOrThrowException(ids);
		List<Long> pidsOrThrowException = myJpaIdHelperService.getPidsOrThrowException(ids);
		Set<Long> patientPidsToExport = new HashSet<>(pidsOrThrowException);

		if (myMdmEnabled) {
			SystemRequestDetails srd = SystemRequestDetails.newSystemRequestAllPartitions();
			IBaseResource group = myDaoRegistry.getResourceDao("Group").read(new IdDt(myGroupId), srd);
			Long pidOrNull = myIdHelperService.getPidOrNull(group);
			Long pidOrNull = myJpaIdHelperService.getPidOrNull(group);
			List<IMdmLinkDao.MdmPidTuple> goldenPidSourcePidTuple = myMdmLinkDao.expandPidsFromGroupPidGivenMatchResult(pidOrNull, MdmMatchResultEnum.MATCH);
			goldenPidSourcePidTuple.forEach(tuple -> {
				patientPidsToExport.add(tuple.getGoldenPid());

@@ -199,7 +202,7 @@ public class GroupBulkItemReader extends BaseJpaBulkItemReader implements ItemRe
		Set<String> expandedIds = new HashSet<>();
		SystemRequestDetails requestDetails = SystemRequestDetails.newSystemRequestAllPartitions();
		IBaseResource group = myDaoRegistry.getResourceDao("Group").read(new IdDt(myGroupId), requestDetails);
		Long pidOrNull = myIdHelperService.getPidOrNull(group);
		Long pidOrNull = myJpaIdHelperService.getPidOrNull(group);

		//Attempt to perform MDM Expansion of membership
		if (myMdmEnabled) {
@@ -1,196 +0,0 @@
package ca.uhn.fhir.jpa.bulk.imprt.provider;

/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */
import ca.uhn.fhir.i18n.Msg;

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc;
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobFileJson;
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobJson;
import ca.uhn.fhir.jpa.bulk.imprt.model.JobFileRowProcessingModeEnum;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.parser.IParser;
import ca.uhn.fhir.rest.annotation.Operation;
import ca.uhn.fhir.rest.annotation.OperationParam;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.EncodingEnum;
import ca.uhn.fhir.rest.api.PreferHeader;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.RestfulServerUtils;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.util.OperationOutcomeUtil;
import com.google.common.annotations.VisibleForTesting;
import org.apache.commons.io.input.ReaderInputStream;
import org.apache.commons.lang3.StringUtils;
import org.apache.http.HeaderElement;
import org.apache.http.NameValuePair;
import org.apache.http.message.BasicHeaderValueParser;
import org.hl7.fhir.instance.model.api.IBaseOperationOutcome;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
import org.hl7.fhir.r4.model.InstantType;
import org.slf4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import javax.servlet.http.HttpServletResponse;
import static org.slf4j.LoggerFactory.getLogger;


public class BulkDataImportProvider {
	private static final Logger ourLog = getLogger(BulkDataImportProvider.class);

	@Autowired
	private IBulkDataImportSvc myBulkDataImportSvc;

	@Autowired
	private FhirContext myFhirContext;

	@VisibleForTesting
	public void setFhirContextForUnitTest(FhirContext theFhirContext) {
		myFhirContext = theFhirContext;
	}

	@VisibleForTesting
	public void setBulkDataImportSvcForUnitTests(IBulkDataImportSvc theBulkDataImportSvc) {
		myBulkDataImportSvc = theBulkDataImportSvc;
	}

	/**
	 * $import
	 */
	@Operation(name = JpaConstants.OPERATION_IMPORT, global = false /* set to true once we can handle this */, manualResponse = true, idempotent = true, manualRequest = true)
	public void imprt(
		@OperationParam(name = JpaConstants.PARAM_IMPORT_JOB_DESCRIPTION, min = 0, max = 1, typeName = "string") IPrimitiveType<String> theJobDescription,
		@OperationParam(name = JpaConstants.PARAM_IMPORT_PROCESSING_MODE, min = 0, max = 1, typeName = "string") IPrimitiveType<String> theProcessingMode,
		@OperationParam(name = JpaConstants.PARAM_IMPORT_FILE_COUNT, min = 0, max = 1, typeName = "integer") IPrimitiveType<Integer> theFileCount,
		@OperationParam(name = JpaConstants.PARAM_IMPORT_BATCH_SIZE, min = 0, max = 1, typeName = "integer") IPrimitiveType<Integer> theBatchSize,
		ServletRequestDetails theRequestDetails
	) throws IOException {
		validatePreferAsyncHeader(theRequestDetails);

		// Import requests are expected to be in NDJson format.
		if (RestfulServerUtils.determineRequestEncodingNoDefault(theRequestDetails) != EncodingEnum.NDJSON) {
			throw new InvalidRequestException(Msg.code(9001) + " An NDJson content type, like " + Constants.CT_FHIR_NDJSON.toString() + " must be provided for $import.");
		}

		BulkImportJobJson theImportJobJson = new BulkImportJobJson();
		theImportJobJson.setJobDescription(theJobDescription == null ? null : theJobDescription.getValueAsString());

		theImportJobJson.setProcessingMode(theProcessingMode == null ? JobFileRowProcessingModeEnum.FHIR_TRANSACTION : JobFileRowProcessingModeEnum.valueOf(theProcessingMode.getValueAsString()));
		theImportJobJson.setBatchSize(theBatchSize == null ? 1 : theBatchSize.getValue());
		theImportJobJson.setFileCount(theFileCount == null ? 1 : theFileCount.getValue());

		// For now, we expect theImportJobJson.getFileCount() to be 1.
		// In the future, the arguments to $import can be changed to allow additional files to be attached to an existing, known job.
		// Then, when the correct number of files have been attached, the job would be started automatically.
		if (theImportJobJson.getFileCount() != 1) {
			throw new InvalidRequestException(Msg.code(9002) + " $import requires " + JpaConstants.PARAM_IMPORT_FILE_COUNT.toString() + " to be exactly 1.");
		}

		List<BulkImportJobFileJson> theInitialFiles = new ArrayList<BulkImportJobFileJson>();

		BulkImportJobFileJson theJobFile = new BulkImportJobFileJson();
		theJobFile.setTenantName(theRequestDetails.getTenantId());
		if (theJobDescription != null) {
			theJobFile.setDescription(theJobDescription.getValueAsString());
		}

		IParser myParser = myFhirContext.newNDJsonParser();

		// We validate the NDJson by parsing it and then re-writing it.
		// In the future, we could add a parameter to skip validation if desired.
		theJobFile.setContents(myParser.encodeResourceToString(myParser.parseResource(theRequestDetails.getInputStream())));

		theInitialFiles.add(theJobFile);

		// Start the job.
		// In a future change, we could add an additional parameter to add files to an existing job.
		// In that world, we would only create a new job if we weren't provided an existing job ID that is to
		// be augmented.
		String theJob = myBulkDataImportSvc.createNewJob(theImportJobJson, theInitialFiles);
		myBulkDataImportSvc.markJobAsReadyForActivation(theJob);
		writePollingLocationToResponseHeaders(theRequestDetails, theJob);
	}

	/**
	 * $import-poll-status
	 */
	@Operation(name = JpaConstants.OPERATION_IMPORT_POLL_STATUS, manualResponse = true, idempotent = true)
	public void importPollStatus(
		@OperationParam(name = JpaConstants.PARAM_IMPORT_POLL_STATUS_JOB_ID, typeName = "string", min = 0, max = 1) IPrimitiveType<String> theJobId,
		ServletRequestDetails theRequestDetails
	) throws IOException {
		HttpServletResponse response = theRequestDetails.getServletResponse();
		theRequestDetails.getServer().addHeadersToResponse(response);
		IBulkDataImportSvc.JobInfo status = myBulkDataImportSvc.getJobStatus(theJobId.getValueAsString());
		IBaseOperationOutcome oo;
		switch (status.getStatus()) {
			case STAGING:
			case READY:
			case RUNNING:
				response.setStatus(Constants.STATUS_HTTP_202_ACCEPTED);
				response.addHeader(Constants.HEADER_X_PROGRESS, "Status set to " + status.getStatus() + " at " + new InstantType(status.getStatusTime()).getValueAsString());
				response.addHeader(Constants.HEADER_RETRY_AFTER, "120");
				break;
			case COMPLETE:
				response.setStatus(Constants.STATUS_HTTP_200_OK);
				response.setContentType(Constants.CT_FHIR_JSON);
				// Create an OperationOutcome response
				oo = OperationOutcomeUtil.newInstance(myFhirContext);
				myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToWriter(oo, response.getWriter());
				response.getWriter().close();
				break;
			case ERROR:
				response.setStatus(Constants.STATUS_HTTP_500_INTERNAL_ERROR);
				response.setContentType(Constants.CT_FHIR_JSON);
				// Create an OperationOutcome response
				oo = OperationOutcomeUtil.newInstance(myFhirContext);
				OperationOutcomeUtil.addIssue(myFhirContext, oo, "error", status.getStatusMessage(), null, null);
				myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToWriter(oo, response.getWriter());
				response.getWriter().close();
		}
	}

	public void writePollingLocationToResponseHeaders(ServletRequestDetails theRequestDetails, String theJob) {
		String serverBase = getServerBase(theRequestDetails);
		String pollLocation = serverBase + "/" + JpaConstants.OPERATION_IMPORT_POLL_STATUS + "?" + JpaConstants.PARAM_IMPORT_POLL_STATUS_JOB_ID + "=" + theJob;
		HttpServletResponse response = theRequestDetails.getServletResponse();
		// Add standard headers
		theRequestDetails.getServer().addHeadersToResponse(response);
		// Successful 202 Accepted
		response.addHeader(Constants.HEADER_CONTENT_LOCATION, pollLocation);
		response.setStatus(Constants.STATUS_HTTP_202_ACCEPTED);
	}

	private String getServerBase(ServletRequestDetails theRequestDetails) {
		return StringUtils.removeEnd(theRequestDetails.getServerBaseForRequest(), "/");
	}

	private void validatePreferAsyncHeader(ServletRequestDetails theRequestDetails) {
		String preferHeader = theRequestDetails.getHeader(Constants.HEADER_PREFER);
		PreferHeader prefer = RestfulServerUtils.parsePreferHeader(null, preferHeader);
		if (prefer.getRespondAsync() == false) {
			throw new InvalidRequestException(Msg.code(9003) + " Must request async processing for $import");
		}
	}
}
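For illustration only (not part of the diff): a sketch of invoking the $import operation described above over plain HTTP, using only the JDK HTTP client. It follows the contract in the provider: an NDJSON body, a "Prefer: respond-async" header, a 202 Accepted response, and the $import-poll-status URL returned in the Content-Location header. The base URL and the one-line Patient payload are placeholders.

	import java.net.URI;
	import java.net.http.HttpClient;
	import java.net.http.HttpRequest;
	import java.net.http.HttpResponse;

	public class ImportClientSketch {
		public static void main(String[] args) throws Exception {
			// One NDJSON line per resource; payload and server base are hypothetical.
			String ndjson = "{\"resourceType\":\"Patient\",\"id\":\"pat-1\"}\n";
			HttpRequest request = HttpRequest.newBuilder()
				.uri(URI.create("http://localhost:8080/fhir/$import"))
				.header("Content-Type", "application/fhir+ndjson")
				.header("Prefer", "respond-async")
				.POST(HttpRequest.BodyPublishers.ofString(ndjson))
				.build();
			HttpResponse<String> response = HttpClient.newHttpClient()
				.send(request, HttpResponse.BodyHandlers.ofString());
			// Expect 202 Accepted; the polling URL comes back in Content-Location.
			System.out.println(response.statusCode() + " " + response.headers().firstValue("Content-Location").orElse(""));
		}
	}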
@@ -23,6 +23,7 @@ package ca.uhn.fhir.jpa.cache;

import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
import ca.uhn.fhir.jpa.dao.index.IdHelperService;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;

@@ -59,7 +60,7 @@ public class ResourceVersionSvcDaoImpl implements IResourceVersionSvc {
	@Autowired
	IResourceTableDao myResourceTableDao;
	@Autowired
	IdHelperService myIdHelperService;
	IIdHelperService myIdHelperService;

	@Override
	@Nonnull
@@ -21,7 +21,6 @@ import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc;
import ca.uhn.fhir.jpa.bulk.export.provider.BulkDataExportProvider;
import ca.uhn.fhir.jpa.bulk.export.svc.BulkDataExportSvcImpl;
import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc;
import ca.uhn.fhir.jpa.bulk.imprt.provider.BulkDataImportProvider;
import ca.uhn.fhir.jpa.bulk.imprt.svc.BulkDataImportSvcImpl;
import ca.uhn.fhir.jpa.cache.IResourceVersionSvc;
import ca.uhn.fhir.jpa.cache.ResourceVersionSvcDaoImpl;

@@ -41,7 +40,8 @@ import ca.uhn.fhir.jpa.dao.expunge.ResourceExpungeService;
import ca.uhn.fhir.jpa.dao.expunge.ResourceTableFKProvider;
import ca.uhn.fhir.jpa.dao.index.DaoResourceLinkResolver;
import ca.uhn.fhir.jpa.dao.index.DaoSearchParamSynchronizer;
import ca.uhn.fhir.jpa.dao.index.IdHelperService;
import ca.uhn.fhir.jpa.dao.index.IJpaIdHelperService;
import ca.uhn.fhir.jpa.dao.index.JpaIdHelperService;
import ca.uhn.fhir.jpa.dao.index.SearchParamWithInlineReferencesExtractor;
import ca.uhn.fhir.jpa.dao.mdm.MdmLinkExpandSvc;
import ca.uhn.fhir.jpa.dao.predicate.PredicateBuilder;

@@ -480,12 +480,6 @@ public class JpaConfig {
		return new BulkDataImportSvcImpl();
	}

	@Bean
	@Lazy
	public BulkDataImportProvider bulkDataImportProvider() {
		return new BulkDataImportProvider();
	}

	@Bean
	public PersistedJpaBundleProviderFactory persistedJpaBundleProviderFactory() {
		return new PersistedJpaBundleProviderFactory();

@@ -735,8 +729,8 @@ public class JpaConfig {
	}

	@Bean
	public IdHelperService idHelperService() {
		return new IdHelperService();
	public IJpaIdHelperService jpaIdHelperService() {
		return new JpaIdHelperService();
	}

	@Bean
@@ -26,6 +26,9 @@ import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Primary;

public class FhirContextR4Config {

	public static final String DEFAULT_PRESERVE_VERSION_REFS = "AuditEvent.entity.what";

	@Bean(name = "primaryFhirContext")
	@Primary
	public FhirContext fhirContextR4() {

@@ -33,7 +36,7 @@ public class FhirContextR4Config {

		// Don't strip versions in some places
		ParserOptions parserOptions = retVal.getParserOptions();
		parserOptions.setDontStripVersionsFromReferencesAtPaths("AuditEvent.entity.what");
		parserOptions.setDontStripVersionsFromReferencesAtPaths(DEFAULT_PRESERVE_VERSION_REFS);

		return retVal;
	}
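For illustration only (not part of the diff): a minimal sketch of the parser-option pattern used above on a standalone FhirContext, so that references at the listed paths keep their version suffix when resources are serialized. The path shown is simply the default defined by this config class.

	import ca.uhn.fhir.context.FhirContext;
	import ca.uhn.fhir.context.ParserOptions;

	public class ParserOptionsSketch {
		public static void main(String[] args) {
			FhirContext ctx = FhirContext.forR4();
			// References matching these paths keep their "/_history/n" suffix instead of
			// being trimmed to a versionless reference when encoding.
			ParserOptions options = ctx.getParserOptions();
			options.setDontStripVersionsFromReferencesAtPaths("AuditEvent.entity.what");
		}
	}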
@@ -1,6 +1,5 @@
package ca.uhn.fhir.jpa.dao;

import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.context.BaseRuntimeChildDefinition;
import ca.uhn.fhir.context.BaseRuntimeElementCompositeDefinition;
import ca.uhn.fhir.context.BaseRuntimeElementDefinition;

@@ -9,6 +8,7 @@ import ca.uhn.fhir.context.FhirVersionEnum;
import ca.uhn.fhir.context.RuntimeChildResourceDefinition;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.context.RuntimeSearchParam;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.api.Pointcut;

@@ -17,6 +17,7 @@ import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IDao;
import ca.uhn.fhir.jpa.api.dao.IJpaDao;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc;
import ca.uhn.fhir.jpa.dao.data.IForcedIdDao;
import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTableDao;

@@ -25,7 +26,6 @@ import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
import ca.uhn.fhir.jpa.dao.data.IResourceTagDao;
import ca.uhn.fhir.jpa.dao.expunge.ExpungeService;
import ca.uhn.fhir.jpa.dao.index.DaoSearchParamSynchronizer;
import ca.uhn.fhir.jpa.dao.index.IdHelperService;
import ca.uhn.fhir.jpa.dao.index.SearchParamWithInlineReferencesExtractor;
import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
import ca.uhn.fhir.jpa.delete.DeleteConflictService;

@@ -187,12 +187,11 @@ import static org.apache.commons.lang3.StringUtils.trim;
@Repository
public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStorageDao implements IDao, IJpaDao<T>, ApplicationContextAware {

	// total attempts to do a tag transaction
	private static final int TOTAL_TAG_READ_ATTEMPTS = 10;

	public static final long INDEX_STATUS_INDEXED = 1L;
	public static final long INDEX_STATUS_INDEXING_FAILED = 2L;
	public static final String NS_JPA_PROFILE = "https://github.com/hapifhir/hapi-fhir/ns/jpa/profile";
	// total attempts to do a tag transaction
	private static final int TOTAL_TAG_READ_ATTEMPTS = 10;
	private static final Logger ourLog = LoggerFactory.getLogger(BaseHapiFhirDao.class);
	private static boolean ourValidationDisabledForUnitTest;
	private static boolean ourDisableIncrementOnUpdateForUnitTest = false;

@@ -200,7 +199,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
	@PersistenceContext(type = PersistenceContextType.TRANSACTION)
	protected EntityManager myEntityManager;
	@Autowired
	protected IdHelperService myIdHelperService;
	protected IIdHelperService myIdHelperService;
	@Autowired
	protected IForcedIdDao myForcedIdDao;
	@Autowired

@@ -1035,18 +1034,24 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
			}
		} else if (theEntity instanceof ResourceTable) {
			ResourceTable resource = (ResourceTable) theEntity;
			version = theEntity.getVersion();
			ResourceHistoryTable history = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(theEntity.getId(), version);
			((ResourceTable) theEntity).setCurrentVersionEntity(history);
			ResourceHistoryTable history;
			if (resource.getCurrentVersionEntity() != null) {
				history = resource.getCurrentVersionEntity();
			} else {
				version = theEntity.getVersion();
				history = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(theEntity.getId(), version);
				((ResourceTable) theEntity).setCurrentVersionEntity(history);

			while (history == null) {
				if (version > 1L) {
					version--;
					history = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(theEntity.getId(), version);
				} else {
					return null;
				while (history == null) {
					if (version > 1L) {
						version--;
						history = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(theEntity.getId(), version);
					} else {
						return null;
					}
				}
			}

			resourceBytes = history.getResource();
			resourceEncoding = history.getEncoding();
			resourceText = history.getResourceTextVc();

@@ -1352,7 +1357,8 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
		}

		if (theUpdateVersion) {
			entity.setVersion(entity.getVersion() + 1);
			long newVersion = entity.getVersion() + 1;
			entity.setVersion(newVersion);
		}

		/*

@@ -1470,6 +1476,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora

		ourLog.debug("Saving history entry {}", historyEntry.getIdDt());
		myResourceHistoryTableDao.save(historyEntry);
		theEntity.setCurrentVersionEntity(historyEntry);

		// Save resource source
		String source = null;
@@ -20,9 +20,9 @@ package ca.uhn.fhir.jpa.dao;
 * #L%
 */

import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.context.FhirVersionEnum;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;

@@ -35,6 +35,7 @@ import ca.uhn.fhir.jpa.api.model.DeleteMethodOutcome;
import ca.uhn.fhir.jpa.api.model.ExpungeOptions;
import ca.uhn.fhir.jpa.api.model.ExpungeOutcome;
import ca.uhn.fhir.jpa.api.model.LazyDaoMethodOutcome;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
import ca.uhn.fhir.jpa.dao.index.IdHelperService;
import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
import ca.uhn.fhir.jpa.delete.DeleteConflictUtil;

@@ -159,6 +160,8 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
	@Autowired(required = false)
	protected IFulltextSearchSvc mySearchDao;
	@Autowired
	protected HapiTransactionService myTransactionService;
	@Autowired
	private MatchResourceUrlService myMatchResourceUrlService;
	@Autowired
	private IResourceReindexingSvc myResourceReindexingSvc;

@@ -169,8 +172,6 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
	@Autowired
	private IRequestPartitionHelperSvc myRequestPartitionHelperService;
	@Autowired
	protected HapiTransactionService myTransactionService;
	@Autowired
	private MatchUrlService myMatchUrlService;
	@Autowired
	private IDeleteExpungeJobSubmitter myDeleteExpungeJobSubmitter;

@@ -491,7 +492,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B

	/**
	 * Creates a base method outcome for a delete request for the provided ID.
	 *
	 * <p>
	 * Additional information may be set on the outcome.
	 *
	 * @param theId - the id of the object being deleted. Eg: Patient/123

@@ -1371,11 +1372,20 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
	private ResourceTable readEntityLatestVersion(IIdType theId, @Nonnull RequestPartitionId theRequestPartitionId, TransactionDetails theTransactionDetails) {
		validateResourceTypeAndThrowInvalidRequestException(theId);

		if (theTransactionDetails.isResolvedResourceIdEmpty(theId.toUnqualifiedVersionless())) {
			throw new ResourceNotFoundException(Msg.code(1997) + theId);
		ResourcePersistentId persistentId = null;
		if (theTransactionDetails != null) {
			if (theTransactionDetails.isResolvedResourceIdEmpty(theId.toUnqualifiedVersionless())) {
				throw new ResourceNotFoundException(Msg.code(1997) + theId);
			}
			if (theTransactionDetails.hasResolvedResourceIds()) {
				persistentId = theTransactionDetails.getResolvedResourceId(theId);
			}
		}

		if (persistentId == null) {
			persistentId = myIdHelperService.resolveResourcePersistentIds(theRequestPartitionId, getResourceName(), theId.getIdPart());
		}

		ResourcePersistentId persistentId = myIdHelperService.resolveResourcePersistentIds(theRequestPartitionId, getResourceName(), theId.getIdPart());
		ResourceTable entity = myEntityManager.find(ResourceTable.class, persistentId.getId());
		if (entity == null) {
			throw new ResourceNotFoundException(Msg.code(1998) + theId);

@@ -1915,10 +1925,14 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
	}

	@VisibleForTesting
	public void setIdHelperSvcForUnitTest(IdHelperService theIdHelperService) {
	public void setIdHelperSvcForUnitTest(IIdHelperService theIdHelperService) {
		myIdHelperService = theIdHelperService;
	}

	private static ResourceIndexedSearchParams toResourceIndexedSearchParams(ResourceTable theEntity) {
		return new ResourceIndexedSearchParams(theEntity);
	}

	private static class IdChecker implements IValidatorModule {

		private final ValidationModeEnum myMode;

@@ -1944,8 +1958,4 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B

	}

	private static ResourceIndexedSearchParams toResourceIndexedSearchParams(ResourceTable theEntity) {
		return new ResourceIndexedSearchParams(theEntity);
	}

}
@@ -3,10 +3,15 @@ package ca.uhn.fhir.jpa.dao;
import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
import ca.uhn.fhir.jpa.api.model.ExpungeOptions;
import ca.uhn.fhir.jpa.api.model.ExpungeOutcome;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.search.builder.SearchBuilder;
import ca.uhn.fhir.jpa.util.QueryChunker;
import ca.uhn.fhir.jpa.util.ResourceCountCache;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor.ActionRequestDetails;
import ca.uhn.fhir.util.StopWatch;
import com.google.common.annotations.VisibleForTesting;

@@ -19,10 +24,18 @@ import org.springframework.transaction.annotation.Transactional;

import javax.annotation.Nullable;
import javax.annotation.PostConstruct;
import javax.persistence.TypedQuery;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.JoinType;
import javax.persistence.criteria.Predicate;
import javax.persistence.criteria.Root;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

/*
 * #%L

@@ -47,6 +60,7 @@ import java.util.Map;
public abstract class BaseHapiFhirSystemDao<T extends IBaseBundle, MT> extends BaseHapiFhirDao<IBaseResource> implements IFhirSystemDao<T, MT> {

	private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(BaseHapiFhirSystemDao.class);
	public static final Predicate[] EMPTY_PREDICATE_ARRAY = new Predicate[0];
	public ResourceCountCache myResourceCountsCache;
	@Autowired
	private TransactionProcessor myTransactionProcessor;

@@ -121,6 +135,114 @@ public abstract class BaseHapiFhirSystemDao<T extends IBaseBundle, MT> extends B
		return myTransactionProcessor.transaction(theRequestDetails, theRequest, true);
	}

	@Override
	@Transactional(propagation = Propagation.MANDATORY)
	public void preFetchResources(List<ResourcePersistentId> theResolvedIds) {
		List<Long> pids = theResolvedIds
			.stream()
			.map(t -> t.getIdAsLong())
			.collect(Collectors.toList());

		new QueryChunker<Long>().chunk(pids, ids -> {

			/*
			 * Pre-fetch the resources we're touching in this transaction in mass - this reduces the
			 * number of database round trips.
			 *
			 * The thresholds below are kind of arbitrary. It's not
			 * actually guaranteed that this pre-fetching will help (e.g. if a Bundle contains
			 * a bundle of NOP conditional creates for example, the pre-fetching is actually loading
			 * more data than would otherwise be loaded).
			 *
			 * However, for realistic average workloads, this should reduce the number of round trips.
			 */
			if (ids.size() >= 2) {
				List<ResourceTable> loadedResourceTableEntries = new ArrayList<>();
				preFetchIndexes(ids, "forcedId", "myForcedId", loadedResourceTableEntries);

				List<Long> entityIds;

				entityIds = loadedResourceTableEntries.stream().filter(t -> t.isParamsStringPopulated()).map(t -> t.getId()).collect(Collectors.toList());
				if (entityIds.size() > 0) {
					preFetchIndexes(entityIds, "string", "myParamsString", null);
				}

				entityIds = loadedResourceTableEntries.stream().filter(t -> t.isParamsTokenPopulated()).map(t -> t.getId()).collect(Collectors.toList());
				if (entityIds.size() > 0) {
					preFetchIndexes(entityIds, "token", "myParamsToken", null);
				}

				entityIds = loadedResourceTableEntries.stream().filter(t -> t.isParamsDatePopulated()).map(t -> t.getId()).collect(Collectors.toList());
				if (entityIds.size() > 0) {
					preFetchIndexes(entityIds, "date", "myParamsDate", null);
				}

				entityIds = loadedResourceTableEntries.stream().filter(t -> t.isParamsQuantityPopulated()).map(t -> t.getId()).collect(Collectors.toList());
				if (entityIds.size() > 0) {
					preFetchIndexes(entityIds, "quantity", "myParamsQuantity", null);
				}

				entityIds = loadedResourceTableEntries.stream().filter(t -> t.isHasLinks()).map(t -> t.getId()).collect(Collectors.toList());
				if (entityIds.size() > 0) {
					preFetchIndexes(entityIds, "resourceLinks", "myResourceLinks", null);
				}

				entityIds = loadedResourceTableEntries.stream().filter(t -> t.isHasTags()).map(t -> t.getId()).collect(Collectors.toList());
				if (entityIds.size() > 0) {
					myResourceTagDao.findByResourceIds(entityIds);
					preFetchIndexes(entityIds, "tags", "myTags", null);
				}

				new QueryChunker<ResourceTable>().chunk(loadedResourceTableEntries, SearchBuilder.getMaximumPageSize() / 2, entries -> {

					Map<Long, ResourceTable> entities = entries
						.stream()
						.collect(Collectors.toMap(t -> t.getId(), t -> t));

					CriteriaBuilder b = myEntityManager.getCriteriaBuilder();
					CriteriaQuery<ResourceHistoryTable> q = b.createQuery(ResourceHistoryTable.class);
					Root<ResourceHistoryTable> from = q.from(ResourceHistoryTable.class);

					from.fetch("myProvenance", JoinType.LEFT);

					List<Predicate> orPredicates = new ArrayList<>();
					for (ResourceTable next : entries) {
						Predicate resId = b.equal(from.get("myResourceId"), next.getId());
						Predicate resVer = b.equal(from.get("myResourceVersion"), next.getVersion());
						orPredicates.add(b.and(resId, resVer));
					}
					q.where(b.or(orPredicates.toArray(EMPTY_PREDICATE_ARRAY)));
					List<ResourceHistoryTable> resultList = myEntityManager.createQuery(q).getResultList();
					for (ResourceHistoryTable next : resultList) {
						ResourceTable nextEntity = entities.get(next.getResourceId());
						if (nextEntity != null) {
							nextEntity.setCurrentVersionEntity(next);
						}
					}

				});

			}

		});
	}

	private void preFetchIndexes(List<Long> theIds, String typeDesc, String fieldName, @Nullable List<ResourceTable> theEntityListToPopulate) {
		new QueryChunker<Long>().chunk(theIds, ids -> {
			TypedQuery<ResourceTable> query = myEntityManager.createQuery("FROM ResourceTable r LEFT JOIN FETCH r." + fieldName + " WHERE r.myId IN ( :IDS )", ResourceTable.class);
			query.setParameter("IDS", ids);
			List<ResourceTable> indexFetchOutcome = query.getResultList();
			ourLog.debug("Pre-fetched {} {} indexes", indexFetchOutcome.size(), typeDesc);
			if (theEntityListToPopulate != null) {
				theEntityListToPopulate.addAll(indexFetchOutcome);
			}
		});
	}

	@Nullable
	@Override
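For illustration only (not part of the diff): the chunking idea behind QueryChunker above, splitting a long ID list into smaller slices so each generated IN (...) clause stays a manageable size, shown as a plain standalone helper. The slice size used in the demo is arbitrary and is not claimed to be the value HAPI uses internally.

	import java.util.List;
	import java.util.function.Consumer;

	public class ChunkSketch {
		// Hand the consumer each slice of at most theChunkSize ids, mirroring the QueryChunker usage above.
		static <T> void chunk(List<T> theIds, int theChunkSize, Consumer<List<T>> theConsumer) {
			for (int i = 0; i < theIds.size(); i += theChunkSize) {
				theConsumer.accept(theIds.subList(i, Math.min(i + theChunkSize, theIds.size())));
			}
		}

		public static void main(String[] args) {
			List<Integer> ids = List.of(1, 2, 3, 4, 5);
			chunk(ids, 2, slice -> System.out.println(slice)); // prints [1, 2] then [3, 4] then [5]
		}
	}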
@@ -20,11 +20,11 @@ package ca.uhn.fhir.jpa.dao;
 * #L%
 */

import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.dao.index.IdHelperService;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;

@@ -71,7 +71,7 @@ public class HistoryBuilder {
	@Autowired
	private FhirContext myCtx;
	@Autowired
	private IdHelperService myIdHelperService;
	private IIdHelperService myIdHelperService;

	/**
	 * Constructor
@@ -32,6 +32,7 @@ import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.dao.IDao;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
import ca.uhn.fhir.jpa.dao.data.IResourceSearchViewDao;
import ca.uhn.fhir.jpa.dao.data.IResourceTagDao;
import ca.uhn.fhir.jpa.dao.index.IdHelperService;

@@ -149,7 +150,7 @@ public class LegacySearchBuilder implements ISearchBuilder {
	@Autowired
	private FhirContext myContext;
	@Autowired
	private IdHelperService myIdHelperService;
	private IIdHelperService myIdHelperService;
	@Autowired(required = false)
	private IFulltextSearchSvc myFulltextSearchSvc;
	@Autowired(required = false)
@@ -24,7 +24,9 @@ import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
import ca.uhn.fhir.jpa.config.HapiFhirHibernateJpaDialect;
import ca.uhn.fhir.jpa.dao.index.IdHelperService;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;

@@ -49,6 +51,7 @@ import org.hl7.fhir.instance.model.api.IIdType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;

import javax.annotation.Nullable;
import javax.persistence.EntityManager;

@@ -80,12 +83,14 @@ public class TransactionProcessor extends BaseTransactionProcessor {

	public static final Pattern SINGLE_PARAMETER_MATCH_URL_PATTERN = Pattern.compile("^[^?]+[?][a-z0-9-]+=[^&,]+$");
	private static final Logger ourLog = LoggerFactory.getLogger(TransactionProcessor.class);
	@Autowired
	private ApplicationContext myApplicationContext;
	@PersistenceContext(type = PersistenceContextType.TRANSACTION)
	private EntityManager myEntityManager;
	@Autowired(required = false)
	private HapiFhirHibernateJpaDialect myHapiFhirHibernateJpaDialect;
	@Autowired
	private IdHelperService myIdHelperService;
	private IIdHelperService myIdHelperService;
	@Autowired
	private PartitionSettings myPartitionSettings;
	@Autowired

@@ -271,38 +276,8 @@ public class TransactionProcessor extends BaseTransactionProcessor {
			}
		}


		/*
		 * Pre-fetch the resources we're touching in this transaction in mass - this reduced the
		 * number of database round trips.
		 *
		 * The thresholds below are kind of arbitrary. It's not
		 * actually guaranteed that this pre-fetching will help (e.g. if a Bundle contains
		 * a bundle of NOP conditional creates for example, the pre-fetching is actually loading
		 * more data than would otherwise be loaded).
		 *
		 * However, for realistic average workloads, this should reduce the number of round trips.
		 */
		if (idsToPreFetch.size() > 2) {
			List<ResourceTable> loadedResourceTableEntries = preFetchIndexes(idsToPreFetch, "forcedId", "myForcedId");

			if (loadedResourceTableEntries.stream().filter(t -> t.isParamsStringPopulated()).count() > 1) {
				preFetchIndexes(idsToPreFetch, "string", "myParamsString");
			}
			if (loadedResourceTableEntries.stream().filter(t -> t.isParamsTokenPopulated()).count() > 1) {
				preFetchIndexes(idsToPreFetch, "token", "myParamsToken");
			}
			if (loadedResourceTableEntries.stream().filter(t -> t.isParamsDatePopulated()).count() > 1) {
				preFetchIndexes(idsToPreFetch, "date", "myParamsDate");
			}
			if (loadedResourceTableEntries.stream().filter(t -> t.isParamsDatePopulated()).count() > 1) {
				preFetchIndexes(idsToPreFetch, "quantity", "myParamsQuantity");
			}
			if (loadedResourceTableEntries.stream().filter(t -> t.isHasLinks()).count() > 1) {
				preFetchIndexes(idsToPreFetch, "resourceLinks", "myResourceLinks");
			}

		}
		IFhirSystemDao<?,?> systemDao = myApplicationContext.getBean(IFhirSystemDao.class);
		systemDao.preFetchResources(ResourcePersistentId.fromLongList(idsToPreFetch));

	}

@@ -352,14 +327,6 @@ public class TransactionProcessor extends BaseTransactionProcessor {
		nextSearchParameterMap.setResolved(true);
	}

	private List<ResourceTable> preFetchIndexes(List<Long> ids, String typeDesc, String fieldName) {
		TypedQuery<ResourceTable> query = myEntityManager.createQuery("FROM ResourceTable r LEFT JOIN FETCH r." + fieldName + " WHERE r.myId IN ( :IDS )", ResourceTable.class);
		query.setParameter("IDS", ids);
		List<ResourceTable> indexFetchOutcome = query.getResultList();
		ourLog.debug("Pre-fetched {} {}} indexes", indexFetchOutcome.size(), typeDesc);
		return indexFetchOutcome;
	}

	@Override
	protected void flushSession(Map<IIdType, DaoMethodOutcome> theIdToPersistedOutcome) {
		try {

@@ -393,10 +360,15 @@ public class TransactionProcessor extends BaseTransactionProcessor {
	}

	@VisibleForTesting
	public void setIdHelperServiceForUnitTest(IdHelperService theIdHelperService) {
	public void setIdHelperServiceForUnitTest(IIdHelperService theIdHelperService) {
		myIdHelperService = theIdHelperService;
	}

	@VisibleForTesting
	public void setApplicationContextForUnitTest(ApplicationContext theAppCtx) {
		myApplicationContext = theAppCtx;
	}

	private static class MatchUrlToResolve {

		private final String myRequestUrl;
@@ -0,0 +1,45 @@
package ca.uhn.fhir.jpa.dao.data;

/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2022 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import ca.uhn.fhir.batch2.model.StatusEnum;
import ca.uhn.fhir.jpa.entity.Batch2JobInstanceEntity;
import org.springframework.data.domain.Pageable;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;

import java.util.List;

public interface IBatch2JobInstanceRepository extends JpaRepository<Batch2JobInstanceEntity, String>, IHapiFhirJpaRepository {

	@Modifying
	@Query("UPDATE Batch2JobInstanceEntity e SET e.myStatus = :status WHERE e.myId = :id")
	void updateInstanceStatus(@Param("id") String theInstanceId, @Param("status") StatusEnum theInProgress);

	@Query("SELECT e FROM Batch2JobInstanceEntity e ORDER BY e.myCreateTime ASC")
	List<Batch2JobInstanceEntity> fetchAll(Pageable thePageRequest);

	@Modifying
	@Query("UPDATE Batch2JobInstanceEntity e SET e.myCancelled = :cancelled WHERE e.myId = :id")
	void updateInstanceCancelled(@Param("id") String theInstanceId, @Param("cancelled") boolean theCancelled);
}
@@ -0,0 +1,54 @@
package ca.uhn.fhir.jpa.dao.data;

/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2022 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import ca.uhn.fhir.batch2.model.StatusEnum;
import ca.uhn.fhir.jpa.entity.Batch2WorkChunkEntity;
import org.springframework.data.domain.Pageable;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;

import java.util.Date;
import java.util.List;

public interface IBatch2WorkChunkRepository extends JpaRepository<Batch2WorkChunkEntity, String>, IHapiFhirJpaRepository {

	@Query("SELECT e FROM Batch2WorkChunkEntity e WHERE e.myInstanceId = :instanceId ORDER BY e.mySequence ASC")
	List<Batch2WorkChunkEntity> fetchChunks(Pageable thePageRequest, @Param("instanceId") String theInstanceId);

	@Modifying
	@Query("UPDATE Batch2WorkChunkEntity e SET e.myStatus = :status, e.myEndTime = :et, e.myRecordsProcessed = :rp, e.mySerializedData = null WHERE e.myId = :id")
	void updateChunkStatusAndClearDataForEndSuccess(@Param("id") String theChunkId, @Param("et") Date theEndTime, @Param("rp") int theRecordsProcessed, @Param("status") StatusEnum theInProgress);

	@Modifying
	@Query("UPDATE Batch2WorkChunkEntity e SET e.myStatus = :status, e.myEndTime = :et, e.myErrorMessage = :em, e.myErrorCount = e.myErrorCount + 1 WHERE e.myId = :id")
	void updateChunkStatusAndIncrementErrorCountForEndError(@Param("id") String theChunkId, @Param("et") Date theEndTime, @Param("em") String theErrorMessage, @Param("status") StatusEnum theInProgress);

	@Modifying
	@Query("UPDATE Batch2WorkChunkEntity e SET e.myStatus = :status, e.myStartTime = :st WHERE e.myId = :id")
	void updateChunkStatusForStart(@Param("id") String theChunkId, @Param("st") Date theStartedTime, @Param("status") StatusEnum theInProgress);

	@Modifying
	@Query("DELETE FROM Batch2WorkChunkEntity e WHERE e.myInstanceId = :instanceId")
	void deleteAllForInstance(@Param("instanceId") String theInstanceId);
}
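For illustration only (not part of the diff): a small sketch of calling one of the @Modifying update queries above. Spring Data executes these as bulk JPQL updates, so the call needs an active transaction; the wrapping service class and the IN_PROGRESS status constant are assumptions made for the sketch.

	import ca.uhn.fhir.batch2.model.StatusEnum;
	import ca.uhn.fhir.jpa.dao.data.IBatch2WorkChunkRepository;
	import org.springframework.stereotype.Service;
	import org.springframework.transaction.annotation.Transactional;

	import java.util.Date;

	// Hypothetical caller of the repository's bulk update query.
	@Service
	public class WorkChunkStarterSketch {

		private final IBatch2WorkChunkRepository myWorkChunkRepository;

		public WorkChunkStarterSketch(IBatch2WorkChunkRepository theWorkChunkRepository) {
			myWorkChunkRepository = theWorkChunkRepository;
		}

		@Transactional
		public void startChunk(String theChunkId) {
			// StatusEnum.IN_PROGRESS is assumed here; any status constant from the enum would work the same way.
			myWorkChunkRepository.updateChunkStatusForStart(theChunkId, new Date(), StatusEnum.IN_PROGRESS);
		}
	}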
@@ -23,6 +23,8 @@ package ca.uhn.fhir.jpa.dao.expunge;
import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.jpa.entity.Batch2JobInstanceEntity;
import ca.uhn.fhir.jpa.entity.Batch2WorkChunkEntity;
import ca.uhn.fhir.jpa.entity.BulkImportJobEntity;
import ca.uhn.fhir.jpa.entity.BulkImportJobFileEntity;
import ca.uhn.fhir.jpa.entity.PartitionEntity;

@@ -126,6 +128,8 @@ public class ExpungeEverythingService {
			counter.addAndGet(doExpungeEverythingQuery("UPDATE " + TermCodeSystem.class.getSimpleName() + " d SET d.myCurrentVersion = null"));
			return null;
		});
		counter.addAndGet(expungeEverythingByTypeWithoutPurging(Batch2WorkChunkEntity.class));
		counter.addAndGet(expungeEverythingByTypeWithoutPurging(Batch2JobInstanceEntity.class));
		counter.addAndGet(expungeEverythingByTypeWithoutPurging(NpmPackageVersionResourceEntity.class));
		counter.addAndGet(expungeEverythingByTypeWithoutPurging(NpmPackageVersionEntity.class));
		counter.addAndGet(expungeEverythingByTypeWithoutPurging(NpmPackageEntity.class));
@@ -26,6 +26,8 @@ import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
import ca.uhn.fhir.jpa.dao.data.IForcedIdDao;
import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTableDao;
import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTagDao;
import ca.uhn.fhir.jpa.dao.data.IResourceIndexedComboStringUniqueDao;

@@ -75,6 +77,8 @@ import java.util.concurrent.atomic.AtomicInteger;
public class ResourceExpungeService implements IResourceExpungeService {
	private static final Logger ourLog = LoggerFactory.getLogger(ResourceExpungeService.class);

	@Autowired
	private IForcedIdDao myForcedIdDao;
	@Autowired
	private IResourceTableDao myResourceTableDao;
	@Autowired

@@ -104,7 +108,7 @@ public class ResourceExpungeService implements IResourceExpungeService {
	@Autowired
	private IResourceTagDao myResourceTagDao;
	@Autowired
	private IdHelperService myIdHelperService;
	private IIdHelperService myIdHelperService;
	@Autowired
	private IResourceHistoryTagDao myResourceHistoryTagDao;
	@Autowired

@@ -257,7 +261,7 @@ public class ResourceExpungeService implements IResourceExpungeService {
			ForcedId forcedId = resource.getForcedId();
			resource.setForcedId(null);
			myResourceTableDao.saveAndFlush(resource);
			myIdHelperService.delete(forcedId);
			myForcedIdDao.deleteByPid(forcedId.getId());
		}

		myResourceTableDao.deleteByPid(resource.getId());
@@ -31,6 +31,7 @@ import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
import ca.uhn.fhir.jpa.model.cross.IResourceLookup;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.searchparam.extractor.IResourceLinkResolver;

@@ -73,7 +74,7 @@ public class DaoResourceLinkResolver implements IResourceLinkResolver {
	@Autowired
	private FhirContext myContext;
	@Autowired
	private IdHelperService myIdHelperService;
	private IIdHelperService myIdHelperService;
	@Autowired
	private DaoRegistry myDaoRegistry;
@@ -0,0 +1,96 @@
package ca.uhn.fhir.jpa.dao.index;

/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2022 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
import org.hl7.fhir.instance.model.api.IAnyResource;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;

import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.List;
import java.util.Set;

/**
 * This class is an analog to {@link IIdHelperService} but with additional JPA server methods
 * added.
 *
 * JA 2022-02-17 - I moved these methods out of IdHelperService because I want to reuse
 * IdHelperService in storage-engine-neutral batch tasks such as bulk import. These methods
 * are all just being used by MDM, so they're JPA specific. I believe it should be possible
 * though to just replace all of these calls with equivalents from IdHelperService,
 * at which point this interface and its implementation could just go away.
 *
 * None of the methods here are partition aware, so it's not great to use them
 * anyhow. The equivalents in {@link IIdHelperService} are probably a bit more
 * clunky because you have to convert between {@link Long} and
 * {@link ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId} to use them,
 * but they also have caching and partition awareness so the tradeoff for that
 * extra effort is that they are better.
 */
public interface IJpaIdHelperService extends IIdHelperService {

	/**
	 * @deprecated This method doesn't take a partition ID as input, so it is unsafe. It
	 * should be reworked to include the partition ID before any new use is incorporated
	 */
	@Deprecated
	@Nonnull
	List<Long> getPidsOrThrowException(List<IIdType> theIds);

	/**
	 * @deprecated This method doesn't take a partition ID as input, so it is unsafe. It
	 * should be reworked to include the partition ID before any new use is incorporated
	 */
	@Deprecated
	@Nullable
	Long getPidOrNull(IBaseResource theResource);

	/**
	 * @deprecated This method doesn't take a partition ID as input, so it is unsafe. It
	 * should be reworked to include the partition ID before any new use is incorporated
	 */
	@Deprecated
	@Nonnull
	Long getPidOrThrowException(IIdType theId);

	@Nonnull
	Long getPidOrThrowException(@Nonnull IAnyResource theResource);

	IIdType resourceIdFromPidOrThrowException(Long thePid);

	/**
	 * Given a set of PIDs, return a set of public FHIR Resource IDs.
	 * This function will resolve a forced ID if it resolves, and if it fails to resolve to a forced ID, will just return the pid.
	 * Example:
	 * Let's say we have Patient/1 (pid == 1), Patient/pat1 (pid == 2), Patient/3 (pid == 3), their pids would resolve as follows:
	 * <p>
	 * [1,2,3] -> ["1","pat1","3"]
	 *
	 * @param thePids The Set of pids you would like to resolve to external FHIR Resource IDs.
	 * @return A Set of strings representing the FHIR IDs of the pids.
	 */
	Set<String> translatePidsToFhirResourceIds(Set<Long> thePids);

}
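For illustration only (not part of the diff): the Long versus ResourcePersistentId tradeoff described in the javadoc above, shown as a small conversion sketch for moving between the JPA-specific helper and the storage-neutral IIdHelperService. Both conversion calls appear elsewhere in this changeset; the wrapping class and the sample pid values are assumptions.

	import java.util.List;
	import java.util.stream.Collectors;

	import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;

	public class PidConversionSketch {
		public static void main(String[] args) {
			// Raw JPA pids, as the Long-based methods above hand back.
			List<Long> rawPids = List.of(1L, 2L, 3L);

			// Wrap them for ResourcePersistentId-based APIs (the same helper the transaction pre-fetch uses).
			List<ResourcePersistentId> persistentIds = ResourcePersistentId.fromLongList(rawPids);

			// Unwrap again where a Long-based signature is unavoidable.
			List<Long> backToLongs = persistentIds.stream()
				.map(ResourcePersistentId::getIdAsLong)
				.collect(Collectors.toList());
			System.out.println(backToLongs);
		}
	}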
@ -24,13 +24,14 @@ import ca.uhn.fhir.context.FhirContext;
|
|||
import ca.uhn.fhir.i18n.Msg;
|
||||
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
|
||||
import ca.uhn.fhir.jpa.api.config.DaoConfig;
|
||||
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
|
||||
import ca.uhn.fhir.jpa.dao.data.IForcedIdDao;
|
||||
import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
|
||||
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
|
||||
import ca.uhn.fhir.jpa.model.cross.IResourceLookup;
|
||||
import ca.uhn.fhir.jpa.model.cross.ResourceLookup;
|
||||
import ca.uhn.fhir.jpa.model.entity.ForcedId;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
|
||||
import ca.uhn.fhir.jpa.search.builder.SearchBuilder;
|
||||
import ca.uhn.fhir.jpa.util.MemoryCacheService;
|
||||
import ca.uhn.fhir.jpa.util.QueryChunker;
|
||||
import ca.uhn.fhir.model.primitive.IdDt;
|
||||
|
@ -43,8 +44,6 @@ import com.google.common.collect.ListMultimap;
|
|||
import com.google.common.collect.MultimapBuilder;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.commons.lang3.Validate;
|
||||
import org.hl7.fhir.instance.model.api.IAnyResource;
|
||||
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||
import org.hl7.fhir.instance.model.api.IIdType;
|
||||
import org.hl7.fhir.r4.model.IdType;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
@ -95,9 +94,9 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank;
|
|||
* </p>
|
||||
*/
|
||||
@Service
|
||||
public class IdHelperService {
|
||||
public class IdHelperService implements IIdHelperService {
|
||||
public static final Predicate[] EMPTY_PREDICATE_ARRAY = new Predicate[0];
|
||||
private static final String RESOURCE_PID = "RESOURCE_PID";
|
||||
public static final String RESOURCE_PID = "RESOURCE_PID";
|
||||
@Autowired
|
||||
protected IForcedIdDao myForcedIdDao;
|
||||
@Autowired
|
||||
|
@ -119,16 +118,13 @@ public class IdHelperService {
|
|||
myDontCheckActiveTransactionForUnitTest = theDontCheckActiveTransactionForUnitTest;
|
||||
}
|
||||
|
||||
public void delete(ForcedId forcedId) {
|
||||
myForcedIdDao.deleteByPid(forcedId.getId());
|
||||
}
|
||||
|
||||
/**
|
||||
* Given a forced ID, convert it to its Long value. Since you are allowed to use string IDs for resources, we need to
|
||||
* convert those to the underlying Long values that are stored, for lookup and comparison purposes.
|
||||
*
|
||||
* @throws ResourceNotFoundException If the ID can not be found
|
||||
*/
|
||||
@Override
|
||||
@Nonnull
|
||||
public IResourceLookup resolveResourceIdentity(@Nonnull RequestPartitionId theRequestPartitionId, String theResourceType, String theResourceId) throws ResourceNotFoundException {
|
||||
assert myDontCheckActiveTransactionForUnitTest || TransactionSynchronizationManager.isSynchronizationActive();
|
||||
|
@ -161,6 +157,7 @@ public class IdHelperService {
|
|||
* If any resource is not found, it will throw ResourceNotFound exception
|
||||
* (and no map will be returned)
|
||||
*/
|
||||
@Override
|
||||
@Nonnull
|
||||
public Map<String, ResourcePersistentId> resolveResourcePersistentIds(@Nonnull RequestPartitionId theRequestPartitionId,
|
||||
String theResourceType,
|
||||
|
@ -208,6 +205,7 @@ public class IdHelperService {
|
|||
*
|
||||
* @throws ResourceNotFoundException If the ID can not be found
|
||||
*/
|
||||
@Override
|
||||
@Nonnull
|
||||
public ResourcePersistentId resolveResourcePersistentIds(@Nonnull RequestPartitionId theRequestPartitionId, String theResourceType, String theId) {
|
||||
Validate.notNull(theId, "theId must not be null");
|
||||
|
@ -225,6 +223,7 @@ public class IdHelperService {
|
|||
* <p>
|
||||
* In {@link ca.uhn.fhir.jpa.api.config.DaoConfig.ClientIdStrategyEnum#ANY} mode it will always return true.
|
||||
*/
|
||||
@Override
|
||||
public boolean idRequiresForcedId(String theId) {
|
||||
return myDaoConfig.getResourceClientIdStrategy() == DaoConfig.ClientIdStrategyEnum.ANY || !isValidPid(theId);
|
||||
}
|
||||
|
@ -240,104 +239,117 @@ public class IdHelperService {
|
|||
* This implementation will always try to use a cache for performance, meaning that it can resolve resources that
|
||||
* are deleted (but note that forced IDs can't change, so the cache can't return incorrect results)
|
||||
*/
|
||||
@Override
|
||||
@Nonnull
|
||||
public List<ResourcePersistentId> resolveResourcePersistentIdsWithCache(RequestPartitionId theRequestPartitionId, List<IIdType> theIds) {
|
||||
boolean onlyForcedIds = false;
|
||||
return resolveResourcePersistentIdsWithCache(theRequestPartitionId, theIds, onlyForcedIds);
|
||||
}
|
||||
|
||||
/**
|
||||
* Given a collection of resource IDs (resource type + id), resolves the internal persistent IDs.
|
||||
* <p>
|
||||
* This implementation will always try to use a cache for performance, meaning that it can resolve resources that
|
||||
* are deleted (but note that forced IDs can't change, so the cache can't return incorrect results)
|
||||
*
|
||||
* @param theOnlyForcedIds If <code>true</code>, resources which are not existing forced IDs will not be resolved
|
||||
*/
|
||||
@Override
|
||||
@Nonnull
|
||||
public List<ResourcePersistentId> resolveResourcePersistentIdsWithCache(RequestPartitionId theRequestPartitionId, List<IIdType> theIds, boolean theOnlyForcedIds) {
|
||||
assert myDontCheckActiveTransactionForUnitTest || TransactionSynchronizationManager.isSynchronizationActive();
|
||||
|
||||
List<ResourcePersistentId> retVal = new ArrayList<>(theIds.size());
|
||||
|
||||
new QueryChunker<IIdType>().chunk(theIds, ids -> doResolveResourcePersistentIdsWithCache(theRequestPartitionId, ids, retVal));
|
||||
|
||||
return retVal;
|
||||
}
|
||||
|
||||
private void doResolveResourcePersistentIdsWithCache(RequestPartitionId theRequestPartitionId, List<IIdType> theInputIds, List<ResourcePersistentId> theOutputListToPopulate) {
|
||||
for (IIdType id : theInputIds) {
|
||||
for (IIdType id : theIds) {
|
||||
if (!id.hasIdPart()) {
|
||||
throw new InvalidRequestException(Msg.code(1101) + "Parameter value missing in request");
|
||||
}
|
||||
}
|
||||
|
||||
if (theInputIds.isEmpty()) {
|
||||
return;
|
||||
}
|
||||
|
||||
Set<IIdType> idsToCheck = new HashSet<>(theInputIds.size());
|
||||
for (IIdType nextId : theInputIds) {
|
||||
if (myDaoConfig.getResourceClientIdStrategy() != DaoConfig.ClientIdStrategyEnum.ANY) {
|
||||
if (nextId.isIdPartValidLong()) {
|
||||
theOutputListToPopulate.add(new ResourcePersistentId(nextId.getIdPartAsLong()).setAssociatedResourceId(nextId));
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
String key = toForcedIdToPidKey(theRequestPartitionId, nextId.getResourceType(), nextId.getIdPart());
|
||||
ResourcePersistentId cachedId = myMemoryCacheService.getIfPresent(MemoryCacheService.CacheEnum.FORCED_ID_TO_PID, key);
|
||||
if (cachedId != null) {
|
||||
theOutputListToPopulate.add(cachedId);
|
||||
continue;
|
||||
}
|
||||
|
||||
idsToCheck.add(nextId);
|
||||
}
|
||||
|
||||
if (idsToCheck.size() > 0) {
|
||||
CriteriaBuilder cb = myEntityManager.getCriteriaBuilder();
|
||||
CriteriaQuery<ForcedId> criteriaQuery = cb.createQuery(ForcedId.class);
|
||||
Root<ForcedId> from = criteriaQuery.from(ForcedId.class);
|
||||
|
||||
List<Predicate> predicates = new ArrayList<>(idsToCheck.size());
|
||||
for (IIdType next : idsToCheck) {
|
||||
|
||||
List<Predicate> andPredicates = new ArrayList<>(3);
|
||||
|
||||
if (isNotBlank(next.getResourceType())) {
|
||||
Predicate typeCriteria = cb.equal(from.get("myResourceType").as(String.class), next.getResourceType());
|
||||
andPredicates.add(typeCriteria);
|
||||
}
|
||||
|
||||
Predicate idCriteria = cb.equal(from.get("myForcedId").as(String.class), next.getIdPart());
|
||||
andPredicates.add(idCriteria);
|
||||
|
||||
if (theRequestPartitionId.isDefaultPartition() && myPartitionSettings.getDefaultPartitionId() == null) {
|
||||
Predicate partitionIdCriteria = cb.isNull(from.get("myPartitionIdValue").as(Integer.class));
|
||||
andPredicates.add(partitionIdCriteria);
|
||||
} else if (!theRequestPartitionId.isAllPartitions()) {
|
||||
List<Integer> partitionIds = theRequestPartitionId.getPartitionIds();
|
||||
partitionIds = replaceDefaultPartitionIdIfNonNull(myPartitionSettings, partitionIds);
|
||||
|
||||
if (partitionIds.size() > 1) {
|
||||
Predicate partitionIdCriteria = from.get("myPartitionIdValue").as(Integer.class).in(partitionIds);
|
||||
andPredicates.add(partitionIdCriteria);
|
||||
} else {
|
||||
Predicate partitionIdCriteria = cb.equal(from.get("myPartitionIdValue").as(Integer.class), partitionIds.get(0));
|
||||
andPredicates.add(partitionIdCriteria);
|
||||
if (!theIds.isEmpty()) {
|
||||
Set<IIdType> idsToCheck = new HashSet<>(theIds.size());
|
||||
for (IIdType nextId : theIds) {
|
||||
if (myDaoConfig.getResourceClientIdStrategy() != DaoConfig.ClientIdStrategyEnum.ANY) {
|
||||
if (nextId.isIdPartValidLong()) {
|
||||
if (!theOnlyForcedIds) {
|
||||
retVal.add(new ResourcePersistentId(nextId.getIdPartAsLong()).setAssociatedResourceId(nextId));
|
||||
}
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
predicates.add(cb.and(andPredicates.toArray(EMPTY_PREDICATE_ARRAY)));
|
||||
String key = toForcedIdToPidKey(theRequestPartitionId, nextId.getResourceType(), nextId.getIdPart());
|
||||
ResourcePersistentId cachedId = myMemoryCacheService.getIfPresent(MemoryCacheService.CacheEnum.FORCED_ID_TO_PID, key);
|
||||
if (cachedId != null) {
|
||||
retVal.add(cachedId);
|
||||
continue;
|
||||
}
|
||||
|
||||
idsToCheck.add(nextId);
|
||||
}
|
||||
new QueryChunker<IIdType>().chunk(idsToCheck, SearchBuilder.getMaximumPageSize() / 2, ids -> doResolvePersistentIds(theRequestPartitionId, ids, retVal));
|
||||
}
|
||||
|
||||
return retVal;
|
||||
}
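A hedged sketch (not part of this commit) of how a caller might exercise the cache-aware resolution implemented above; the myIdHelperService field is assumed to be an autowired IIdHelperService, and the sample IDs are illustrative only:

    List<IIdType> ids = Arrays.asList(new IdType("Patient/123"), new IdType("Patient/pat1"));
    List<ResourcePersistentId> pids =
        myIdHelperService.resolveResourcePersistentIdsWithCache(RequestPartitionId.allPartitions(), ids);
    // "Patient/123" short-circuits to pid 123 when client-assigned IDs are not in ANY mode;
    // "Patient/pat1" is first looked up in the FORCED_ID_TO_PID cache and only falls
    // through to the ForcedId query when it has not been resolved before in this process.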
|
||||
|
||||
private void doResolvePersistentIds(RequestPartitionId theRequestPartitionId, List<IIdType> theIds, List<ResourcePersistentId> theOutputListToPopulate) {
|
||||
CriteriaBuilder cb = myEntityManager.getCriteriaBuilder();
|
||||
CriteriaQuery<ForcedId> criteriaQuery = cb.createQuery(ForcedId.class);
|
||||
Root<ForcedId> from = criteriaQuery.from(ForcedId.class);
|
||||
|
||||
List<Predicate> predicates = new ArrayList<>(theIds.size());
|
||||
for (IIdType next : theIds) {
|
||||
|
||||
List<Predicate> andPredicates = new ArrayList<>(3);
|
||||
|
||||
if (isNotBlank(next.getResourceType())) {
|
||||
Predicate typeCriteria = cb.equal(from.get("myResourceType").as(String.class), next.getResourceType());
|
||||
andPredicates.add(typeCriteria);
|
||||
}
|
||||
|
||||
criteriaQuery.where(cb.or(predicates.toArray(EMPTY_PREDICATE_ARRAY)));
|
||||
Predicate idCriteria = cb.equal(from.get("myForcedId").as(String.class), next.getIdPart());
|
||||
andPredicates.add(idCriteria);
|
||||
|
||||
TypedQuery<ForcedId> query = myEntityManager.createQuery(criteriaQuery);
|
||||
List<ForcedId> results = query.getResultList();
|
||||
for (ForcedId nextId : results) {
|
||||
// Check if the nextId has a resource ID. It may have a null resource ID if a commit is still pending.
|
||||
if (nextId.getResourceId() != null) {
|
||||
ResourcePersistentId persistentId = new ResourcePersistentId(nextId.getResourceId());
|
||||
populateAssociatedResourceId(nextId.getResourceType(), nextId.getForcedId(), persistentId);
|
||||
theOutputListToPopulate.add(persistentId);
|
||||
if (theRequestPartitionId.isDefaultPartition() && myPartitionSettings.getDefaultPartitionId() == null) {
|
||||
Predicate partitionIdCriteria = cb.isNull(from.get("myPartitionIdValue").as(Integer.class));
|
||||
andPredicates.add(partitionIdCriteria);
|
||||
} else if (!theRequestPartitionId.isAllPartitions()) {
|
||||
List<Integer> partitionIds = theRequestPartitionId.getPartitionIds();
|
||||
partitionIds = replaceDefaultPartitionIdIfNonNull(myPartitionSettings, partitionIds);
|
||||
|
||||
String key = toForcedIdToPidKey(theRequestPartitionId, nextId.getResourceType(), nextId.getForcedId());
|
||||
myMemoryCacheService.putAfterCommit(MemoryCacheService.CacheEnum.FORCED_ID_TO_PID, key, persistentId);
|
||||
if (partitionIds.size() > 1) {
|
||||
Predicate partitionIdCriteria = from.get("myPartitionIdValue").as(Integer.class).in(partitionIds);
|
||||
andPredicates.add(partitionIdCriteria);
|
||||
} else {
|
||||
Predicate partitionIdCriteria = cb.equal(from.get("myPartitionIdValue").as(Integer.class), partitionIds.get(0));
|
||||
andPredicates.add(partitionIdCriteria);
|
||||
}
|
||||
}
|
||||
|
||||
predicates.add(cb.and(andPredicates.toArray(EMPTY_PREDICATE_ARRAY)));
|
||||
}
|
||||
|
||||
criteriaQuery.where(cb.or(predicates.toArray(EMPTY_PREDICATE_ARRAY)));
|
||||
|
||||
TypedQuery<ForcedId> query = myEntityManager.createQuery(criteriaQuery);
|
||||
List<ForcedId> results = query.getResultList();
|
||||
for (ForcedId nextId : results) {
|
||||
// Check if the nextId has a resource ID. It may have a null resource ID if a commit is still pending.
|
||||
if (nextId.getResourceId() != null) {
|
||||
ResourcePersistentId persistentId = new ResourcePersistentId(nextId.getResourceId());
|
||||
populateAssociatedResourceId(nextId.getResourceType(), nextId.getForcedId(), persistentId);
|
||||
theOutputListToPopulate.add(persistentId);
|
||||
|
||||
String key = toForcedIdToPidKey(theRequestPartitionId, nextId.getResourceType(), nextId.getForcedId());
|
||||
myMemoryCacheService.putAfterCommit(MemoryCacheService.CacheEnum.FORCED_ID_TO_PID, key, persistentId);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
private void populateAssociatedResourceId(String nextResourceType, String forcedId, ResourcePersistentId persistentId) {
|
||||
IIdType resourceId = myFhirCtx.getVersion().newIdType();
|
||||
resourceId.setValue(nextResourceType + "/" + forcedId);
|
||||
|
@ -348,6 +360,7 @@ public class IdHelperService {
|
|||
* Given a persistent ID, returns the associated resource ID
|
||||
*/
|
||||
@Nonnull
|
||||
@Override
|
||||
public IIdType translatePidIdToForcedId(FhirContext theCtx, String theResourceType, ResourcePersistentId theId) {
|
||||
if (theId.getAssociatedResourceId() != null) {
|
||||
return theId.getAssociatedResourceId();
|
||||
|
@ -365,6 +378,7 @@ public class IdHelperService {
|
|||
return retVal;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Optional<String> translatePidIdToForcedIdWithCache(ResourcePersistentId theId) {
|
||||
return myMemoryCacheService.get(MemoryCacheService.CacheEnum.PID_TO_FORCED_ID, theId.getIdAsLong(), pid -> myForcedIdDao.findByResourcePid(pid).map(t -> t.getForcedId()));
|
||||
}
|
||||
|
@ -527,31 +541,7 @@ public class IdHelperService {
|
|||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Given a set of PIDs, return a set of public FHIR Resource IDs.
|
||||
* This function will return the forced ID if one exists; if the PID does not resolve to a forced ID, it will just return the PID itself as a string.
|
||||
* Example:
|
||||
* Let's say we have Patient/1 (pid == 1), Patient/pat1 (pid == 2), and Patient/3 (pid == 3); their pids would resolve as follows:
|
||||
* <p>
|
||||
* [1,2,3] -> ["1","pat1","3"]
|
||||
*
|
||||
* @param thePids The Set of pids you would like to resolve to external FHIR Resource IDs.
|
||||
* @return A Set of strings representing the FHIR IDs of the pids.
|
||||
*/
|
||||
public Set<String> translatePidsToFhirResourceIds(Set<Long> thePids) {
|
||||
assert myDontCheckActiveTransactionForUnitTest || TransactionSynchronizationManager.isSynchronizationActive();
|
||||
|
||||
Map<Long, Optional<String>> pidToForcedIdMap = translatePidsToForcedIds(thePids);
|
||||
|
||||
// If the result of the translation is an empty optional, it means there is no forced id, and we can use the PID as the resource ID.
|
||||
Set<String> resolvedResourceIds = pidToForcedIdMap.entrySet().stream()
|
||||
.map(entry -> entry.getValue().isPresent() ? entry.getValue().get() : entry.getKey().toString())
|
||||
.collect(Collectors.toSet());
|
||||
|
||||
return resolvedResourceIds;
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public Map<Long, Optional<String>> translatePidsToForcedIds(Set<Long> thePids) {
|
||||
assert myDontCheckActiveTransactionForUnitTest || TransactionSynchronizationManager.isSynchronizationActive();
|
||||
|
||||
|
@ -585,76 +575,10 @@ public class IdHelperService {
|
|||
return retVal;
|
||||
}
|
||||
|
||||
/**
|
||||
* @deprecated This method doesn't take a partition ID as input, so it is unsafe. It
|
||||
* should be reworked to include the partition ID before any new use is incorporated
|
||||
*/
|
||||
@Deprecated
|
||||
@Nullable
|
||||
public Long getPidOrNull(IBaseResource theResource) {
|
||||
assert myDontCheckActiveTransactionForUnitTest || TransactionSynchronizationManager.isSynchronizationActive();
|
||||
|
||||
IAnyResource anyResource = (IAnyResource) theResource;
|
||||
Long retVal = (Long) anyResource.getUserData(RESOURCE_PID);
|
||||
if (retVal == null) {
|
||||
IIdType id = theResource.getIdElement();
|
||||
try {
|
||||
retVal = this.resolveResourcePersistentIds(RequestPartitionId.allPartitions(), id.getResourceType(), id.getIdPart()).getIdAsLong();
|
||||
} catch (ResourceNotFoundException e) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
return retVal;
|
||||
}
|
||||
|
||||
/**
|
||||
* @deprecated This method doesn't take a partition ID as input, so it is unsafe. It
|
||||
* should be reworked to include the partition ID before any new use is incorporated
|
||||
*/
|
||||
@Deprecated
|
||||
@Nonnull
|
||||
public Long getPidOrThrowException(IIdType theId) {
|
||||
assert myDontCheckActiveTransactionForUnitTest || TransactionSynchronizationManager.isSynchronizationActive();
|
||||
|
||||
List<IIdType> ids = Collections.singletonList(theId);
|
||||
List<ResourcePersistentId> resourcePersistentIds = this.resolveResourcePersistentIdsWithCache(RequestPartitionId.allPartitions(), ids);
|
||||
return resourcePersistentIds.get(0).getIdAsLong();
|
||||
}
|
||||
|
||||
/**
|
||||
* @deprecated This method doesn't take a partition ID as input, so it is unsafe. It
|
||||
* should be reworked to include the partition ID before any new use is incorporated
|
||||
*/
|
||||
@Deprecated
|
||||
@Nonnull
|
||||
public List<Long> getPidsOrThrowException(List<IIdType> theIds) {
|
||||
assert myDontCheckActiveTransactionForUnitTest || TransactionSynchronizationManager.isSynchronizationActive();
|
||||
|
||||
List<ResourcePersistentId> resourcePersistentIds = this.resolveResourcePersistentIdsWithCache(RequestPartitionId.allPartitions(), theIds);
|
||||
return resourcePersistentIds.stream().map(ResourcePersistentId::getIdAsLong).collect(Collectors.toList());
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
public Long getPidOrThrowException(@Nonnull IAnyResource theResource) {
|
||||
Long retVal = (Long) theResource.getUserData(RESOURCE_PID);
|
||||
if (retVal == null) {
|
||||
throw new IllegalStateException(Msg.code(1102) + String.format("Unable to find %s in the user data for %s with ID %s", RESOURCE_PID, theResource, theResource.getId())
|
||||
);
|
||||
}
|
||||
return retVal;
|
||||
}
|
||||
|
||||
public IIdType resourceIdFromPidOrThrowException(Long thePid) {
|
||||
Optional<ResourceTable> optionalResource = myResourceTableDao.findById(thePid);
|
||||
if (!optionalResource.isPresent()) {
|
||||
throw new ResourceNotFoundException(Msg.code(1103) + "Requested resource not found");
|
||||
}
|
||||
return optionalResource.get().getIdDt().toVersionless();
|
||||
}
|
||||
|
||||
/**
|
||||
* Pre-cache a PID-to-Resource-ID mapping for later retrieval by {@link #translatePidsToForcedIds(Set)} and related methods
|
||||
*/
|
||||
@Override
|
||||
public void addResolvedPidToForcedId(ResourcePersistentId theResourcePersistentId, @Nonnull RequestPartitionId theRequestPartitionId, String theResourceType, @Nullable String theForcedId, @Nullable Date theDeletedAt) {
|
||||
if (theForcedId != null) {
|
||||
if (theResourcePersistentId.getAssociatedResourceId() == null) {
|
||||
|
|
|
@ -0,0 +1,153 @@
|
|||
package ca.uhn.fhir.jpa.dao.index;
|
||||
|
||||
/*-
|
||||
* #%L
|
||||
* HAPI FHIR JPA Server
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
|
||||
* %%
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
* #L%
|
||||
*/
|
||||
|
||||
import ca.uhn.fhir.i18n.Msg;
|
||||
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
|
||||
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
|
||||
import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
|
||||
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
|
||||
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
|
||||
import org.hl7.fhir.instance.model.api.IAnyResource;
|
||||
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||
import org.hl7.fhir.instance.model.api.IIdType;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.transaction.support.TransactionSynchronizationManager;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
import javax.annotation.Nullable;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
import java.util.Set;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import static ca.uhn.fhir.jpa.dao.index.IdHelperService.RESOURCE_PID;
|
||||
|
||||
/**
|
||||
* See {@link IJpaIdHelperService} for an explanation of this class.
|
||||
*/
|
||||
public class JpaIdHelperService extends IdHelperService implements IJpaIdHelperService, IIdHelperService {
|
||||
@Autowired
|
||||
protected IResourceTableDao myResourceTableDao;
|
||||
@Autowired
|
||||
private IIdHelperService myIdHelperService;
|
||||
|
||||
/**
|
||||
* @deprecated This method doesn't take a partition ID as input, so it is unsafe. It
|
||||
* should be reworked to include the partition ID before any new use is incorporated
|
||||
*/
|
||||
@Override
|
||||
@Deprecated
|
||||
@Nonnull
|
||||
public List<Long> getPidsOrThrowException(List<IIdType> theIds) {
|
||||
List<ResourcePersistentId> resourcePersistentIds = myIdHelperService.resolveResourcePersistentIdsWithCache(RequestPartitionId.allPartitions(), theIds);
|
||||
return resourcePersistentIds.stream().map(ResourcePersistentId::getIdAsLong).collect(Collectors.toList());
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* @deprecated This method doesn't take a partition ID as input, so it is unsafe. It
|
||||
* should be reworked to include the partition ID before any new use is incorporated
|
||||
*/
|
||||
@Override
|
||||
@Deprecated
|
||||
@Nullable
|
||||
public Long getPidOrNull(IBaseResource theResource) {
|
||||
|
||||
IAnyResource anyResource = (IAnyResource) theResource;
|
||||
Long retVal = (Long) anyResource.getUserData(RESOURCE_PID);
|
||||
if (retVal == null) {
|
||||
IIdType id = theResource.getIdElement();
|
||||
try {
|
||||
retVal = myIdHelperService.resolveResourcePersistentIds(RequestPartitionId.allPartitions(), id.getResourceType(), id.getIdPart()).getIdAsLong();
|
||||
} catch (ResourceNotFoundException e) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
return retVal;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* @deprecated This method doesn't take a partition ID as input, so it is unsafe. It
|
||||
* should be reworked to include the partition ID before any new use is incorporated
|
||||
*/
|
||||
@Override
|
||||
@Deprecated
|
||||
@Nonnull
|
||||
public Long getPidOrThrowException(IIdType theId) {
|
||||
assert TransactionSynchronizationManager.isSynchronizationActive();
|
||||
|
||||
List<IIdType> ids = Collections.singletonList(theId);
|
||||
List<ResourcePersistentId> resourcePersistentIds = myIdHelperService.resolveResourcePersistentIdsWithCache(RequestPartitionId.allPartitions(), ids);
|
||||
return resourcePersistentIds.get(0).getIdAsLong();
|
||||
}
|
||||
|
||||
@Override
|
||||
@Nonnull
|
||||
public Long getPidOrThrowException(@Nonnull IAnyResource theResource) {
|
||||
Long retVal = (Long) theResource.getUserData(RESOURCE_PID);
|
||||
if (retVal == null) {
|
||||
throw new IllegalStateException(Msg.code(1102) + String.format("Unable to find %s in the user data for %s with ID %s", RESOURCE_PID, theResource, theResource.getId())
|
||||
);
|
||||
}
|
||||
return retVal;
|
||||
}
|
||||
|
||||
@Override
|
||||
public IIdType resourceIdFromPidOrThrowException(Long thePid) {
|
||||
Optional<ResourceTable> optionalResource = myResourceTableDao.findById(thePid);
|
||||
if (!optionalResource.isPresent()) {
|
||||
throw new ResourceNotFoundException(Msg.code(1103) + "Requested resource not found");
|
||||
}
|
||||
return optionalResource.get().getIdDt().toVersionless();
|
||||
}
|
||||
|
||||
/**
|
||||
* Given a set of PIDs, return a set of public FHIR Resource IDs.
|
||||
* This function will return the forced ID if one exists; if the PID does not resolve to a forced ID, it will just return the PID itself as a string.
|
||||
* Example:
|
||||
* Let's say we have Patient/1 (pid == 1), Patient/pat1 (pid == 2), and Patient/3 (pid == 3); their pids would resolve as follows:
|
||||
* <p>
|
||||
* [1,2,3] -> ["1","pat1","3"]
|
||||
*
|
||||
* @param thePids The Set of pids you would like to resolve to external FHIR Resource IDs.
|
||||
* @return A Set of strings representing the FHIR IDs of the pids.
|
||||
*/
|
||||
@Override
|
||||
public Set<String> translatePidsToFhirResourceIds(Set<Long> thePids) {
|
||||
assert TransactionSynchronizationManager.isSynchronizationActive();
|
||||
|
||||
Map<Long, Optional<String>> pidToForcedIdMap = myIdHelperService.translatePidsToForcedIds(thePids);
|
||||
|
||||
// If the result of the translation is an empty optional, it means there is no forced id, and we can use the PID as the resource ID.
|
||||
Set<String> resolvedResourceIds = pidToForcedIdMap.entrySet().stream()
|
||||
.map(entry -> entry.getValue().isPresent() ? entry.getValue().get() : entry.getKey().toString())
|
||||
.collect(Collectors.toSet());
|
||||
|
||||
return resolvedResourceIds;
|
||||
|
||||
}
|
||||
|
||||
}
|
|
@ -27,6 +27,7 @@ import ca.uhn.fhir.context.RuntimeResourceDefinition;
|
|||
import ca.uhn.fhir.context.RuntimeSearchParam;
|
||||
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
|
||||
import ca.uhn.fhir.jpa.api.config.DaoConfig;
|
||||
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
|
||||
import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao;
|
||||
import ca.uhn.fhir.jpa.dao.MatchResourceUrlService;
|
||||
import ca.uhn.fhir.jpa.dao.data.IResourceIndexedComboStringUniqueDao;
|
||||
|
@ -91,7 +92,7 @@ public class SearchParamWithInlineReferencesExtractor {
|
|||
@Autowired
|
||||
private FhirContext myContext;
|
||||
@Autowired
|
||||
private IdHelperService myIdHelperService;
|
||||
private IIdHelperService myIdHelperService;
|
||||
@Autowired
|
||||
private ISearchParamRegistry mySearchParamRegistry;
|
||||
@Autowired
|
||||
|
|
|
@ -21,7 +21,7 @@ package ca.uhn.fhir.jpa.dao.mdm;
|
|||
*/
|
||||
|
||||
import ca.uhn.fhir.jpa.dao.data.IMdmLinkDao;
|
||||
import ca.uhn.fhir.jpa.dao.index.IdHelperService;
|
||||
import ca.uhn.fhir.jpa.dao.index.IJpaIdHelperService;
|
||||
import ca.uhn.fhir.mdm.api.MdmMatchResultEnum;
|
||||
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||
import org.slf4j.Logger;
|
||||
|
@ -36,16 +36,17 @@ public class MdmLinkDeleteSvc {
|
|||
@Autowired
|
||||
private IMdmLinkDao myMdmLinkDao;
|
||||
@Autowired
|
||||
private IdHelperService myIdHelperService;
|
||||
private IJpaIdHelperService myIdHelperService;
|
||||
|
||||
/**
|
||||
* Delete all {@link ca.uhn.fhir.jpa.entity.MdmLink} records with any reference to this resource. (Used by Expunge.)
|
||||
*
|
||||
* @param theResource the resource whose MDM links should be removed
|
||||
* @return the number of records deleted
|
||||
*/
|
||||
public int deleteWithAnyReferenceTo(IBaseResource theResource) {
|
||||
Long pid = myIdHelperService.getPidOrThrowException(theResource.getIdElement());
|
||||
int removed = myMdmLinkDao.deleteWithAnyReferenceToPid(pid);
|
||||
int removed = myMdmLinkDao.deleteWithAnyReferenceToPid(pid);
|
||||
if (removed > 0) {
|
||||
ourLog.info("Removed {} MDM links with references to {}", removed, theResource.getIdElement().toVersionless());
|
||||
}
|
||||
|
@ -54,7 +55,7 @@ public class MdmLinkDeleteSvc {
|
|||
|
||||
public int deleteNonRedirectWithAnyReferenceTo(IBaseResource theResource) {
|
||||
Long pid = myIdHelperService.getPidOrThrowException(theResource.getIdElement());
|
||||
int removed = myMdmLinkDao.deleteWithAnyReferenceToPidAndMatchResultNot(pid, MdmMatchResultEnum.REDIRECT);
|
||||
int removed = myMdmLinkDao.deleteWithAnyReferenceToPidAndMatchResultNot(pid, MdmMatchResultEnum.REDIRECT);
|
||||
if (removed > 0) {
|
||||
ourLog.info("Removed {} non-redirect MDM links with references to {}", removed, theResource.getIdElement().toVersionless());
|
||||
}
|
||||
|
|
|
@ -21,7 +21,7 @@ package ca.uhn.fhir.jpa.dao.mdm;
|
|||
*/
|
||||
|
||||
import ca.uhn.fhir.jpa.dao.data.IMdmLinkDao;
|
||||
import ca.uhn.fhir.jpa.dao.index.IdHelperService;
|
||||
import ca.uhn.fhir.jpa.dao.index.IJpaIdHelperService;
|
||||
import ca.uhn.fhir.mdm.api.MdmMatchResultEnum;
|
||||
import ca.uhn.fhir.mdm.log.Logs;
|
||||
import ca.uhn.fhir.model.primitive.IdDt;
|
||||
|
@ -43,7 +43,7 @@ public class MdmLinkExpandSvc {
|
|||
@Autowired
|
||||
private IMdmLinkDao myMdmLinkDao;
|
||||
@Autowired
|
||||
private IdHelperService myIdHelperService;
|
||||
private IJpaIdHelperService myIdHelperService;
|
||||
|
||||
public MdmLinkExpandSvc() {
|
||||
}
|
||||
|
|
|
@ -20,7 +20,6 @@ package ca.uhn.fhir.jpa.dao.predicate;
|
|||
* #L%
|
||||
*/
|
||||
|
||||
import ca.uhn.fhir.i18n.Msg;
|
||||
import ca.uhn.fhir.context.BaseRuntimeChildDefinition;
|
||||
import ca.uhn.fhir.context.BaseRuntimeElementDefinition;
|
||||
import ca.uhn.fhir.context.ConfigurationException;
|
||||
|
@ -28,6 +27,7 @@ import ca.uhn.fhir.context.RuntimeChildChoiceDefinition;
|
|||
import ca.uhn.fhir.context.RuntimeChildResourceDefinition;
|
||||
import ca.uhn.fhir.context.RuntimeResourceDefinition;
|
||||
import ca.uhn.fhir.context.RuntimeSearchParam;
|
||||
import ca.uhn.fhir.i18n.Msg;
|
||||
import ca.uhn.fhir.interceptor.api.HookParams;
|
||||
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
|
||||
import ca.uhn.fhir.interceptor.api.Pointcut;
|
||||
|
@ -35,9 +35,9 @@ import ca.uhn.fhir.interceptor.model.RequestPartitionId;
|
|||
import ca.uhn.fhir.jpa.api.config.DaoConfig;
|
||||
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
|
||||
import ca.uhn.fhir.jpa.api.dao.IDao;
|
||||
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
|
||||
import ca.uhn.fhir.jpa.dao.BaseStorageDao;
|
||||
import ca.uhn.fhir.jpa.dao.LegacySearchBuilder;
|
||||
import ca.uhn.fhir.jpa.dao.index.IdHelperService;
|
||||
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryProvenanceEntity;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamDate;
|
||||
|
@ -114,7 +114,7 @@ class PredicateBuilderReference extends BasePredicateBuilder {
|
|||
private static final Logger ourLog = LoggerFactory.getLogger(PredicateBuilderReference.class);
|
||||
private final PredicateBuilder myPredicateBuilder;
|
||||
@Autowired
|
||||
IdHelperService myIdHelperService;
|
||||
IIdHelperService myIdHelperService;
|
||||
@Autowired
|
||||
ISearchParamRegistry mySearchParamRegistry;
|
||||
@Autowired
|
||||
|
|
|
@ -21,8 +21,8 @@ package ca.uhn.fhir.jpa.dao.predicate;
|
|||
*/
|
||||
|
||||
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
|
||||
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
|
||||
import ca.uhn.fhir.jpa.dao.LegacySearchBuilder;
|
||||
import ca.uhn.fhir.jpa.dao.index.IdHelperService;
|
||||
import ca.uhn.fhir.model.api.IQueryParameterType;
|
||||
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
|
||||
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
|
||||
|
@ -49,7 +49,7 @@ public class PredicateBuilderResourceId extends BasePredicateBuilder {
|
|||
private static final Logger ourLog = LoggerFactory.getLogger(PredicateBuilderResourceId.class);
|
||||
|
||||
@Autowired
|
||||
IdHelperService myIdHelperService;
|
||||
IIdHelperService myIdHelperService;
|
||||
|
||||
public PredicateBuilderResourceId(LegacySearchBuilder theSearchBuilder) {
|
||||
super(theSearchBuilder);
|
||||
|
|
|
@ -20,14 +20,14 @@ package ca.uhn.fhir.jpa.dao.r5;
|
|||
* #L%
|
||||
*/
|
||||
|
||||
import ca.uhn.fhir.i18n.Msg;
|
||||
import ca.uhn.fhir.context.FhirContext;
|
||||
import ca.uhn.fhir.context.support.IValidationSupport;
|
||||
import ca.uhn.fhir.context.support.IValidationSupport.CodeValidationResult;
|
||||
import ca.uhn.fhir.context.support.ValidationSupportContext;
|
||||
import ca.uhn.fhir.i18n.Msg;
|
||||
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoCodeSystem;
|
||||
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
|
||||
import ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao;
|
||||
import ca.uhn.fhir.jpa.dao.index.IdHelperService;
|
||||
import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
|
||||
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
|
||||
|
@ -64,7 +64,7 @@ public class FhirResourceDaoCodeSystemR5 extends BaseHapiFhirResourceDao<CodeSys
|
|||
@Autowired
|
||||
protected ITermCodeSystemStorageSvc myTerminologyCodeSystemStorageSvc;
|
||||
@Autowired
|
||||
protected IdHelperService myIdHelperService;
|
||||
protected IIdHelperService myIdHelperService;
|
||||
@Autowired
|
||||
protected ITermDeferredStorageSvc myTermDeferredStorageSvc;
|
||||
@Autowired
|
||||
|
|
|
@ -26,7 +26,7 @@ import ca.uhn.fhir.jpa.dao.data.IResourceLinkDao;
|
|||
import ca.uhn.fhir.jpa.dao.expunge.PartitionRunner;
|
||||
import ca.uhn.fhir.jpa.dao.expunge.ResourceForeignKey;
|
||||
import ca.uhn.fhir.jpa.dao.expunge.ResourceTableFKProvider;
|
||||
import ca.uhn.fhir.jpa.dao.index.IdHelperService;
|
||||
import ca.uhn.fhir.jpa.dao.index.IJpaIdHelperService;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceLink;
|
||||
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
|
||||
import org.slf4j.Logger;
|
||||
|
@ -56,7 +56,7 @@ public class DeleteExpungeProcessor implements ItemProcessor<List<Long>, List<St
|
|||
@Autowired
|
||||
DaoConfig myDaoConfig;
|
||||
@Autowired
|
||||
IdHelperService myIdHelper;
|
||||
IJpaIdHelperService myIdHelper;
|
||||
@Autowired
|
||||
IResourceLinkDao myResourceLinkDao;
|
||||
|
||||
|
|
|
@ -0,0 +1,271 @@
|
|||
package ca.uhn.fhir.jpa.entity;
|
||||
|
||||
/*-
|
||||
* #%L
|
||||
* HAPI FHIR JPA Server
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
|
||||
* %%
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
* #L%
|
||||
*/
|
||||
|
||||
import ca.uhn.fhir.batch2.model.JobDefinition;
|
||||
import ca.uhn.fhir.batch2.model.StatusEnum;
|
||||
import org.apache.commons.lang3.builder.ToStringBuilder;
|
||||
import org.apache.commons.lang3.builder.ToStringStyle;
|
||||
|
||||
import javax.persistence.Column;
|
||||
import javax.persistence.Entity;
|
||||
import javax.persistence.EnumType;
|
||||
import javax.persistence.Enumerated;
|
||||
import javax.persistence.Id;
|
||||
import javax.persistence.Index;
|
||||
import javax.persistence.Lob;
|
||||
import javax.persistence.Table;
|
||||
import javax.persistence.Temporal;
|
||||
import javax.persistence.TemporalType;
|
||||
import java.io.Serializable;
|
||||
import java.util.Date;
|
||||
|
||||
import static ca.uhn.fhir.jpa.entity.Batch2WorkChunkEntity.ERROR_MSG_MAX_LENGTH;
|
||||
import static org.apache.commons.lang3.StringUtils.left;
|
||||
|
||||
@Entity
|
||||
@Table(name = "BT2_JOB_INSTANCE", indexes = {
|
||||
@Index(name = "IDX_BT2JI_CT", columnList = "CREATE_TIME")
|
||||
})
|
||||
public class Batch2JobInstanceEntity implements Serializable {
|
||||
|
||||
public static final int STATUS_MAX_LENGTH = 20;
|
||||
public static final int TIME_REMAINING_LENGTH = 100;
|
||||
public static final int PARAMS_JSON_MAX_LENGTH = 2000;
|
||||
private static final long serialVersionUID = 8187134261799095422L;
|
||||
|
||||
@Id
|
||||
@Column(name = "ID", length = JobDefinition.ID_MAX_LENGTH, nullable = false)
|
||||
private String myId;
|
||||
|
||||
@Column(name = "CREATE_TIME", nullable = false)
|
||||
@Temporal(TemporalType.TIMESTAMP)
|
||||
private Date myCreateTime;
|
||||
|
||||
@Column(name = "START_TIME", nullable = true)
|
||||
@Temporal(TemporalType.TIMESTAMP)
|
||||
private Date myStartTime;
|
||||
|
||||
@Column(name = "END_TIME", nullable = true)
|
||||
@Temporal(TemporalType.TIMESTAMP)
|
||||
private Date myEndTime;
|
||||
|
||||
@Column(name = "DEFINITION_ID", length = JobDefinition.ID_MAX_LENGTH, nullable = false)
|
||||
private String myDefinitionId;
|
||||
|
||||
@Column(name = "DEFINITION_VER", nullable = false)
|
||||
private int myDefinitionVersion;
|
||||
|
||||
@Column(name = "STAT", length = STATUS_MAX_LENGTH, nullable = false)
|
||||
@Enumerated(EnumType.STRING)
|
||||
private StatusEnum myStatus;
|
||||
|
||||
@Column(name = "JOB_CANCELLED", nullable = false)
|
||||
private boolean myCancelled;
|
||||
@Column(name = "PARAMS_JSON", length = PARAMS_JSON_MAX_LENGTH, nullable = true)
|
||||
private String myParamsJson;
|
||||
@Lob
|
||||
@Column(name = "PARAMS_JSON_LOB", nullable = true)
|
||||
private String myParamsJsonLob;
|
||||
@Column(name = "CMB_RECS_PROCESSED", nullable = true)
|
||||
private Integer myCombinedRecordsProcessed;
|
||||
@Column(name = "CMB_RECS_PER_SEC", nullable = true)
|
||||
private Double myCombinedRecordsProcessedPerSecond;
|
||||
@Column(name = "TOT_ELAPSED_MILLIS", nullable = true)
|
||||
private Integer myTotalElapsedMillis;
|
||||
@Column(name = "WORK_CHUNKS_PURGED", nullable = false)
|
||||
private boolean myWorkChunksPurged;
|
||||
@Column(name = "PROGRESS_PCT")
|
||||
private double myProgress;
|
||||
@Column(name = "ERROR_MSG", length = ERROR_MSG_MAX_LENGTH, nullable = true)
|
||||
private String myErrorMessage;
|
||||
@Column(name = "ERROR_COUNT")
|
||||
private int myErrorCount;
|
||||
@Column(name = "EST_REMAINING", length = TIME_REMAINING_LENGTH, nullable = true)
|
||||
private String myEstimatedTimeRemaining;
|
||||
|
||||
public boolean isCancelled() {
|
||||
return myCancelled;
|
||||
}
|
||||
|
||||
public void setCancelled(boolean theCancelled) {
|
||||
myCancelled = theCancelled;
|
||||
}
|
||||
|
||||
public int getErrorCount() {
|
||||
return myErrorCount;
|
||||
}
|
||||
|
||||
public void setErrorCount(int theErrorCount) {
|
||||
myErrorCount = theErrorCount;
|
||||
}
|
||||
|
||||
public Integer getTotalElapsedMillis() {
|
||||
return myTotalElapsedMillis;
|
||||
}
|
||||
|
||||
public void setTotalElapsedMillis(Integer theTotalElapsedMillis) {
|
||||
myTotalElapsedMillis = theTotalElapsedMillis;
|
||||
}
|
||||
|
||||
public Integer getCombinedRecordsProcessed() {
|
||||
return myCombinedRecordsProcessed;
|
||||
}
|
||||
|
||||
public void setCombinedRecordsProcessed(Integer theCombinedRecordsProcessed) {
|
||||
myCombinedRecordsProcessed = theCombinedRecordsProcessed;
|
||||
}
|
||||
|
||||
public Double getCombinedRecordsProcessedPerSecond() {
|
||||
return myCombinedRecordsProcessedPerSecond;
|
||||
}
|
||||
|
||||
public void setCombinedRecordsProcessedPerSecond(Double theCombinedRecordsProcessedPerSecond) {
|
||||
myCombinedRecordsProcessedPerSecond = theCombinedRecordsProcessedPerSecond;
|
||||
}
|
||||
|
||||
public Date getCreateTime() {
|
||||
return myCreateTime;
|
||||
}
|
||||
|
||||
public void setCreateTime(Date theCreateTime) {
|
||||
myCreateTime = theCreateTime;
|
||||
}
|
||||
|
||||
public Date getStartTime() {
|
||||
return myStartTime;
|
||||
}
|
||||
|
||||
public void setStartTime(Date theStartTime) {
|
||||
myStartTime = theStartTime;
|
||||
}
|
||||
|
||||
public Date getEndTime() {
|
||||
return myEndTime;
|
||||
}
|
||||
|
||||
public void setEndTime(Date theEndTime) {
|
||||
myEndTime = theEndTime;
|
||||
}
|
||||
|
||||
public String getId() {
|
||||
return myId;
|
||||
}
|
||||
|
||||
public void setId(String theId) {
|
||||
myId = theId;
|
||||
}
|
||||
|
||||
public String getDefinitionId() {
|
||||
return myDefinitionId;
|
||||
}
|
||||
|
||||
public void setDefinitionId(String theDefinitionId) {
|
||||
myDefinitionId = theDefinitionId;
|
||||
}
|
||||
|
||||
public int getDefinitionVersion() {
|
||||
return myDefinitionVersion;
|
||||
}
|
||||
|
||||
public void setDefinitionVersion(int theDefinitionVersion) {
|
||||
myDefinitionVersion = theDefinitionVersion;
|
||||
}
|
||||
|
||||
public StatusEnum getStatus() {
|
||||
return myStatus;
|
||||
}
|
||||
|
||||
public void setStatus(StatusEnum theStatus) {
|
||||
myStatus = theStatus;
|
||||
}
|
||||
|
||||
public String getParams() {
|
||||
if (myParamsJsonLob != null) {
|
||||
return myParamsJsonLob;
|
||||
}
|
||||
return myParamsJson;
|
||||
}
|
||||
|
||||
public void setParams(String theParams) {
|
||||
myParamsJsonLob = null;
|
||||
myParamsJson = null;
|
||||
if (theParams != null && theParams.length() > PARAMS_JSON_MAX_LENGTH) {
|
||||
myParamsJsonLob = theParams;
|
||||
} else {
|
||||
myParamsJson = theParams;
|
||||
}
|
||||
}
|
||||
|
||||
public boolean getWorkChunksPurged() {
|
||||
return myWorkChunksPurged;
|
||||
}
|
||||
|
||||
public void setWorkChunksPurged(boolean theWorkChunksPurged) {
|
||||
myWorkChunksPurged = theWorkChunksPurged;
|
||||
}
|
||||
|
||||
public double getProgress() {
|
||||
return myProgress;
|
||||
}
|
||||
|
||||
public void setProgress(double theProgress) {
|
||||
myProgress = theProgress;
|
||||
}
|
||||
|
||||
public String getErrorMessage() {
|
||||
return myErrorMessage;
|
||||
}
|
||||
|
||||
public void setErrorMessage(String theErrorMessage) {
|
||||
myErrorMessage = left(theErrorMessage, ERROR_MSG_MAX_LENGTH);
|
||||
}
|
||||
|
||||
public String getEstimatedTimeRemaining() {
|
||||
return myEstimatedTimeRemaining;
|
||||
}
|
||||
|
||||
public void setEstimatedTimeRemaining(String theEstimatedTimeRemaining) {
|
||||
myEstimatedTimeRemaining = left(theEstimatedTimeRemaining, TIME_REMAINING_LENGTH);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE)
|
||||
.append("id", myId)
|
||||
.append("definitionId", myDefinitionId)
|
||||
.append("definitionVersion", myDefinitionVersion)
|
||||
.append("errorCount", myErrorCount)
|
||||
.append("createTime", myCreateTime)
|
||||
.append("startTime", myStartTime)
|
||||
.append("endTime", myEndTime)
|
||||
.append("status", myStatus)
|
||||
.append("cancelled", myCancelled)
|
||||
.append("combinedRecordsProcessed", myCombinedRecordsProcessed)
|
||||
.append("combinedRecordsProcessedPerSecond", myCombinedRecordsProcessedPerSecond)
|
||||
.append("totalElapsedMillis", myTotalElapsedMillis)
|
||||
.append("workChunksPurged", myWorkChunksPurged)
|
||||
.append("progress", myProgress)
|
||||
.append("errorMessage", myErrorMessage)
|
||||
.append("estimatedTimeRemaining", myEstimatedTimeRemaining)
|
||||
.toString();
|
||||
}
|
||||
}
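Purely illustrative (assumed caller code, not part of this commit): parameter strings longer than PARAMS_JSON_MAX_LENGTH spill into the LOB column, and getParams() transparently prefers the LOB when it is populated; shortJson and veryLongJson are hypothetical String values:

    Batch2JobInstanceEntity instance = new Batch2JobInstanceEntity();
    instance.setParams(shortJson);      // <= 2000 chars: stored in PARAMS_JSON
    instance.setParams(veryLongJson);   // > 2000 chars: stored in PARAMS_JSON_LOB instead
    String params = instance.getParams();   // returns the LOB value because it is populated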
|
|
@ -0,0 +1,235 @@
|
|||
package ca.uhn.fhir.jpa.entity;
|
||||
|
||||
/*-
|
||||
* #%L
|
||||
* HAPI FHIR JPA Server
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
|
||||
* %%
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
* #L%
|
||||
*/
|
||||
|
||||
import ca.uhn.fhir.batch2.model.StatusEnum;
|
||||
import org.apache.commons.lang3.builder.ToStringBuilder;
|
||||
import org.apache.commons.lang3.builder.ToStringStyle;
|
||||
|
||||
import javax.persistence.Basic;
|
||||
import javax.persistence.Column;
|
||||
import javax.persistence.Entity;
|
||||
import javax.persistence.EnumType;
|
||||
import javax.persistence.Enumerated;
|
||||
import javax.persistence.FetchType;
|
||||
import javax.persistence.ForeignKey;
|
||||
import javax.persistence.Id;
|
||||
import javax.persistence.Index;
|
||||
import javax.persistence.JoinColumn;
|
||||
import javax.persistence.Lob;
|
||||
import javax.persistence.ManyToOne;
|
||||
import javax.persistence.Table;
|
||||
import javax.persistence.Temporal;
|
||||
import javax.persistence.TemporalType;
|
||||
import java.io.Serializable;
|
||||
import java.util.Date;
|
||||
|
||||
import static ca.uhn.fhir.batch2.model.JobDefinition.ID_MAX_LENGTH;
|
||||
import static ca.uhn.fhir.jpa.entity.Batch2JobInstanceEntity.STATUS_MAX_LENGTH;
|
||||
import static org.apache.commons.lang3.StringUtils.left;
|
||||
|
||||
@Entity
|
||||
@Table(name = "BT2_WORK_CHUNK", indexes = {
|
||||
@Index(name = "IDX_BT2WC_II_SEQ", columnList = "INSTANCE_ID,SEQ")
|
||||
})
|
||||
public class Batch2WorkChunkEntity implements Serializable {
|
||||
|
||||
public static final int ERROR_MSG_MAX_LENGTH = 500;
|
||||
private static final long serialVersionUID = -6202771941965780558L;
|
||||
@Id
|
||||
@Column(name = "ID", length = ID_MAX_LENGTH)
|
||||
private String myId;
|
||||
@Column(name = "SEQ", nullable = false)
|
||||
private int mySequence;
|
||||
@Column(name = "CREATE_TIME", nullable = false)
|
||||
@Temporal(TemporalType.TIMESTAMP)
|
||||
private Date myCreateTime;
|
||||
@Column(name = "START_TIME", nullable = true)
|
||||
@Temporal(TemporalType.TIMESTAMP)
|
||||
private Date myStartTime;
|
||||
@Column(name = "END_TIME", nullable = true)
|
||||
@Temporal(TemporalType.TIMESTAMP)
|
||||
private Date myEndTime;
|
||||
@Column(name = "RECORDS_PROCESSED", nullable = true)
|
||||
private Integer myRecordsProcessed;
|
||||
@Column(name = "DEFINITION_ID", length = ID_MAX_LENGTH, nullable = false)
|
||||
private String myJobDefinitionId;
|
||||
@Column(name = "DEFINITION_VER", length = ID_MAX_LENGTH, nullable = false)
|
||||
private int myJobDefinitionVersion;
|
||||
@Column(name = "TGT_STEP_ID", length = ID_MAX_LENGTH, nullable = false)
|
||||
private String myTargetStepId;
|
||||
@Lob
|
||||
@Basic(fetch = FetchType.LAZY)
|
||||
@Column(name = "CHUNK_DATA", nullable = true, length = Integer.MAX_VALUE - 1)
|
||||
private String mySerializedData;
|
||||
@Column(name = "STAT", length = STATUS_MAX_LENGTH, nullable = false)
|
||||
@Enumerated(EnumType.STRING)
|
||||
private StatusEnum myStatus;
|
||||
@ManyToOne(fetch = FetchType.LAZY)
|
||||
@JoinColumn(name = "INSTANCE_ID", insertable = false, updatable = false, foreignKey = @ForeignKey(name = "FK_BT2WC_INSTANCE"))
|
||||
private Batch2JobInstanceEntity myInstance;
|
||||
@Column(name = "INSTANCE_ID", length = ID_MAX_LENGTH, nullable = false)
|
||||
private String myInstanceId;
|
||||
@Column(name = "ERROR_MSG", length = ERROR_MSG_MAX_LENGTH, nullable = true)
|
||||
private String myErrorMessage;
|
||||
@Column(name = "ERROR_COUNT", nullable = false)
|
||||
private int myErrorCount;
|
||||
|
||||
public int getErrorCount() {
|
||||
return myErrorCount;
|
||||
}
|
||||
|
||||
public void setErrorCount(int theErrorCount) {
|
||||
myErrorCount = theErrorCount;
|
||||
}
|
||||
|
||||
public String getErrorMessage() {
|
||||
return myErrorMessage;
|
||||
}
|
||||
|
||||
public void setErrorMessage(String theErrorMessage) {
|
||||
myErrorMessage = left(theErrorMessage, ERROR_MSG_MAX_LENGTH);
|
||||
}
|
||||
|
||||
public int getSequence() {
|
||||
return mySequence;
|
||||
}
|
||||
|
||||
public void setSequence(int theSequence) {
|
||||
mySequence = theSequence;
|
||||
}
|
||||
|
||||
public Date getCreateTime() {
|
||||
return myCreateTime;
|
||||
}
|
||||
|
||||
public void setCreateTime(Date theCreateTime) {
|
||||
myCreateTime = theCreateTime;
|
||||
}
|
||||
|
||||
public Date getStartTime() {
|
||||
return myStartTime;
|
||||
}
|
||||
|
||||
public void setStartTime(Date theStartTime) {
|
||||
myStartTime = theStartTime;
|
||||
}
|
||||
|
||||
public Date getEndTime() {
|
||||
return myEndTime;
|
||||
}
|
||||
|
||||
public void setEndTime(Date theEndTime) {
|
||||
myEndTime = theEndTime;
|
||||
}
|
||||
|
||||
public Integer getRecordsProcessed() {
|
||||
return myRecordsProcessed;
|
||||
}
|
||||
|
||||
public void setRecordsProcessed(Integer theRecordsProcessed) {
|
||||
myRecordsProcessed = theRecordsProcessed;
|
||||
}
|
||||
|
||||
public Batch2JobInstanceEntity getInstance() {
|
||||
return myInstance;
|
||||
}
|
||||
|
||||
public void setInstance(Batch2JobInstanceEntity theInstance) {
|
||||
myInstance = theInstance;
|
||||
}
|
||||
|
||||
public String getJobDefinitionId() {
|
||||
return myJobDefinitionId;
|
||||
}
|
||||
|
||||
public void setJobDefinitionId(String theJobDefinitionId) {
|
||||
myJobDefinitionId = theJobDefinitionId;
|
||||
}
|
||||
|
||||
public int getJobDefinitionVersion() {
|
||||
return myJobDefinitionVersion;
|
||||
}
|
||||
|
||||
public void setJobDefinitionVersion(int theJobDefinitionVersion) {
|
||||
myJobDefinitionVersion = theJobDefinitionVersion;
|
||||
}
|
||||
|
||||
public String getTargetStepId() {
|
||||
return myTargetStepId;
|
||||
}
|
||||
|
||||
public void setTargetStepId(String theTargetStepId) {
|
||||
myTargetStepId = theTargetStepId;
|
||||
}
|
||||
|
||||
public String getSerializedData() {
|
||||
return mySerializedData;
|
||||
}
|
||||
|
||||
public void setSerializedData(String theSerializedData) {
|
||||
mySerializedData = theSerializedData;
|
||||
}
|
||||
|
||||
public StatusEnum getStatus() {
|
||||
return myStatus;
|
||||
}
|
||||
|
||||
public void setStatus(StatusEnum theStatus) {
|
||||
myStatus = theStatus;
|
||||
}
|
||||
|
||||
public String getId() {
|
||||
return myId;
|
||||
}
|
||||
|
||||
public void setId(String theId) {
|
||||
myId = theId;
|
||||
}
|
||||
|
||||
public String getInstanceId() {
|
||||
return myInstanceId;
|
||||
}
|
||||
|
||||
public void setInstanceId(String theInstanceId) {
|
||||
myInstanceId = theInstanceId;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE)
|
||||
.append("id", myId)
|
||||
.append("instanceId", myInstanceId)
|
||||
.append("sequence", mySequence)
|
||||
.append("errorCount", myErrorCount)
|
||||
.append("jobDefinitionId", myJobDefinitionId)
|
||||
.append("jobDefinitionVersion", myJobDefinitionVersion)
|
||||
.append("createTime", myCreateTime)
|
||||
.append("startTime", myStartTime)
|
||||
.append("endTime", myEndTime)
|
||||
.append("recordsProcessed", myRecordsProcessed)
|
||||
.append("targetStepId", myTargetStepId)
|
||||
.append("serializedData", mySerializedData)
|
||||
.append("status", myStatus)
|
||||
.append("errorMessage", myErrorMessage)
|
||||
.toString();
|
||||
}
|
||||
}
|
|
@ -49,11 +49,10 @@ import java.util.stream.Collectors;
|
|||
@SuppressWarnings({"SqlNoDataSourceInspection", "SpellCheckingInspection"})
|
||||
public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
|
||||
|
||||
private final Set<FlagEnum> myFlags;
|
||||
|
||||
// H2, Derby, MariaDB, and MySql automatically add indexes to foreign keys
|
||||
public static final DriverTypeEnum[] NON_AUTOMATIC_FK_INDEX_PLATFORMS = new DriverTypeEnum[] {
|
||||
DriverTypeEnum.POSTGRES_9_4, DriverTypeEnum.ORACLE_12C, DriverTypeEnum.MSSQL_2012 };
|
||||
private final Set<FlagEnum> myFlags;
|
||||
|
||||
|
||||
/**
|
||||
|
@ -162,6 +161,45 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
|
|||
.withColumns("VALUESET_CONCEPT_PID")
|
||||
.onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS);
|
||||
|
||||
// Batch2 Framework
|
||||
|
||||
Builder.BuilderAddTableByColumns batchInstance = version.addTableByColumns("20220227.1", "BT2_JOB_INSTANCE", "ID");
|
||||
batchInstance.addColumn("ID").nonNullable().type(ColumnTypeEnum.STRING, 100);
|
||||
batchInstance.addColumn("CREATE_TIME").nonNullable().type(ColumnTypeEnum.DATE_TIMESTAMP);
|
||||
batchInstance.addColumn("START_TIME").nullable().type(ColumnTypeEnum.DATE_TIMESTAMP);
|
||||
batchInstance.addColumn("END_TIME").nullable().type(ColumnTypeEnum.DATE_TIMESTAMP);
|
||||
batchInstance.addColumn("DEFINITION_ID").nonNullable().type(ColumnTypeEnum.STRING, 100);
|
||||
batchInstance.addColumn("DEFINITION_VER").nonNullable().type(ColumnTypeEnum.INT);
|
||||
batchInstance.addColumn("STAT").nonNullable().type(ColumnTypeEnum.STRING, 20);
|
||||
batchInstance.addColumn("JOB_CANCELLED").nonNullable().type(ColumnTypeEnum.BOOLEAN);
|
||||
batchInstance.addColumn("PARAMS_JSON").nullable().type(ColumnTypeEnum.STRING, 2000);
|
||||
batchInstance.addColumn("PARAMS_JSON_LOB").nullable().type(ColumnTypeEnum.CLOB);
|
||||
batchInstance.addColumn("CMB_RECS_PROCESSED").nullable().type(ColumnTypeEnum.INT);
|
||||
batchInstance.addColumn("CMB_RECS_PER_SEC").nullable().type(ColumnTypeEnum.DOUBLE);
|
||||
batchInstance.addColumn("TOT_ELAPSED_MILLIS").nullable().type(ColumnTypeEnum.INT);
|
||||
batchInstance.addColumn("WORK_CHUNKS_PURGED").nonNullable().type(ColumnTypeEnum.BOOLEAN);
|
||||
batchInstance.addColumn("PROGRESS_PCT").nonNullable().type(ColumnTypeEnum.DOUBLE);
|
||||
batchInstance.addColumn("ERROR_MSG").nullable().type(ColumnTypeEnum.STRING, 500);
|
||||
batchInstance.addColumn("ERROR_COUNT").nullable().type(ColumnTypeEnum.INT);
|
||||
batchInstance.addColumn("EST_REMAINING").nullable().type(ColumnTypeEnum.STRING, 100);
|
||||
batchInstance.addIndex("20220227.2", "IDX_BT2JI_CT").unique(false).withColumns("CREATE_TIME");
|
||||
|
||||
Builder.BuilderAddTableByColumns batchChunk = version.addTableByColumns("20220227.3", "BT2_WORK_CHUNK", "ID");
|
||||
batchChunk.addColumn("ID").nonNullable().type(ColumnTypeEnum.STRING, 100);
|
||||
batchChunk.addColumn("SEQ").nonNullable().type(ColumnTypeEnum.INT);
|
||||
batchChunk.addColumn("CREATE_TIME").nonNullable().type(ColumnTypeEnum.DATE_TIMESTAMP);
|
||||
batchChunk.addColumn("START_TIME").nullable().type(ColumnTypeEnum.DATE_TIMESTAMP);
|
||||
batchChunk.addColumn("END_TIME").nullable().type(ColumnTypeEnum.DATE_TIMESTAMP);
|
||||
batchChunk.addColumn("DEFINITION_ID").nonNullable().type(ColumnTypeEnum.STRING, 100);
|
||||
batchChunk.addColumn("DEFINITION_VER").nonNullable().type(ColumnTypeEnum.INT);
|
||||
batchChunk.addColumn("STAT").nonNullable().type(ColumnTypeEnum.STRING, 20);
|
||||
batchChunk.addColumn("RECORDS_PROCESSED").nullable().type(ColumnTypeEnum.INT);
|
||||
batchChunk.addColumn("TGT_STEP_ID").nonNullable().type(ColumnTypeEnum.STRING, 100);
|
||||
batchChunk.addColumn("CHUNK_DATA").nullable().type(ColumnTypeEnum.CLOB);
|
||||
batchChunk.addColumn("INSTANCE_ID").nonNullable().type(ColumnTypeEnum.STRING, 100);
|
||||
batchChunk.addColumn("ERROR_MSG").nullable().type(ColumnTypeEnum.STRING, 500);
|
||||
batchChunk.addColumn("ERROR_COUNT").nullable().type(ColumnTypeEnum.INT);
|
||||
batchChunk.addIndex("20220227.4", "IDX_BT2WC_II_SEQ").unique(false).withColumns("ID", "SEQ");
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -317,7 +355,6 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
|
|||
.migratePostgresTextClobToBinaryClob("20211003.3", "SEARCH_QUERY_STRING");
|
||||
|
||||
|
||||
|
||||
}
|
||||
|
||||
private void init540() {
|
||||
|
|
|
@ -170,7 +170,7 @@ public class BaseJpaResourceProviderPatientR4 extends JpaResourceProviderR4<Pati
|
|||
* Beneficiary (Patient) demographics in this version
|
||||
*/
|
||||
@Operation(name = ProviderConstants.OPERATION_MEMBER_MATCH, idempotent = false, returnParameters = {
|
||||
@OperationParam(name = "MemberIdentifier", type = StringDt.class)
|
||||
@OperationParam(name = "MemberIdentifier", typeName = "string")
|
||||
})
|
||||
public Parameters patientMemberMatch(
|
||||
javax.servlet.http.HttpServletRequest theServletRequest,
|
||||
|
|
|
@ -34,6 +34,7 @@ import ca.uhn.fhir.jpa.api.config.DaoConfig;
|
|||
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
|
||||
import ca.uhn.fhir.jpa.api.dao.IDao;
|
||||
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
|
||||
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
|
||||
import ca.uhn.fhir.jpa.config.HapiFhirLocalContainerEntityManagerFactoryBean;
|
||||
import ca.uhn.fhir.jpa.config.HibernatePropertiesProvider;
|
||||
import ca.uhn.fhir.jpa.dao.BaseStorageDao;
|
||||
|
@ -166,7 +167,7 @@ public class SearchBuilder implements ISearchBuilder {
|
|||
@Autowired
|
||||
private FhirContext myContext;
|
||||
@Autowired
|
||||
private IdHelperService myIdHelperService;
|
||||
private IIdHelperService myIdHelperService;
|
||||
@Autowired(required = false)
|
||||
private IFulltextSearchSvc myFulltextSearchSvc;
|
||||
@Autowired(required = false)
|
||||
|
|
|
@@ -21,6 +21,7 @@ package ca.uhn.fhir.jpa.search.builder.predicate;
*/

import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
import ca.uhn.fhir.jpa.dao.index.IdHelperService;
import ca.uhn.fhir.jpa.dao.predicate.SearchFilterParser;
import ca.uhn.fhir.jpa.search.builder.QueryStack;

@@ -49,7 +50,7 @@ public class ResourceIdPredicateBuilder extends BasePredicateBuilder {
private static final Logger ourLog = LoggerFactory.getLogger(ResourceIdPredicateBuilder.class);

@Autowired
private IdHelperService myIdHelperService;
private IIdHelperService myIdHelperService;

/**
* Constructor

@@ -35,6 +35,7 @@ import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IDao;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
import ca.uhn.fhir.jpa.dao.BaseStorageDao;
import ca.uhn.fhir.jpa.dao.index.IdHelperService;
import ca.uhn.fhir.jpa.dao.predicate.PredicateBuilderReference;

@@ -116,7 +117,7 @@ public class ResourceLinkPredicateBuilder extends BaseJoiningPredicateBuilder {
@Autowired
private ISearchParamRegistry mySearchParamRegistry;
@Autowired
private IdHelperService myIdHelperService;
private IIdHelperService myIdHelperService;
@Autowired
private DaoRegistry myDaoRegistry;
@Autowired
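Several search and terminology services in this change (SearchBuilder, ResourceIdPredicateBuilder, ResourceLinkPredicateBuilder, TermCodeSystemStorageSvcImpl) now autowire the new IIdHelperService interface instead of the concrete IdHelperService class, so they depend only on the API surface. The pattern, with a placeholder class name:

    public class MyService {
        @Autowired
        private IIdHelperService myIdHelperService; // previously: IdHelperService
    }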
@@ -1400,7 +1400,7 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc {

String[] values = theFilter.getValue().split(",");
if (values.length == 0) {
throw new InvalidRequestException(Msg.code(2037) + "Invalid filter criteria - no codes specified");
throw new InvalidRequestException(Msg.code(2062) + "Invalid filter criteria - no codes specified");
}

List<Long> descendantCodePidList = getMultipleCodeParentPids(theSystem, theFilter.getProperty(), values);

@@ -1437,7 +1437,7 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc {
List<TermConcept> termConcepts = findCodes(theSystem, valuesList);
if (valuesList.size() != termConcepts.size()) {
String exMsg = getTermConceptsFetchExceptionMsg(termConcepts, valuesList);
throw new InvalidRequestException(Msg.code(2038) + "Invalid filter criteria - {" +
throw new InvalidRequestException(Msg.code(2064) + "Invalid filter criteria - {" +
Constants.codeSystemWithDefaultDescription(theSystem) + "}: " + exMsg);
}

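The error codes in these two exceptions were renumbered (2037 to 2062, 2038 to 2064), presumably to make room for codes claimed elsewhere in this change; the message text is unchanged. Msg.code(n) only contributes a stable prefix, so the client-facing message takes the form asserted by the tests later in this diff:

    // The delivered message begins with "HAPI-2062: " followed by the text below.
    throw new InvalidRequestException(Msg.code(2062) + "Invalid filter criteria - no codes specified");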
@@ -20,11 +20,12 @@ package ca.uhn.fhir.jpa.term;
* #L%
*/

import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.FhirVersionEnum;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter;
import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao;
import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;

@@ -34,7 +35,6 @@ import ca.uhn.fhir.jpa.dao.data.ITermConceptDao;
import ca.uhn.fhir.jpa.dao.data.ITermConceptDesignationDao;
import ca.uhn.fhir.jpa.dao.data.ITermConceptParentChildLinkDao;
import ca.uhn.fhir.jpa.dao.data.ITermConceptPropertyDao;
import ca.uhn.fhir.jpa.dao.index.IdHelperService;
import ca.uhn.fhir.jpa.entity.TermCodeSystem;
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
import ca.uhn.fhir.jpa.entity.TermConcept;

@@ -110,7 +110,7 @@ public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc {
@Autowired
protected ITermConceptDesignationDao myConceptDesignationDao;
@Autowired
protected IdHelperService myIdHelperService;
protected IIdHelperService myIdHelperService;
@Autowired
private ITermConceptParentChildLinkDao myConceptParentChildLinkDao;
@Autowired

@@ -129,7 +129,8 @@ public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc {
@Autowired
private IBatchJobSubmitter myJobSubmitter;

@Autowired @Qualifier(TERM_CODE_SYSTEM_VERSION_DELETE_JOB_NAME)
@Autowired
@Qualifier(TERM_CODE_SYSTEM_VERSION_DELETE_JOB_NAME)
private Job myTermCodeSystemVersionDeleteJob;


@@ -356,7 +357,7 @@ public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc {
@Override
@Transactional
public IIdType storeNewCodeSystemVersion(CodeSystem theCodeSystemResource, TermCodeSystemVersion theCodeSystemVersion,
RequestDetails theRequest, List<ValueSet> theValueSets, List<ConceptMap> theConceptMaps) {
RequestDetails theRequest, List<ValueSet> theValueSets, List<ConceptMap> theConceptMaps) {
assert TransactionSynchronizationManager.isActualTransactionActive();

Validate.notBlank(theCodeSystemResource.getUrl(), "theCodeSystemResource must have a URL");

@@ -383,8 +384,8 @@ public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc {
@Override
@Transactional
public void storeNewCodeSystemVersion(ResourcePersistentId theCodeSystemResourcePid, String theSystemUri,
String theSystemName, String theCodeSystemVersionId, TermCodeSystemVersion theCodeSystemVersion,
ResourceTable theCodeSystemResourceTable, RequestDetails theRequestDetails) {
String theSystemName, String theCodeSystemVersionId, TermCodeSystemVersion theCodeSystemVersion,
ResourceTable theCodeSystemResourceTable, RequestDetails theRequestDetails) {
assert TransactionSynchronizationManager.isActualTransactionActive();

ourLog.debug("Storing code system");

@@ -466,7 +467,7 @@ public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc {
}

ourLog.debug("Done saving concepts, flushing to database");
if (! myDeferredStorageSvc.isStorageQueueEmpty()) {
if (!myDeferredStorageSvc.isStorageQueueEmpty()) {
ourLog.info("Note that some concept saving has been deferred");
}
}

@@ -536,7 +537,7 @@ public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc {
conceptToAdd.setParentPids(null);
conceptToAdd.setCodeSystemVersion(theCsv);

if (conceptToAdd.getProperties() !=null)
if (conceptToAdd.getProperties() != null)
conceptToAdd.getProperties().forEach(termConceptProperty -> {
termConceptProperty.setConcept(theConceptToAdd);
termConceptProperty.setCodeSystemVersion(theCsv);

@@ -645,8 +646,8 @@ public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc {
for (TermConceptParentChildLink next : theNext.getChildren()) {
populateVersion(next.getChild(), theCodeSystemVersion);
}
theNext.getProperties().forEach(t->t.setCodeSystemVersion(theCodeSystemVersion));
theNext.getDesignations().forEach(t->t.setCodeSystemVersion(theCodeSystemVersion));
theNext.getProperties().forEach(t -> t.setCodeSystemVersion(theCodeSystemVersion));
theNext.getDesignations().forEach(t -> t.setCodeSystemVersion(theCodeSystemVersion));
}

private void saveConceptLink(TermConceptParentChildLink next) {
@@ -20,9 +20,10 @@ package ca.uhn.fhir.jpa.util;
* #L%
*/

import ca.uhn.fhir.jpa.dao.LegacySearchBuilder;
import ca.uhn.fhir.jpa.search.builder.SearchBuilder;

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.function.Consumer;


@@ -34,15 +35,21 @@ import java.util.function.Consumer;
*/
public class QueryChunker<T> {

public void chunk(List<T> theInput, Consumer<List<T>> theBatchConsumer) {
public void chunk(Collection<T> theInput, Consumer<List<T>> theBatchConsumer) {
chunk(theInput, SearchBuilder.getMaximumPageSize(), theBatchConsumer);
}

public void chunk(List<T> theInput, int theChunkSize, Consumer<List<T>> theBatchConsumer) {
for (int i = 0; i < theInput.size(); i += theChunkSize) {
public void chunk(Collection<T> theInput, int theChunkSize, Consumer<List<T>> theBatchConsumer) {
List<T> input;
if (theInput instanceof List) {
input = (List<T>) theInput;
} else {
input = new ArrayList<>(theInput);
}
for (int i = 0; i < input.size(); i += theChunkSize) {
int to = i + theChunkSize;
to = Math.min(to, theInput.size());
List<T> batch = theInput.subList(i, to);
to = Math.min(to, input.size());
List<T> batch = input.subList(i, to);
theBatchConsumer.accept(batch);
}
}
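QueryChunker.chunk() now accepts any Collection, copying into an ArrayList only when the input is not already a List, so callers no longer have to materialize a List themselves. A usage sketch with invented names (the overload without a size argument uses SearchBuilder.getMaximumPageSize() as the batch size):

    // Partition a set of PIDs into batches of at most 500 (an arbitrary size for this example).
    Set<Long> pids = loadPids(); // hypothetical helper
    new QueryChunker<Long>().chunk(pids, 500, batch -> {
        // 'batch' is a List<Long> of at most 500 entries, in iteration order
        processBatch(batch); // hypothetical helper
    });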
@ -0,0 +1,401 @@
|
|||
package ca.uhn.fhir.jpa.batch2;
|
||||
|
||||
import ca.uhn.fhir.batch2.api.IJobPersistence;
|
||||
import ca.uhn.fhir.batch2.jobs.imprt.NdJsonFileJson;
|
||||
import ca.uhn.fhir.batch2.model.JobInstance;
|
||||
import ca.uhn.fhir.batch2.model.StatusEnum;
|
||||
import ca.uhn.fhir.batch2.model.WorkChunk;
|
||||
import ca.uhn.fhir.jpa.dao.data.IBatch2JobInstanceRepository;
|
||||
import ca.uhn.fhir.jpa.dao.data.IBatch2WorkChunkRepository;
|
||||
import ca.uhn.fhir.jpa.dao.r4.BaseJpaR4Test;
|
||||
import ca.uhn.fhir.jpa.entity.Batch2JobInstanceEntity;
|
||||
import ca.uhn.fhir.jpa.entity.Batch2WorkChunkEntity;
|
||||
import ca.uhn.fhir.util.JsonUtil;
|
||||
import com.github.jsonldjava.shaded.com.google.common.collect.Lists;
|
||||
import org.junit.jupiter.api.MethodOrderer;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.TestMethodOrder;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.Date;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.Matchers.contains;
|
||||
import static org.hamcrest.Matchers.empty;
|
||||
import static org.hamcrest.Matchers.in;
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertFalse;
|
||||
import static org.junit.jupiter.api.Assertions.assertNotNull;
|
||||
import static org.junit.jupiter.api.Assertions.assertNull;
|
||||
import static org.junit.jupiter.api.Assertions.assertTrue;
|
||||
|
||||
@TestMethodOrder(MethodOrderer.MethodName.class)
|
||||
public class JpaJobPersistenceImplTest extends BaseJpaR4Test {
|
||||
|
||||
public static final String CHUNK_DATA = "{\"key\":\"value\"}";
|
||||
@Autowired
|
||||
private IJobPersistence mySvc;
|
||||
@Autowired
|
||||
private IBatch2WorkChunkRepository myWorkChunkRepository;
|
||||
@Autowired
|
||||
private IBatch2JobInstanceRepository myJobInstanceRepository;
|
||||
|
||||
@Test
|
||||
public void testDeleteInstance() {
|
||||
// Setup
|
||||
|
||||
JobInstance instance = createInstance();
|
||||
String instanceId = mySvc.storeNewInstance(instance);
|
||||
for (int i = 0; i < 10; i++) {
|
||||
mySvc.storeWorkChunk("definition-id", 1, "step-id", instanceId, i, JsonUtil.serialize(new NdJsonFileJson().setNdJsonText("{}")));
|
||||
}
|
||||
|
||||
// Execute
|
||||
|
||||
mySvc.deleteInstanceAndChunks(instanceId);
|
||||
|
||||
// Verify
|
||||
|
||||
runInTransaction(()->{
|
||||
assertEquals(0, myJobInstanceRepository.findAll().size());
|
||||
assertEquals(0, myWorkChunkRepository.findAll().size());
|
||||
});
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testDeleteChunks() {
|
||||
// Setup
|
||||
|
||||
JobInstance instance = createInstance();
|
||||
String instanceId = mySvc.storeNewInstance(instance);
|
||||
for (int i = 0; i < 10; i++) {
|
||||
mySvc.storeWorkChunk("definition-id", 1, "step-id", instanceId, i, CHUNK_DATA);
|
||||
}
|
||||
|
||||
// Execute
|
||||
|
||||
mySvc.deleteChunks(instanceId);
|
||||
|
||||
// Verify
|
||||
|
||||
runInTransaction(()->{
|
||||
assertEquals(1, myJobInstanceRepository.findAll().size());
|
||||
assertEquals(0, myWorkChunkRepository.findAll().size());
|
||||
});
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testStoreAndFetchInstance() {
|
||||
JobInstance instance = createInstance();
|
||||
String instanceId = mySvc.storeNewInstance(instance);
|
||||
|
||||
runInTransaction(() -> {
|
||||
Batch2JobInstanceEntity instanceEntity = myJobInstanceRepository.findById(instanceId).orElseThrow(() -> new IllegalStateException());
|
||||
assertEquals(StatusEnum.QUEUED, instanceEntity.getStatus());
|
||||
});
|
||||
|
||||
JobInstance foundInstance = mySvc.fetchInstanceAndMarkInProgress(instanceId).orElseThrow(() -> new IllegalStateException());
|
||||
assertEquals(instanceId, foundInstance.getInstanceId());
|
||||
assertEquals("definition-id", foundInstance.getJobDefinitionId());
|
||||
assertEquals(1, foundInstance.getJobDefinitionVersion());
|
||||
assertEquals(StatusEnum.IN_PROGRESS, foundInstance.getStatus());
|
||||
assertEquals(CHUNK_DATA, foundInstance.getParameters());
|
||||
|
||||
runInTransaction(() -> {
|
||||
Batch2JobInstanceEntity instanceEntity = myJobInstanceRepository.findById(instanceId).orElseThrow(() -> new IllegalStateException());
|
||||
assertEquals(StatusEnum.IN_PROGRESS, instanceEntity.getStatus());
|
||||
});
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testCancelInstance() {
|
||||
JobInstance instance = createInstance();
|
||||
String instanceId = mySvc.storeNewInstance(instance);
|
||||
|
||||
runInTransaction(() -> {
|
||||
Batch2JobInstanceEntity instanceEntity = myJobInstanceRepository.findById(instanceId).orElseThrow(() -> new IllegalStateException());
|
||||
assertEquals(StatusEnum.QUEUED, instanceEntity.getStatus());
|
||||
instanceEntity.setCancelled(true);
|
||||
myJobInstanceRepository.save(instanceEntity);
|
||||
});
|
||||
|
||||
mySvc.cancelInstance(instanceId);
|
||||
|
||||
JobInstance foundInstance = mySvc.fetchInstanceAndMarkInProgress(instanceId).orElseThrow(() -> new IllegalStateException());
|
||||
assertEquals(instanceId, foundInstance.getInstanceId());
|
||||
assertEquals("definition-id", foundInstance.getJobDefinitionId());
|
||||
assertEquals(1, foundInstance.getJobDefinitionVersion());
|
||||
assertEquals(StatusEnum.IN_PROGRESS, foundInstance.getStatus());
|
||||
assertTrue( foundInstance.isCancelled());
|
||||
assertEquals(CHUNK_DATA, foundInstance.getParameters());
|
||||
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testFetchInstanceAndMarkInProgress() {
|
||||
JobInstance instance = createInstance();
|
||||
String instanceId = mySvc.storeNewInstance(instance);
|
||||
|
||||
JobInstance foundInstance = mySvc.fetchInstanceAndMarkInProgress(instanceId).orElseThrow(() -> new IllegalStateException());
|
||||
assertEquals(36, foundInstance.getInstanceId().length());
|
||||
assertEquals("definition-id", foundInstance.getJobDefinitionId());
|
||||
assertEquals(1, foundInstance.getJobDefinitionVersion());
|
||||
assertEquals(StatusEnum.IN_PROGRESS, foundInstance.getStatus());
|
||||
assertEquals(CHUNK_DATA, foundInstance.getParameters());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testFetchChunks() {
|
||||
JobInstance instance = createInstance();
|
||||
String instanceId = mySvc.storeNewInstance(instance);
|
||||
|
||||
List<String> ids =new ArrayList<>();
|
||||
for (int i = 0; i < 10; i++) {
|
||||
String id = mySvc.storeWorkChunk("definition-id", 1, "step-id", instanceId, i, CHUNK_DATA);
|
||||
ids.add(id);
|
||||
}
|
||||
|
||||
List<WorkChunk> chunks = mySvc.fetchWorkChunksWithoutData(instanceId, 3, 0);
|
||||
assertEquals(null, chunks.get(0).getData());
|
||||
assertEquals(null, chunks.get(1).getData());
|
||||
assertEquals(null, chunks.get(2).getData());
|
||||
assertThat(chunks.stream().map(t->t.getId()).collect(Collectors.toList()),
|
||||
contains(ids.get(0), ids.get(1), ids.get(2)));
|
||||
|
||||
chunks = mySvc.fetchWorkChunksWithoutData(instanceId, 3, 1);
|
||||
assertThat(chunks.stream().map(t->t.getId()).collect(Collectors.toList()),
|
||||
contains(ids.get(3), ids.get(4), ids.get(5)));
|
||||
|
||||
chunks = mySvc.fetchWorkChunksWithoutData(instanceId, 3, 2);
|
||||
assertThat(chunks.stream().map(t->t.getId()).collect(Collectors.toList()),
|
||||
contains(ids.get(6), ids.get(7), ids.get(8)));
|
||||
|
||||
chunks = mySvc.fetchWorkChunksWithoutData(instanceId, 3, 3);
|
||||
assertThat(chunks.stream().map(t->t.getId()).collect(Collectors.toList()),
|
||||
contains(ids.get(9)));
|
||||
|
||||
chunks = mySvc.fetchWorkChunksWithoutData(instanceId, 3, 4);
|
||||
assertThat(chunks.stream().map(t->t.getId()).collect(Collectors.toList()),
|
||||
empty());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testFetchUnknownWork() {
|
||||
assertFalse(myWorkChunkRepository.findById("FOO").isPresent());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testStoreAndFetchWorkChunk_NoData() {
|
||||
JobInstance instance = createInstance();
|
||||
String instanceId = mySvc.storeNewInstance(instance);
|
||||
|
||||
String id = mySvc.storeWorkChunk("definition-id", 1, "step-id", instanceId, 0, null);
|
||||
|
||||
WorkChunk chunk = mySvc.fetchWorkChunkSetStartTimeAndMarkInProgress(id).orElseThrow(() -> new IllegalArgumentException());
|
||||
assertEquals(null, chunk.getData());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testStoreAndFetchWorkChunk_WithData() {
|
||||
JobInstance instance = createInstance();
|
||||
String instanceId = mySvc.storeNewInstance(instance);
|
||||
|
||||
String id = mySvc.storeWorkChunk("definition-id", 1, "step-id", instanceId, 0, CHUNK_DATA);
|
||||
assertNotNull(id);
|
||||
runInTransaction(() -> assertEquals(StatusEnum.QUEUED, myWorkChunkRepository.findById(id).orElseThrow(() -> new IllegalArgumentException()).getStatus()));
|
||||
|
||||
WorkChunk chunk = mySvc.fetchWorkChunkSetStartTimeAndMarkInProgress(id).orElseThrow(() -> new IllegalArgumentException());
|
||||
assertEquals(36, chunk.getInstanceId().length());
|
||||
assertEquals("definition-id", chunk.getJobDefinitionId());
|
||||
assertEquals(1, chunk.getJobDefinitionVersion());
|
||||
assertEquals(StatusEnum.IN_PROGRESS, chunk.getStatus());
|
||||
assertEquals(CHUNK_DATA, chunk.getData());
|
||||
|
||||
runInTransaction(() -> assertEquals(StatusEnum.IN_PROGRESS, myWorkChunkRepository.findById(id).orElseThrow(() -> new IllegalArgumentException()).getStatus()));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testMarkChunkAsCompleted_Success() {
|
||||
JobInstance instance = createInstance();
|
||||
String instanceId = mySvc.storeNewInstance(instance);
|
||||
String chunkId = mySvc.storeWorkChunk("definition-chunkId", 1, "step-chunkId", instanceId, 1, CHUNK_DATA);
|
||||
assertNotNull(chunkId);
|
||||
|
||||
runInTransaction(() -> assertEquals(StatusEnum.QUEUED, myWorkChunkRepository.findById(chunkId).orElseThrow(() -> new IllegalArgumentException()).getStatus()));
|
||||
|
||||
sleepUntilTimeChanges();
|
||||
|
||||
WorkChunk chunk = mySvc.fetchWorkChunkSetStartTimeAndMarkInProgress(chunkId).orElseThrow(() -> new IllegalArgumentException());
|
||||
assertEquals(1, chunk.getSequence());
|
||||
assertEquals(StatusEnum.IN_PROGRESS, chunk.getStatus());
|
||||
assertNotNull(chunk.getCreateTime());
|
||||
assertNotNull(chunk.getStartTime());
|
||||
assertNull(chunk.getEndTime());
|
||||
assertNull(chunk.getRecordsProcessed());
|
||||
assertNotNull(chunk.getData());
|
||||
runInTransaction(() -> assertEquals(StatusEnum.IN_PROGRESS, myWorkChunkRepository.findById(chunkId).orElseThrow(() -> new IllegalArgumentException()).getStatus()));
|
||||
|
||||
sleepUntilTimeChanges();
|
||||
|
||||
mySvc.markWorkChunkAsCompletedAndClearData(chunkId, 50);
|
||||
runInTransaction(() -> {
|
||||
Batch2WorkChunkEntity entity = myWorkChunkRepository.findById(chunkId).orElseThrow(() -> new IllegalArgumentException());
|
||||
assertEquals(StatusEnum.COMPLETED, entity.getStatus());
|
||||
assertEquals(50, entity.getRecordsProcessed());
|
||||
assertNotNull(entity.getCreateTime());
|
||||
assertNotNull(entity.getStartTime());
|
||||
assertNotNull(entity.getEndTime());
|
||||
assertNull(entity.getSerializedData());
|
||||
assertTrue(entity.getCreateTime().getTime() < entity.getStartTime().getTime());
|
||||
assertTrue(entity.getStartTime().getTime() < entity.getEndTime().getTime());
|
||||
});
|
||||
|
||||
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testMarkChunkAsCompleted_Error() {
|
||||
JobInstance instance = createInstance();
|
||||
String instanceId = mySvc.storeNewInstance(instance);
|
||||
String chunkId = mySvc.storeWorkChunk("definition-chunkId", 1, "step-chunkId", instanceId, 1, null);
|
||||
assertNotNull(chunkId);
|
||||
|
||||
runInTransaction(() -> assertEquals(StatusEnum.QUEUED, myWorkChunkRepository.findById(chunkId).orElseThrow(() -> new IllegalArgumentException()).getStatus()));
|
||||
|
||||
sleepUntilTimeChanges();
|
||||
|
||||
WorkChunk chunk = mySvc.fetchWorkChunkSetStartTimeAndMarkInProgress(chunkId).orElseThrow(() -> new IllegalArgumentException());
|
||||
assertEquals(1, chunk.getSequence());
|
||||
assertEquals(StatusEnum.IN_PROGRESS, chunk.getStatus());
|
||||
|
||||
sleepUntilTimeChanges();
|
||||
|
||||
mySvc.markWorkChunkAsErroredAndIncrementErrorCount(chunkId, "This is an error message");
|
||||
runInTransaction(() -> {
|
||||
Batch2WorkChunkEntity entity = myWorkChunkRepository.findById(chunkId).orElseThrow(() -> new IllegalArgumentException());
|
||||
assertEquals(StatusEnum.ERRORED, entity.getStatus());
|
||||
assertEquals("This is an error message", entity.getErrorMessage());
|
||||
assertNotNull(entity.getCreateTime());
|
||||
assertNotNull(entity.getStartTime());
|
||||
assertNotNull(entity.getEndTime());
|
||||
assertEquals(1, entity.getErrorCount());
|
||||
assertTrue(entity.getCreateTime().getTime() < entity.getStartTime().getTime());
|
||||
assertTrue(entity.getStartTime().getTime() < entity.getEndTime().getTime());
|
||||
});
|
||||
|
||||
// Mark errored again
|
||||
|
||||
mySvc.markWorkChunkAsErroredAndIncrementErrorCount(chunkId, "This is an error message 2");
|
||||
runInTransaction(() -> {
|
||||
Batch2WorkChunkEntity entity = myWorkChunkRepository.findById(chunkId).orElseThrow(() -> new IllegalArgumentException());
|
||||
assertEquals(StatusEnum.ERRORED, entity.getStatus());
|
||||
assertEquals("This is an error message 2", entity.getErrorMessage());
|
||||
assertNotNull(entity.getCreateTime());
|
||||
assertNotNull(entity.getStartTime());
|
||||
assertNotNull(entity.getEndTime());
|
||||
assertEquals(2, entity.getErrorCount());
|
||||
assertTrue(entity.getCreateTime().getTime() < entity.getStartTime().getTime());
|
||||
assertTrue(entity.getStartTime().getTime() < entity.getEndTime().getTime());
|
||||
});
|
||||
|
||||
List<WorkChunk> chunks = mySvc.fetchWorkChunksWithoutData(instanceId, 100, 0);
|
||||
assertEquals(1, chunks.size());
|
||||
assertEquals(2, chunks.get(0).getErrorCount());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testMarkChunkAsCompleted_Fail() {
|
||||
JobInstance instance = createInstance();
|
||||
String instanceId = mySvc.storeNewInstance(instance);
|
||||
String chunkId = mySvc.storeWorkChunk("definition-chunkId", 1, "step-chunkId", instanceId, 1, null);
|
||||
assertNotNull(chunkId);
|
||||
|
||||
runInTransaction(() -> assertEquals(StatusEnum.QUEUED, myWorkChunkRepository.findById(chunkId).orElseThrow(() -> new IllegalArgumentException()).getStatus()));
|
||||
|
||||
sleepUntilTimeChanges();
|
||||
|
||||
WorkChunk chunk = mySvc.fetchWorkChunkSetStartTimeAndMarkInProgress(chunkId).orElseThrow(() -> new IllegalArgumentException());
|
||||
assertEquals(1, chunk.getSequence());
|
||||
assertEquals(StatusEnum.IN_PROGRESS, chunk.getStatus());
|
||||
|
||||
sleepUntilTimeChanges();
|
||||
|
||||
mySvc.markWorkChunkAsFailed(chunkId, "This is an error message");
|
||||
runInTransaction(() -> {
|
||||
Batch2WorkChunkEntity entity = myWorkChunkRepository.findById(chunkId).orElseThrow(() -> new IllegalArgumentException());
|
||||
assertEquals(StatusEnum.FAILED, entity.getStatus());
|
||||
assertEquals("This is an error message", entity.getErrorMessage());
|
||||
assertNotNull(entity.getCreateTime());
|
||||
assertNotNull(entity.getStartTime());
|
||||
assertNotNull(entity.getEndTime());
|
||||
assertTrue(entity.getCreateTime().getTime() < entity.getStartTime().getTime());
|
||||
assertTrue(entity.getStartTime().getTime() < entity.getEndTime().getTime());
|
||||
});
|
||||
|
||||
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testMarkInstanceAsCompleted() {
|
||||
String instanceId = mySvc.storeNewInstance(createInstance());
|
||||
|
||||
mySvc.markInstanceAsCompleted(instanceId);
|
||||
|
||||
runInTransaction(()->{
|
||||
Batch2JobInstanceEntity entity = myJobInstanceRepository.findById(instanceId).orElseThrow(() -> new IllegalArgumentException());
|
||||
assertEquals(StatusEnum.COMPLETED, entity.getStatus());
|
||||
});
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testUpdateInstance() {
|
||||
String instanceId = mySvc.storeNewInstance(createInstance());
|
||||
|
||||
JobInstance instance = mySvc.fetchInstance(instanceId).orElseThrow(() -> new IllegalArgumentException());
|
||||
assertEquals(instanceId, instance.getInstanceId());
|
||||
assertFalse(instance.isWorkChunksPurged());
|
||||
|
||||
instance.setStartTime(new Date());
|
||||
sleepUntilTimeChanges();
|
||||
instance.setEndTime(new Date());
|
||||
instance.setCombinedRecordsProcessed(100);
|
||||
instance.setCombinedRecordsProcessedPerSecond(22.0);
|
||||
instance.setWorkChunksPurged(true);
|
||||
instance.setProgress(0.5d);
|
||||
instance.setErrorCount(3);
|
||||
instance.setEstimatedTimeRemaining("32d");
|
||||
|
||||
mySvc.updateInstance(instance);
|
||||
|
||||
runInTransaction(()->{
|
||||
Batch2JobInstanceEntity entity = myJobInstanceRepository.findById(instanceId).orElseThrow(() -> new IllegalArgumentException());
|
||||
assertEquals(instance.getStartTime().getTime(), entity.getStartTime().getTime());
|
||||
assertEquals(instance.getEndTime().getTime(), entity.getEndTime().getTime());
|
||||
});
|
||||
|
||||
JobInstance finalInstance = mySvc.fetchInstance(instanceId).orElseThrow(() -> new IllegalArgumentException());
|
||||
assertEquals(instanceId, finalInstance.getInstanceId());
|
||||
assertEquals(0.5d, finalInstance.getProgress());
|
||||
assertTrue(finalInstance.isWorkChunksPurged());
|
||||
assertEquals(3, finalInstance.getErrorCount());
|
||||
assertEquals(instance.getEstimatedTimeRemaining(), finalInstance.getEstimatedTimeRemaining());
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
private JobInstance createInstance() {
|
||||
JobInstance instance = new JobInstance();
|
||||
instance.setJobDefinitionId("definition-id");
|
||||
instance.setStatus(StatusEnum.QUEUED);
|
||||
instance.setJobDefinitionVersion(1);
|
||||
instance.setParameters(CHUNK_DATA);
|
||||
return instance;
|
||||
}
|
||||
|
||||
}
|
|
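Taken together, these tests pin down the work chunk lifecycle implemented by the new persistence layer: a chunk is stored as QUEUED, marked IN_PROGRESS when fetched for processing, and finished as COMPLETED (with its data cleared), ERRORED (which can happen repeatedly, incrementing the error count), or FAILED. Condensed into the IJobPersistence calls used above (bean and variable names assumed):

    String chunkId = jobPersistence.storeWorkChunk("definition-id", 1, "step-id", instanceId, 0, chunkJson); // QUEUED
    WorkChunk chunk = jobPersistence.fetchWorkChunkSetStartTimeAndMarkInProgress(chunkId).orElseThrow(IllegalStateException::new); // IN_PROGRESS
    jobPersistence.markWorkChunkAsCompletedAndClearData(chunkId, recordsProcessed); // COMPLETED, CHUNK_DATA nulled out
    // or: jobPersistence.markWorkChunkAsErroredAndIncrementErrorCount(chunkId, "error message"); // ERRORED
    // or: jobPersistence.markWorkChunkAsFailed(chunkId, "error message"); // FAILED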
@ -1,286 +0,0 @@
|
|||
package ca.uhn.fhir.jpa.bulk;
|
||||
|
||||
import ca.uhn.fhir.context.FhirContext;
|
||||
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
|
||||
import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc;
|
||||
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum;
|
||||
import ca.uhn.fhir.jpa.bulk.imprt.provider.BulkDataImportProvider;
|
||||
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobFileJson;
|
||||
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobJson;
|
||||
import ca.uhn.fhir.jpa.bulk.imprt.model.JobFileRowProcessingModeEnum;
|
||||
import ca.uhn.fhir.jpa.model.util.JpaConstants;
|
||||
import ca.uhn.fhir.rest.api.Constants;
|
||||
import ca.uhn.fhir.rest.client.apache.ResourceEntity;
|
||||
import ca.uhn.fhir.rest.server.RestfulServer;
|
||||
import ca.uhn.fhir.util.UrlUtil;
|
||||
import ca.uhn.fhir.test.utilities.JettyUtil;
|
||||
import com.google.common.base.Charsets;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.apache.http.client.entity.EntityBuilder;
|
||||
import org.apache.http.client.methods.CloseableHttpResponse;
|
||||
import org.apache.http.client.methods.HttpGet;
|
||||
import org.apache.http.client.methods.HttpPost;
|
||||
import org.apache.http.entity.ContentType;
|
||||
import org.apache.http.impl.client.CloseableHttpClient;
|
||||
import org.apache.http.impl.client.HttpClientBuilder;
|
||||
import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
|
||||
import org.apache.http.util.EntityUtils;
|
||||
import org.eclipse.jetty.server.Server;
|
||||
import org.eclipse.jetty.servlet.ServletHandler;
|
||||
import org.eclipse.jetty.servlet.ServletHolder;
|
||||
import org.hl7.fhir.r4.model.InstantType;
|
||||
import org.hl7.fhir.r4.model.IntegerType;
|
||||
import org.hl7.fhir.r4.model.StringType;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.List;
|
||||
|
||||
import org.junit.jupiter.api.AfterEach;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.extension.ExtendWith;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.mockito.ArgumentCaptor;
|
||||
import org.mockito.Captor;
|
||||
import org.mockito.Mock;
|
||||
import org.mockito.junit.jupiter.MockitoExtension;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.CoreMatchers.containsString;
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.mockito.ArgumentMatchers.any;
|
||||
import static org.mockito.Mockito.eq;
|
||||
import static org.mockito.Mockito.times;
|
||||
import static org.mockito.Mockito.verify;
|
||||
import static org.mockito.Mockito.when;
|
||||
|
||||
|
||||
@ExtendWith(MockitoExtension.class)
|
||||
public class BulkDataImportProviderTest {
|
||||
private static final String A_JOB_ID = "0000000-AAAAAA";
|
||||
private static final Logger ourLog = LoggerFactory.getLogger(BulkDataImportProviderTest.class);
|
||||
private Server myServer;
|
||||
private final FhirContext myCtx = FhirContext.forR4Cached();
|
||||
private int myPort;
|
||||
@Mock
|
||||
private IBulkDataImportSvc myBulkDataImportSvc;
|
||||
@Mock
|
||||
private IInterceptorBroadcaster myInterceptorBroadcaster;
|
||||
private CloseableHttpClient myClient;
|
||||
@Captor
|
||||
private ArgumentCaptor<BulkImportJobJson> myBulkImportJobJsonCaptor;
|
||||
@Captor
|
||||
private ArgumentCaptor<List<BulkImportJobFileJson>> myBulkImportJobFileJsonCaptor;
|
||||
|
||||
@AfterEach
|
||||
public void after() throws Exception {
|
||||
JettyUtil.closeServer(myServer);
|
||||
myClient.close();
|
||||
}
|
||||
|
||||
@BeforeEach
|
||||
public void start() throws Exception {
|
||||
myServer = new Server(0);
|
||||
|
||||
BulkDataImportProvider provider = new BulkDataImportProvider();
|
||||
provider.setBulkDataImportSvcForUnitTests(myBulkDataImportSvc);
|
||||
provider.setFhirContextForUnitTest(myCtx);
|
||||
|
||||
ServletHandler proxyHandler = new ServletHandler();
|
||||
RestfulServer servlet = new RestfulServer(myCtx);
|
||||
servlet.registerProvider(provider);
|
||||
ServletHolder servletHolder = new ServletHolder(servlet);
|
||||
proxyHandler.addServletWithMapping(servletHolder, "/*");
|
||||
myServer.setHandler(proxyHandler);
|
||||
JettyUtil.startServer(myServer);
|
||||
myPort = JettyUtil.getPortForStartedServer(myServer);
|
||||
|
||||
PoolingHttpClientConnectionManager connectionManager = new PoolingHttpClientConnectionManager(5000, TimeUnit.MILLISECONDS);
|
||||
HttpClientBuilder builder = HttpClientBuilder.create();
|
||||
builder.setConnectionManager(connectionManager);
|
||||
myClient = builder.build();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testSuccessfulInitiateBulkRequest_Post() throws IOException {
|
||||
when(myBulkDataImportSvc.createNewJob(any(), any())).thenReturn(A_JOB_ID);
|
||||
|
||||
HttpPost post = new HttpPost("http://localhost:" + myPort + "/" + JpaConstants.OPERATION_IMPORT +
|
||||
"?" + JpaConstants.PARAM_IMPORT_JOB_DESCRIPTION + "=" + UrlUtil.escapeUrlParam("My Import Job") +
|
||||
"&" + JpaConstants.PARAM_IMPORT_BATCH_SIZE + "=" + UrlUtil.escapeUrlParam("100"));
|
||||
|
||||
post.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC);
|
||||
post.setEntity(
|
||||
EntityBuilder.create()
|
||||
.setContentType(ContentType.create(Constants.CT_FHIR_NDJSON))
|
||||
.setText("{\"resourceType\":\"Patient\",\"id\":\"Pat1\"}\n" +
|
||||
"{\"resourceType\":\"Patient\",\"id\":\"Pat2\"}\n")
|
||||
.build());
|
||||
|
||||
ourLog.info("Request: {}", post);
|
||||
try (CloseableHttpResponse response = myClient.execute(post)) {
|
||||
ourLog.info("Response: {}", EntityUtils.toString(response.getEntity()));
|
||||
assertEquals(202, response.getStatusLine().getStatusCode());
|
||||
assertEquals("Accepted", response.getStatusLine().getReasonPhrase());
|
||||
assertEquals("http://localhost:" + myPort + "/$import-poll-status?_jobId=" + A_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue());
|
||||
}
|
||||
|
||||
verify(myBulkDataImportSvc, times(1)).createNewJob(myBulkImportJobJsonCaptor.capture(), myBulkImportJobFileJsonCaptor.capture());
|
||||
BulkImportJobJson options = myBulkImportJobJsonCaptor.getValue();
|
||||
assertEquals(1, options.getFileCount());
|
||||
assertEquals(100, options.getBatchSize());
|
||||
assertEquals(JobFileRowProcessingModeEnum.FHIR_TRANSACTION, options.getProcessingMode());
|
||||
assertEquals("My Import Job", options.getJobDescription());
|
||||
List<BulkImportJobFileJson> jobs = myBulkImportJobFileJsonCaptor.getValue();
|
||||
assertEquals(1, jobs.size());
|
||||
assertThat(jobs.get(0).getContents(), containsString("Pat1"));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testSuccessfulInitiateBulkRequest_Post_AllParameters() throws IOException {
|
||||
when(myBulkDataImportSvc.createNewJob(any(), any())).thenReturn(A_JOB_ID);
|
||||
|
||||
HttpPost post = new HttpPost("http://localhost:" + myPort + "/" + JpaConstants.OPERATION_IMPORT +
|
||||
"?" + JpaConstants.PARAM_IMPORT_JOB_DESCRIPTION + "=" + UrlUtil.escapeUrlParam("My Import Job") +
|
||||
"&" + JpaConstants.PARAM_IMPORT_PROCESSING_MODE + "=" + UrlUtil.escapeUrlParam(JobFileRowProcessingModeEnum.FHIR_TRANSACTION.toString()) +
|
||||
"&" + JpaConstants.PARAM_IMPORT_BATCH_SIZE + "=" + UrlUtil.escapeUrlParam("100") +
|
||||
"&" + JpaConstants.PARAM_IMPORT_FILE_COUNT + "=" + UrlUtil.escapeUrlParam("1"));
|
||||
|
||||
post.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC);
|
||||
post.setEntity(
|
||||
EntityBuilder.create()
|
||||
.setContentType(ContentType.create(Constants.CT_FHIR_NDJSON))
|
||||
.setText("{\"resourceType\":\"Patient\",\"id\":\"Pat1\"}\n" +
|
||||
"{\"resourceType\":\"Patient\",\"id\":\"Pat2\"}\n")
|
||||
.build());
|
||||
|
||||
ourLog.info("Request: {}", post);
|
||||
try (CloseableHttpResponse response = myClient.execute(post)) {
|
||||
ourLog.info("Response: {}", EntityUtils.toString(response.getEntity()));
|
||||
assertEquals(202, response.getStatusLine().getStatusCode());
|
||||
assertEquals("Accepted", response.getStatusLine().getReasonPhrase());
|
||||
assertEquals("http://localhost:" + myPort + "/$import-poll-status?_jobId=" + A_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue());
|
||||
}
|
||||
|
||||
verify(myBulkDataImportSvc, times(1)).createNewJob(myBulkImportJobJsonCaptor.capture(), myBulkImportJobFileJsonCaptor.capture());
|
||||
BulkImportJobJson options = myBulkImportJobJsonCaptor.getValue();
|
||||
assertEquals(1, options.getFileCount());
|
||||
assertEquals(100, options.getBatchSize());
|
||||
assertEquals(JobFileRowProcessingModeEnum.FHIR_TRANSACTION, options.getProcessingMode());
|
||||
assertEquals("My Import Job", options.getJobDescription());
|
||||
List<BulkImportJobFileJson> jobs = myBulkImportJobFileJsonCaptor.getValue();
|
||||
assertEquals(1, jobs.size());
|
||||
assertThat(jobs.get(0).getContents(), containsString("Pat1"));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testPollForStatus_STAGING() throws IOException {
|
||||
|
||||
IBulkDataImportSvc.JobInfo jobInfo = new IBulkDataImportSvc.JobInfo()
|
||||
.setStatus(BulkImportJobStatusEnum.STAGING)
|
||||
.setStatusTime(InstantType.now().getValue());
|
||||
when(myBulkDataImportSvc.getJobStatus(eq(A_JOB_ID))).thenReturn(jobInfo);
|
||||
|
||||
String url = "http://localhost:" + myPort + "/" + JpaConstants.OPERATION_IMPORT_POLL_STATUS + "?" +
|
||||
JpaConstants.PARAM_IMPORT_POLL_STATUS_JOB_ID + "=" + A_JOB_ID;
|
||||
HttpGet get = new HttpGet(url);
|
||||
get.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC);
|
||||
try (CloseableHttpResponse response = myClient.execute(get)) {
|
||||
ourLog.info("Response: {}", response.toString());
|
||||
|
||||
assertEquals(202, response.getStatusLine().getStatusCode());
|
||||
assertEquals("Accepted", response.getStatusLine().getReasonPhrase());
|
||||
assertEquals("120", response.getFirstHeader(Constants.HEADER_RETRY_AFTER).getValue());
|
||||
assertThat(response.getFirstHeader(Constants.HEADER_X_PROGRESS).getValue(), containsString("Status set to STAGING at 20"));
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testPollForStatus_READY() throws IOException {
|
||||
|
||||
IBulkDataImportSvc.JobInfo jobInfo = new IBulkDataImportSvc.JobInfo()
|
||||
.setStatus(BulkImportJobStatusEnum.READY)
|
||||
.setStatusTime(InstantType.now().getValue());
|
||||
when(myBulkDataImportSvc.getJobStatus(eq(A_JOB_ID))).thenReturn(jobInfo);
|
||||
|
||||
String url = "http://localhost:" + myPort + "/" + JpaConstants.OPERATION_IMPORT_POLL_STATUS + "?" +
|
||||
JpaConstants.PARAM_IMPORT_POLL_STATUS_JOB_ID + "=" + A_JOB_ID;
|
||||
HttpGet get = new HttpGet(url);
|
||||
get.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC);
|
||||
try (CloseableHttpResponse response = myClient.execute(get)) {
|
||||
ourLog.info("Response: {}", response.toString());
|
||||
|
||||
assertEquals(202, response.getStatusLine().getStatusCode());
|
||||
assertEquals("Accepted", response.getStatusLine().getReasonPhrase());
|
||||
assertEquals("120", response.getFirstHeader(Constants.HEADER_RETRY_AFTER).getValue());
|
||||
assertThat(response.getFirstHeader(Constants.HEADER_X_PROGRESS).getValue(), containsString("Status set to READY at 20"));
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testPollForStatus_RUNNING() throws IOException {
|
||||
|
||||
IBulkDataImportSvc.JobInfo jobInfo = new IBulkDataImportSvc.JobInfo()
|
||||
.setStatus(BulkImportJobStatusEnum.RUNNING)
|
||||
.setStatusTime(InstantType.now().getValue());
|
||||
when(myBulkDataImportSvc.getJobStatus(eq(A_JOB_ID))).thenReturn(jobInfo);
|
||||
|
||||
String url = "http://localhost:" + myPort + "/" + JpaConstants.OPERATION_IMPORT_POLL_STATUS + "?" +
|
||||
JpaConstants.PARAM_IMPORT_POLL_STATUS_JOB_ID + "=" + A_JOB_ID;
|
||||
HttpGet get = new HttpGet(url);
|
||||
get.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC);
|
||||
try (CloseableHttpResponse response = myClient.execute(get)) {
|
||||
ourLog.info("Response: {}", response.toString());
|
||||
|
||||
assertEquals(202, response.getStatusLine().getStatusCode());
|
||||
assertEquals("Accepted", response.getStatusLine().getReasonPhrase());
|
||||
assertEquals("120", response.getFirstHeader(Constants.HEADER_RETRY_AFTER).getValue());
|
||||
assertThat(response.getFirstHeader(Constants.HEADER_X_PROGRESS).getValue(), containsString("Status set to RUNNING at 20"));
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testPollForStatus_COMPLETE() throws IOException {
|
||||
IBulkDataImportSvc.JobInfo jobInfo = new IBulkDataImportSvc.JobInfo()
|
||||
.setStatus(BulkImportJobStatusEnum.COMPLETE)
|
||||
.setStatusTime(InstantType.now().getValue());
|
||||
when(myBulkDataImportSvc.getJobStatus(eq(A_JOB_ID))).thenReturn(jobInfo);
|
||||
|
||||
String url = "http://localhost:" + myPort + "/" + JpaConstants.OPERATION_IMPORT_POLL_STATUS + "?" +
|
||||
JpaConstants.PARAM_IMPORT_POLL_STATUS_JOB_ID + "=" + A_JOB_ID;
|
||||
HttpGet get = new HttpGet(url);
|
||||
get.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC);
|
||||
try (CloseableHttpResponse response = myClient.execute(get)) {
|
||||
ourLog.info("Response: {}", response.toString());
|
||||
|
||||
assertEquals(200, response.getStatusLine().getStatusCode());
|
||||
assertEquals("OK", response.getStatusLine().getReasonPhrase());
|
||||
assertThat(response.getEntity().getContentType().getValue(), containsString(Constants.CT_FHIR_JSON));
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testPollForStatus_ERROR() throws IOException {
|
||||
IBulkDataImportSvc.JobInfo jobInfo = new IBulkDataImportSvc.JobInfo()
|
||||
.setStatus(BulkImportJobStatusEnum.ERROR)
|
||||
.setStatusMessage("It failed.")
|
||||
.setStatusTime(InstantType.now().getValue());
|
||||
when(myBulkDataImportSvc.getJobStatus(eq(A_JOB_ID))).thenReturn(jobInfo);
|
||||
|
||||
String url = "http://localhost:" + myPort + "/" + JpaConstants.OPERATION_IMPORT_POLL_STATUS + "?" +
|
||||
JpaConstants.PARAM_IMPORT_POLL_STATUS_JOB_ID + "=" + A_JOB_ID;
|
||||
HttpGet get = new HttpGet(url);
|
||||
get.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC);
|
||||
try (CloseableHttpResponse response = myClient.execute(get)) {
|
||||
ourLog.info("Response: {}", response.toString());
|
||||
|
||||
assertEquals(500, response.getStatusLine().getStatusCode());
|
||||
assertEquals("Server Error", response.getStatusLine().getReasonPhrase());
|
||||
String responseContent = IOUtils.toString(response.getEntity().getContent(), Charsets.UTF_8);
|
||||
ourLog.info("Response content: {}", responseContent);
|
||||
assertThat(responseContent, containsString("\"diagnostics\": \"It failed.\""));
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,380 @@
|
|||
package ca.uhn.fhir.jpa.bulk.imprt2;
|
||||
|
||||
import ca.uhn.fhir.batch2.api.IJobCleanerService;
|
||||
import ca.uhn.fhir.batch2.api.IJobCoordinator;
|
||||
import ca.uhn.fhir.batch2.jobs.imprt.BulkImport2AppCtx;
|
||||
import ca.uhn.fhir.batch2.jobs.imprt.BulkImportFileServlet;
|
||||
import ca.uhn.fhir.batch2.jobs.imprt.BulkImportJobParameters;
|
||||
import ca.uhn.fhir.batch2.model.JobInstance;
|
||||
import ca.uhn.fhir.batch2.model.JobInstanceStartRequest;
|
||||
import ca.uhn.fhir.batch2.model.StatusEnum;
|
||||
import ca.uhn.fhir.interceptor.api.IAnonymousInterceptor;
|
||||
import ca.uhn.fhir.interceptor.api.Pointcut;
|
||||
import ca.uhn.fhir.jpa.dao.data.IBatch2JobInstanceRepository;
|
||||
import ca.uhn.fhir.jpa.dao.data.IBatch2WorkChunkRepository;
|
||||
import ca.uhn.fhir.jpa.dao.r4.BaseJpaR4Test;
|
||||
import ca.uhn.fhir.jpa.subscription.channel.api.IChannelReceiver;
|
||||
import ca.uhn.fhir.jpa.subscription.channel.impl.LinkedBlockingChannel;
|
||||
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
|
||||
import ca.uhn.fhir.test.utilities.ProxyUtil;
|
||||
import ca.uhn.fhir.test.utilities.server.HttpServletExtension;
|
||||
import ca.uhn.fhir.util.JsonUtil;
|
||||
import org.hl7.fhir.r4.model.Observation;
|
||||
import org.hl7.fhir.r4.model.Patient;
|
||||
import org.junit.jupiter.api.AfterEach;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.extension.RegisterExtension;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.beans.factory.annotation.Qualifier;
|
||||
|
||||
import java.io.StringReader;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import static org.awaitility.Awaitility.await;
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.Matchers.blankOrNullString;
|
||||
import static org.hamcrest.Matchers.containsString;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.greaterThan;
|
||||
import static org.hamcrest.Matchers.not;
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertNotNull;
|
||||
import static org.junit.jupiter.api.Assertions.assertNull;
|
||||
import static org.junit.jupiter.api.Assertions.fail;
|
||||
|
||||
public class BulkImportR4Test extends BaseJpaR4Test {
|
||||
|
||||
private static final Logger ourLog = LoggerFactory.getLogger(BulkImportR4Test.class);
|
||||
private final BulkImportFileServlet myBulkImportFileServlet = new BulkImportFileServlet();
|
||||
@RegisterExtension
|
||||
private final HttpServletExtension myHttpServletExtension = new HttpServletExtension()
|
||||
.withServlet(myBulkImportFileServlet);
|
||||
@Autowired
|
||||
private IJobCoordinator myJobCoordinator;
|
||||
@Autowired
|
||||
private IJobCleanerService myJobCleanerService;
|
||||
@Autowired
|
||||
private IBatch2JobInstanceRepository myJobInstanceRepository;
|
||||
@Autowired
|
||||
private IBatch2WorkChunkRepository myWorkChunkRepository;
|
||||
@Qualifier("batch2ProcessingChannelReceiver")
|
||||
@Autowired
|
||||
private IChannelReceiver myChannelReceiver;
|
||||
|
||||
@AfterEach
|
||||
public void afterEach() {
|
||||
myBulkImportFileServlet.clearFiles();
|
||||
|
||||
LinkedBlockingChannel channel = ProxyUtil.getSingletonTarget(myChannelReceiver, LinkedBlockingChannel.class);
|
||||
await().until(() -> channel.getQueueSizeForUnitTest() == 0);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testRunBulkImport() {
|
||||
// Setup
|
||||
|
||||
int fileCount = 100;
|
||||
List<String> indexes = addFiles(fileCount);
|
||||
|
||||
BulkImportJobParameters parameters = new BulkImportJobParameters();
|
||||
for (String next : indexes) {
|
||||
String url = myHttpServletExtension.getBaseUrl() + "/download?index=" + next;
|
||||
parameters.addNdJsonUrl(url);
|
||||
}
|
||||
|
||||
JobInstanceStartRequest request = new JobInstanceStartRequest();
|
||||
request.setJobDefinitionId(BulkImport2AppCtx.JOB_BULK_IMPORT_PULL);
|
||||
request.setParameters(parameters);
|
||||
|
||||
// Execute
|
||||
|
||||
String instanceId = myJobCoordinator.startInstance(request);
|
||||
assertThat(instanceId, not(blankOrNullString()));
|
||||
ourLog.info("Execution got ID: {}", instanceId);
|
||||
|
||||
// Verify
|
||||
|
||||
await().until(() -> {
|
||||
myJobCleanerService.runCleanupPass();
|
||||
JobInstance instance = myJobCoordinator.getInstance(instanceId);
|
||||
return instance.getStatus();
|
||||
}, equalTo(StatusEnum.COMPLETED));
|
||||
|
||||
runInTransaction(() -> {
|
||||
assertEquals(200, myResourceTableDao.count());
|
||||
});
|
||||
|
||||
runInTransaction(() -> {
|
||||
JobInstance instance = myJobCoordinator.getInstance(instanceId);
|
||||
ourLog.info("Instance details:\n{}", JsonUtil.serialize(instance, true));
|
||||
assertEquals(0, instance.getErrorCount());
|
||||
assertNotNull(instance.getCreateTime());
|
||||
assertNotNull(instance.getStartTime());
|
||||
assertNotNull(instance.getEndTime());
|
||||
assertEquals(200, instance.getCombinedRecordsProcessed());
|
||||
assertThat(instance.getCombinedRecordsProcessedPerSecond(), greaterThan(5.0));
|
||||
});
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testRunBulkImport_StorageFailure() {
|
||||
// Setup
|
||||
|
||||
int fileCount = 3;
|
||||
List<String> indexes = addFiles(fileCount);
|
||||
|
||||
BulkImportJobParameters parameters = new BulkImportJobParameters();
|
||||
for (String next : indexes) {
|
||||
String url = myHttpServletExtension.getBaseUrl() + "/download?index=" + next;
|
||||
parameters.addNdJsonUrl(url);
|
||||
}
|
||||
|
||||
JobInstanceStartRequest request = new JobInstanceStartRequest();
|
||||
request.setJobDefinitionId(BulkImport2AppCtx.JOB_BULK_IMPORT_PULL);
|
||||
request.setParameters(parameters);
|
||||
|
||||
IAnonymousInterceptor anonymousInterceptor = (thePointcut, theArgs) -> {
|
||||
throw new NullPointerException("This is an exception");
|
||||
};
|
||||
myInterceptorRegistry.registerAnonymousInterceptor(Pointcut.STORAGE_PRESTORAGE_RESOURCE_CREATED, anonymousInterceptor);
|
||||
try {
|
||||
|
||||
// Execute
|
||||
|
||||
String instanceId = myJobCoordinator.startInstance(request);
|
||||
assertThat(instanceId, not(blankOrNullString()));
|
||||
ourLog.info("Execution got ID: {}", instanceId);
|
||||
|
||||
// Verify
|
||||
|
||||
await().until(() -> {
|
||||
myJobCleanerService.runCleanupPass();
|
||||
JobInstance instance = myJobCoordinator.getInstance(instanceId);
|
||||
return instance.getStatus();
|
||||
}, equalTo(StatusEnum.ERRORED));
|
||||
|
||||
String storageDescription = runInTransaction(() -> {
|
||||
assertEquals(0, myResourceTableDao.count());
|
||||
String storage = myJobInstanceRepository
|
||||
.findAll()
|
||||
.stream()
|
||||
.map(t -> "\n * " + t.toString())
|
||||
.collect(Collectors.joining(""));
|
||||
storage += myWorkChunkRepository
|
||||
.findAll()
|
||||
.stream()
|
||||
.map(t -> "\n * " + t.toString())
|
||||
.collect(Collectors.joining(""));
|
||||
ourLog.info("Stored entities:{}", storage);
|
||||
return storage;
|
||||
});
|
||||
|
||||
await().until(() -> {
|
||||
myJobCleanerService.runCleanupPass();
|
||||
JobInstance instance = myJobCoordinator.getInstance(instanceId);
|
||||
return instance.getErrorCount();
|
||||
}, equalTo(3));
|
||||
|
||||
runInTransaction(() -> {
|
||||
JobInstance instance = myJobCoordinator.getInstance(instanceId);
|
||||
ourLog.info("Instance details:\n{}", JsonUtil.serialize(instance, true));
|
||||
assertEquals(3, instance.getErrorCount(), storageDescription);
|
||||
assertNotNull(instance.getCreateTime());
|
||||
assertNotNull(instance.getStartTime());
|
||||
assertNull(instance.getEndTime());
|
||||
assertThat(instance.getErrorMessage(), containsString("NullPointerException: This is an exception"));
|
||||
});
|
||||
|
||||
} finally {
|
||||
|
||||
myInterceptorRegistry.unregisterInterceptor(anonymousInterceptor);
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testRunBulkImport_InvalidFileContents() {
|
||||
// Setup
|
||||
|
||||
int fileCount = 3;
|
||||
List<String> indexes = addFiles(fileCount - 1);
|
||||
indexes.add(myBulkImportFileServlet.registerFile(() -> new StringReader("{\"resourceType\":\"Foo\"}")));
|
||||
|
||||
BulkImportJobParameters parameters = new BulkImportJobParameters();
|
||||
for (String next : indexes) {
|
||||
String url = myHttpServletExtension.getBaseUrl() + "/download?index=" + next;
|
||||
parameters.addNdJsonUrl(url);
|
||||
}
|
||||
|
||||
JobInstanceStartRequest request = new JobInstanceStartRequest();
|
||||
request.setJobDefinitionId(BulkImport2AppCtx.JOB_BULK_IMPORT_PULL);
|
||||
request.setParameters(parameters);
|
||||
|
||||
// Execute
|
||||
|
||||
String instanceId = myJobCoordinator.startInstance(request);
|
||||
assertThat(instanceId, not(blankOrNullString()));
|
||||
ourLog.info("Execution got ID: {}", instanceId);
|
||||
|
||||
// Verify
|
||||
|
||||
await().until(() -> {
|
||||
myJobCleanerService.runCleanupPass();
|
||||
JobInstance instance = myJobCoordinator.getInstance(instanceId);
|
||||
return instance.getStatus();
|
||||
}, equalTo(StatusEnum.FAILED));
|
||||
|
||||
JobInstance instance = myJobCoordinator.getInstance(instanceId);
|
||||
ourLog.info("Instance details:\n{}", JsonUtil.serialize(instance, true));
|
||||
assertEquals(1, instance.getErrorCount());
|
||||
assertEquals(StatusEnum.FAILED, instance.getStatus());
|
||||
assertNotNull(instance.getCreateTime());
|
||||
assertNotNull(instance.getStartTime());
|
||||
assertNotNull(instance.getEndTime());
|
||||
assertThat(instance.getErrorMessage(), containsString("Unknown resource name \"Foo\""));
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testRunBulkImport_UnknownTargetFile() {
|
||||
// Setup
|
||||
|
||||
BulkImportJobParameters parameters = new BulkImportJobParameters();
|
||||
String url = myHttpServletExtension.getBaseUrl() + "/download?index=FOO";
|
||||
parameters.addNdJsonUrl(url);
|
||||
|
||||
JobInstanceStartRequest request = new JobInstanceStartRequest();
|
||||
request.setJobDefinitionId(BulkImport2AppCtx.JOB_BULK_IMPORT_PULL);
|
||||
request.setParameters(parameters);
|
||||
|
||||
IAnonymousInterceptor anonymousInterceptor = (thePointcut, theArgs) -> {
|
||||
throw new NullPointerException("This is an exception");
|
||||
};
|
||||
myInterceptorRegistry.registerAnonymousInterceptor(Pointcut.STORAGE_PRESTORAGE_RESOURCE_CREATED, anonymousInterceptor);
|
||||
try {
|
||||
|
||||
// Execute
|
||||
|
||||
String instanceId = myJobCoordinator.startInstance(request);
|
||||
assertThat(instanceId, not(blankOrNullString()));
|
||||
ourLog.info("Execution got ID: {}", instanceId);
|
||||
|
||||
// Verify
|
||||
|
||||
await().until(() -> {
|
||||
myJobCleanerService.runCleanupPass();
JobInstance instance = myJobCoordinator.getInstance(instanceId);
return instance.getStatus();
}, equalTo(StatusEnum.FAILED));

runInTransaction(() -> {
JobInstance instance = myJobCoordinator.getInstance(instanceId);
ourLog.info("Instance details:\n{}", JsonUtil.serialize(instance, true));
assertEquals(1, instance.getErrorCount());
assertNotNull(instance.getCreateTime());
assertNotNull(instance.getStartTime());
assertNotNull(instance.getEndTime());
assertThat(instance.getErrorMessage(), containsString("Received HTTP 404 from URL: http://"));
});

} finally {

myInterceptorRegistry.unregisterInterceptor(anonymousInterceptor);

}
}

@Test
public void testStartInvalidJob_NoParameters() {
// Setup

JobInstanceStartRequest request = new JobInstanceStartRequest();
request.setJobDefinitionId(BulkImport2AppCtx.JOB_BULK_IMPORT_PULL);

// Execute

try {
myJobCoordinator.startInstance(request);
fail();
} catch (InvalidRequestException e) {

// Verify
assertEquals("HAPI-2065: No parameters supplied", e.getMessage());

}
}

@Test
public void testStartInvalidJob_NoUrls() {
// Setup

JobInstanceStartRequest request = new JobInstanceStartRequest();
request.setJobDefinitionId(BulkImport2AppCtx.JOB_BULK_IMPORT_PULL);
request.setParameters(new BulkImportJobParameters());

// Execute

try {
myJobCoordinator.startInstance(request);
fail();
} catch (InvalidRequestException e) {

// Verify
assertEquals("HAPI-2039: Failed to validate parameters for job of type BULK_IMPORT_PULL: [myNdJsonUrls At least one NDJSON URL must be provided]", e.getMessage());

}
}

@Test
public void testStartInvalidJob_InvalidUrls() {
// Setup

BulkImportJobParameters parameters = new BulkImportJobParameters();
parameters.addNdJsonUrl("foo");

JobInstanceStartRequest request = new JobInstanceStartRequest();
request.setJobDefinitionId(BulkImport2AppCtx.JOB_BULK_IMPORT_PULL);
request.setParameters(parameters);

// Execute

try {
myJobCoordinator.startInstance(request);
fail();
} catch (InvalidRequestException e) {

// Verify
assertEquals("HAPI-2039: Failed to validate parameters for job of type BULK_IMPORT_PULL: [myNdJsonUrls[0].<list element> Must be a valid URL]", e.getMessage());

}
}

private List<String> addFiles(int fileCount) {
List<String> retVal = new ArrayList<>();
for (int i = 0; i < fileCount; i++) {
StringBuilder builder = new StringBuilder();

Patient patient = new Patient();
patient.setId("Patient/P" + i);
patient.setActive(true);
builder.append(myFhirContext.newJsonParser().setPrettyPrint(false).encodeResourceToString(patient));
builder.append("\n");

Observation observation = new Observation();
observation.setId("Observation/O" + i);
observation.getSubject().setReference("Patient/P" + i);
builder.append(myFhirContext.newJsonParser().setPrettyPrint(false).encodeResourceToString(observation));
builder.append("\n");
builder.append("\n");

String index = myBulkImportFileServlet.registerFile(() -> new StringReader(builder.toString()));
retVal.add(index);
}
return retVal;
}
}

@ -0,0 +1,205 @@
package ca.uhn.fhir.jpa.bulk.imprt2;

import ca.uhn.fhir.batch2.api.JobExecutionFailedException;
import ca.uhn.fhir.batch2.jobs.imprt.ConsumeFilesStep;
import ca.uhn.fhir.jpa.dao.r4.BaseJpaR4Test;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.r4.model.IdType;
import org.hl7.fhir.r4.model.Patient;
import org.junit.jupiter.api.MethodOrderer;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestMethodOrder;
import org.springframework.beans.factory.annotation.Autowired;

import java.util.ArrayList;
import java.util.List;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.either;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;

@TestMethodOrder(MethodOrderer.MethodName.class)
public class ConsumeFilesStepR4Test extends BaseJpaR4Test {

@Autowired
private ConsumeFilesStep mySvc;

@Test
public void testAlreadyExisting_NoChanges() {
// Setup

Patient patient = new Patient();
patient.setId("A");
patient.setActive(true);
myPatientDao.update(patient);

patient = new Patient();
patient.setId("B");
patient.setActive(false);
myPatientDao.update(patient);


List<IBaseResource> resources = new ArrayList<>();

patient = new Patient();
patient.setId("Patient/A");
patient.setActive(true);
resources.add(patient);

patient = new Patient();
patient.setId("Patient/B");
patient.setActive(false);
resources.add(patient);

// Execute

myMemoryCacheService.invalidateAllCaches();
myCaptureQueriesListener.clear();
mySvc.storeResources(resources);

// Validate

assertEquals(4, myCaptureQueriesListener.logSelectQueries().size());
assertEquals(0, myCaptureQueriesListener.countInsertQueries());
assertEquals(0, myCaptureQueriesListener.countUpdateQueries());
assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
assertEquals(1, myCaptureQueriesListener.countCommits());
assertEquals(0, myCaptureQueriesListener.countRollbacks());

patient = myPatientDao.read(new IdType("Patient/A"));
assertTrue(patient.getActive());
patient = myPatientDao.read(new IdType("Patient/B"));
assertFalse(patient.getActive());

}

@Test
public void testAlreadyExisting_WithChanges() {
// Setup

Patient patient = new Patient();
patient.setId("A");
patient.setActive(false);
myPatientDao.update(patient);

patient = new Patient();
patient.setId("B");
patient.setActive(true);
myPatientDao.update(patient);


List<IBaseResource> resources = new ArrayList<>();

patient = new Patient();
patient.setId("Patient/A");
patient.setActive(true);
resources.add(patient);

patient = new Patient();
patient.setId("Patient/B");
patient.setActive(false);
resources.add(patient);

// Execute

myMemoryCacheService.invalidateAllCaches();
myCaptureQueriesListener.clear();
mySvc.storeResources(resources);

// Validate

assertEquals(4, myCaptureQueriesListener.logSelectQueries().size());
assertEquals(2, myCaptureQueriesListener.logInsertQueries());
assertEquals(4, myCaptureQueriesListener.logUpdateQueries());
assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
assertEquals(1, myCaptureQueriesListener.countCommits());
assertEquals(0, myCaptureQueriesListener.countRollbacks());

patient = myPatientDao.read(new IdType("Patient/A"));
assertTrue(patient.getActive());
patient = myPatientDao.read(new IdType("Patient/B"));
assertFalse(patient.getActive());

}

@Test
public void testNotAlreadyExisting() {

// Setup

List<IBaseResource> resources = new ArrayList<>();

Patient patient = new Patient();
patient.setId("A");
patient.setActive(true);
resources.add(patient);

patient = new Patient();
patient.setId("B");
patient.setActive(false);
resources.add(patient);

// Execute

myCaptureQueriesListener.clear();
mySvc.storeResources(resources);

// Validate

assertEquals(1, myCaptureQueriesListener.logSelectQueries().size());
assertThat(myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false),
either(containsString("forcedid0_.RESOURCE_TYPE='Patient' and forcedid0_.FORCED_ID='B' or forcedid0_.RESOURCE_TYPE='Patient' and forcedid0_.FORCED_ID='A'"))
.or(containsString("forcedid0_.RESOURCE_TYPE='Patient' and forcedid0_.FORCED_ID='A' or forcedid0_.RESOURCE_TYPE='Patient' and forcedid0_.FORCED_ID='B'")));
assertEquals(10, myCaptureQueriesListener.logInsertQueries());
assertEquals(0, myCaptureQueriesListener.countUpdateQueries());
assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
assertEquals(1, myCaptureQueriesListener.countCommits());
assertEquals(0, myCaptureQueriesListener.countRollbacks());


patient = myPatientDao.read(new IdType("Patient/A"));
assertTrue(patient.getActive());
patient = myPatientDao.read(new IdType("Patient/B"));
assertFalse(patient.getActive());

}

@Test
public void testNotAlreadyExisting_InvalidIdForStorage() {
// Setup

List<IBaseResource> resources = new ArrayList<>();

Patient patient = new Patient();
patient.setId("1");
patient.setActive(true);
resources.add(patient);

patient = new Patient();
patient.setId("2");
patient.setActive(false);
resources.add(patient);

// Execute

myCaptureQueriesListener.clear();
try {

mySvc.storeResources(resources);
fail();

} catch (JobExecutionFailedException e) {

// Validate
assertThat(e.getMessage(), containsString("no resource with this ID exists and clients may only assign IDs"));

}


}

}

@ -1,6 +1,8 @@
|
|||
package ca.uhn.fhir.jpa.config;
|
||||
|
||||
import ca.uhn.fhir.batch2.jobs.config.Batch2JobsConfig;
|
||||
import ca.uhn.fhir.context.FhirContext;
|
||||
import ca.uhn.fhir.jpa.batch2.JpaBatch2Config;
|
||||
import ca.uhn.fhir.jpa.binstore.IBinaryStorageSvc;
|
||||
import ca.uhn.fhir.jpa.binstore.MemoryBinaryStorageSvcImpl;
|
||||
import ca.uhn.fhir.jpa.config.r4.JpaR4Config;
|
||||
|
@ -24,7 +26,13 @@ import org.springframework.context.annotation.Lazy;
|
|||
import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean;
|
||||
|
||||
import javax.sql.DataSource;
|
||||
import java.lang.management.ManagementFactory;
|
||||
import java.lang.management.ThreadInfo;
|
||||
import java.lang.management.ThreadMXBean;
|
||||
import java.sql.Connection;
|
||||
import java.util.Deque;
|
||||
import java.util.Iterator;
|
||||
import java.util.LinkedList;
|
||||
import java.util.Properties;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
|
@ -35,7 +43,9 @@ import static org.junit.jupiter.api.Assertions.fail;
|
|||
JpaR4Config.class,
|
||||
HapiJpaConfig.class,
|
||||
TestJPAConfig.class,
|
||||
TestHibernateSearchAddInConfig.DefaultLuceneHeap.class
|
||||
TestHibernateSearchAddInConfig.DefaultLuceneHeap.class,
|
||||
JpaBatch2Config.class,
|
||||
Batch2JobsConfig.class
|
||||
})
|
||||
public class TestR4Config {
|
||||
|
||||
|
@ -60,8 +70,10 @@ public class TestR4Config {
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
private Exception myLastStackTrace;
|
||||
private final Deque<Exception> myLastStackTrace = new LinkedList<>();
|
||||
@Autowired
|
||||
TestHibernateSearchAddInConfig.IHibernateSearchConfigurer hibernateSearchConfigurer;
|
||||
private boolean myHaveDumpedThreads;
|
||||
|
||||
@Bean
|
||||
public CircularQueueCaptureQueriesListener captureQueriesListener() {
|
||||
|
@ -87,7 +99,12 @@ public class TestR4Config {
|
|||
try {
|
||||
throw new Exception();
|
||||
} catch (Exception e) {
|
||||
myLastStackTrace = e;
|
||||
synchronized (myLastStackTrace) {
|
||||
myLastStackTrace.add(e);
|
||||
while (myLastStackTrace.size() > ourMaxThreads) {
|
||||
myLastStackTrace.removeFirst();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return retVal;
|
||||
|
@ -95,19 +112,32 @@ public class TestR4Config {
|
|||
|
||||
private void logGetConnectionStackTrace() {
|
||||
StringBuilder b = new StringBuilder();
|
||||
b.append("Last connection request stack trace:");
|
||||
for (StackTraceElement next : myLastStackTrace.getStackTrace()) {
|
||||
b.append("\n ");
|
||||
b.append(next.getClassName());
|
||||
b.append(".");
|
||||
b.append(next.getMethodName());
|
||||
b.append("(");
|
||||
b.append(next.getFileName());
|
||||
b.append(":");
|
||||
b.append(next.getLineNumber());
|
||||
b.append(")");
|
||||
int i = 0;
|
||||
synchronized (myLastStackTrace) {
|
||||
for (Iterator<Exception> iter = myLastStackTrace.descendingIterator(); iter.hasNext(); ) {
|
||||
Exception nextStack = iter.next();
|
||||
b.append("\n\nPrevious request stack trace ");
|
||||
b.append(i++);
|
||||
b.append(":");
|
||||
for (StackTraceElement next : nextStack.getStackTrace()) {
|
||||
b.append("\n ");
|
||||
b.append(next.getClassName());
|
||||
b.append(".");
|
||||
b.append(next.getMethodName());
|
||||
b.append("(");
|
||||
b.append(next.getFileName());
|
||||
b.append(":");
|
||||
b.append(next.getLineNumber());
|
||||
b.append(")");
|
||||
}
|
||||
}
|
||||
}
|
||||
ourLog.info(b.toString());
|
||||
|
||||
if (!myHaveDumpedThreads) {
|
||||
ourLog.info("Thread dump:" + crunchifyGenerateThreadDump());
|
||||
myHaveDumpedThreads = true;
|
||||
}
|
||||
}
|
||||
|
||||
};
|
||||
|
@ -140,7 +170,6 @@ public class TestR4Config {
|
|||
return new SingleQueryCountHolder();
|
||||
}
|
||||
|
||||
|
||||
@Bean
|
||||
public LocalContainerEntityManagerFactoryBean entityManagerFactory(ConfigurableListableBeanFactory theConfigurableListableBeanFactory, FhirContext theFhirContext) {
|
||||
LocalContainerEntityManagerFactoryBean retVal = HapiEntityManagerFactoryUtil.newEntityManagerFactory(theConfigurableListableBeanFactory, theFhirContext);
|
||||
|
@ -150,9 +179,6 @@ public class TestR4Config {
|
|||
return retVal;
|
||||
}
|
||||
|
||||
@Autowired
|
||||
TestHibernateSearchAddInConfig.IHibernateSearchConfigurer hibernateSearchConfigurer;
|
||||
|
||||
private Properties jpaProperties() {
|
||||
Properties extraProperties = new Properties();
|
||||
extraProperties.put("hibernate.format_sql", "false");
|
||||
|
@ -187,6 +213,27 @@ public class TestR4Config {
|
|||
return new MemoryBinaryStorageSvcImpl();
|
||||
}
|
||||
|
||||
public static String crunchifyGenerateThreadDump() {
|
||||
final StringBuilder dump = new StringBuilder();
|
||||
final ThreadMXBean threadMXBean = ManagementFactory.getThreadMXBean();
|
||||
final ThreadInfo[] threadInfos = threadMXBean.getThreadInfo(threadMXBean.getAllThreadIds(), 100);
|
||||
for (ThreadInfo threadInfo : threadInfos) {
|
||||
dump.append('"');
|
||||
dump.append(threadInfo.getThreadName());
|
||||
dump.append("\" ");
|
||||
final Thread.State state = threadInfo.getThreadState();
|
||||
dump.append("\n java.lang.Thread.State: ");
|
||||
dump.append(state);
|
||||
final StackTraceElement[] stackTraceElements = threadInfo.getStackTrace();
|
||||
for (final StackTraceElement stackTraceElement : stackTraceElements) {
|
||||
dump.append("\n at ");
|
||||
dump.append(stackTraceElement);
|
||||
}
|
||||
dump.append("\n\n");
|
||||
}
|
||||
return dump.toString();
|
||||
}
|
||||
|
||||
public static int getMaxThreads() {
|
||||
return ourMaxThreads;
|
||||
}
|
||||
|
|
|
@ -1,12 +1,12 @@
|
|||
package ca.uhn.fhir.jpa.dao;
|
||||
|
||||
import ca.uhn.fhir.i18n.Msg;
|
||||
import ca.uhn.fhir.context.FhirContext;
|
||||
import ca.uhn.fhir.i18n.Msg;
|
||||
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
|
||||
import ca.uhn.fhir.jpa.api.config.DaoConfig;
|
||||
import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
|
||||
import ca.uhn.fhir.jpa.api.model.DeleteConflictList;
|
||||
import ca.uhn.fhir.jpa.dao.index.IdHelperService;
|
||||
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
|
||||
import ca.uhn.fhir.jpa.model.entity.ForcedId;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
|
||||
import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
|
||||
|
@ -41,7 +41,7 @@ class BaseHapiFhirResourceDaoTest {
|
|||
private IRequestPartitionHelperSvc myRequestPartitionHelperSvc;
|
||||
|
||||
@Mock
|
||||
private IdHelperService myIdHelperService;
|
||||
private IIdHelperService myIdHelperService;
|
||||
|
||||
@Mock
|
||||
private EntityManager myEntityManager;
|
||||
|
@ -68,6 +68,7 @@ class BaseHapiFhirResourceDaoTest {
|
|||
/**
|
||||
* To be called for tests that require additional
|
||||
* setup
|
||||
*
|
||||
* @param clazz
|
||||
*/
|
||||
private void setup(Class clazz) {
|
||||
|
|
|
@ -8,6 +8,7 @@ import ca.uhn.fhir.interceptor.executor.InterceptorService;
|
|||
import ca.uhn.fhir.jpa.api.config.DaoConfig;
|
||||
import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
|
||||
import ca.uhn.fhir.jpa.api.model.ExpungeOptions;
|
||||
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
|
||||
import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc;
|
||||
import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc;
|
||||
import ca.uhn.fhir.jpa.config.JpaConfig;
|
||||
|
@ -24,7 +25,6 @@ import ca.uhn.fhir.jpa.dao.data.ITermConceptDao;
|
|||
import ca.uhn.fhir.jpa.dao.data.ITermConceptDesignationDao;
|
||||
import ca.uhn.fhir.jpa.dao.data.ITermConceptPropertyDao;
|
||||
import ca.uhn.fhir.jpa.dao.data.ITermValueSetConceptDao;
|
||||
import ca.uhn.fhir.jpa.dao.index.IdHelperService;
|
||||
import ca.uhn.fhir.jpa.entity.TermConcept;
|
||||
import ca.uhn.fhir.jpa.entity.TermConceptDesignation;
|
||||
import ca.uhn.fhir.jpa.entity.TermConceptProperty;
|
||||
|
@ -142,6 +142,7 @@ public abstract class BaseJpaTest extends BaseTest {
|
|||
static {
|
||||
System.setProperty(Constants.TEST_SYSTEM_PROP_VALIDATION_RESOURCE_CACHES_MS, "1000");
|
||||
System.setProperty("test", "true");
|
||||
System.setProperty("unit_test_mode", "true");
|
||||
TestUtil.setShouldRandomizeTimezones(false);
|
||||
}
|
||||
|
||||
|
@ -191,7 +192,7 @@ public abstract class BaseJpaTest extends BaseTest {
|
|||
@Autowired
|
||||
protected ITermConceptPropertyDao myTermConceptPropertyDao;
|
||||
@Autowired
|
||||
private IdHelperService myIdHelperService;
|
||||
private IIdHelperService myIdHelperService;
|
||||
@Autowired
|
||||
private MemoryCacheService myMemoryCacheService;
|
||||
@Qualifier(JpaConfig.JPA_VALIDATION_SUPPORT)
|
||||
|
|
|
@ -5,8 +5,9 @@ import ca.uhn.fhir.i18n.Msg;
|
|||
import ca.uhn.fhir.interceptor.executor.InterceptorService;
|
||||
import ca.uhn.fhir.jpa.api.config.DaoConfig;
|
||||
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
|
||||
import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
|
||||
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
|
||||
import ca.uhn.fhir.jpa.cache.IResourceVersionSvc;
|
||||
import ca.uhn.fhir.jpa.dao.index.IdHelperService;
|
||||
import ca.uhn.fhir.jpa.dao.r4.TransactionProcessorVersionAdapterR4;
|
||||
import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
|
||||
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
|
||||
|
@ -16,15 +17,11 @@ import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
|
|||
import ca.uhn.fhir.jpa.searchparam.matcher.InMemoryResourceMatcher;
|
||||
import ca.uhn.fhir.jpa.searchparam.matcher.SearchParamMatcher;
|
||||
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
|
||||
import ca.uhn.fhir.util.BundleBuilder;
|
||||
import org.hibernate.Session;
|
||||
import org.hibernate.internal.SessionImpl;
|
||||
import org.hl7.fhir.dstu3.model.IdType;
|
||||
import org.hl7.fhir.r4.model.Bundle;
|
||||
import org.hl7.fhir.r4.model.MedicationKnowledge;
|
||||
import org.hl7.fhir.r4.model.Observation;
|
||||
import org.hl7.fhir.r4.model.Patient;
|
||||
import org.hl7.fhir.r4.model.Reference;
|
||||
import org.hl7.fhir.r4.model.Meta;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.extension.ExtendWith;
|
||||
|
@ -73,7 +70,7 @@ public class TransactionProcessorTest {
|
|||
@MockBean
|
||||
private InMemoryResourceMatcher myInMemoryResourceMatcher;
|
||||
@MockBean
|
||||
private IdHelperService myIdHelperService;
|
||||
private IIdHelperService myIdHelperService;
|
||||
@MockBean
|
||||
private PartitionSettings myPartitionSettings;
|
||||
@MockBean
|
||||
|
@ -86,7 +83,8 @@ public class TransactionProcessorTest {
|
|||
private SearchParamMatcher mySearchParamMatcher;
|
||||
@MockBean(answer = Answers.RETURNS_DEEP_STUBS)
|
||||
private SessionImpl mySession;
|
||||
private FhirContext myFhirCtx = FhirContext.forR4Cached();
|
||||
@MockBean
|
||||
private IFhirSystemDao<Bundle, Meta> mySystemDao;
|
||||
|
||||
@BeforeEach
|
||||
public void before() {
|
||||
|
|
|
@ -2,6 +2,7 @@ package ca.uhn.fhir.jpa.dao.index;
|
|||
|
||||
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
|
||||
import ca.uhn.fhir.jpa.api.config.DaoConfig;
|
||||
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
|
||||
import ca.uhn.fhir.jpa.dao.data.IForcedIdDao;
|
||||
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
|
||||
import ca.uhn.fhir.jpa.util.MemoryCacheService;
|
||||
|
@ -98,8 +99,7 @@ public class IdHelperServiceTest {
|
|||
Map<String, ResourcePersistentId> map = myHelperService.resolveResourcePersistentIds(
|
||||
partitionId,
|
||||
resourceType,
|
||||
patientIdsToResolve
|
||||
);
|
||||
patientIdsToResolve);
|
||||
|
||||
Assertions.assertFalse(map.isEmpty());
|
||||
for (String id : patientIdsToResolve) {
|
||||
|
|
|
@ -2,6 +2,7 @@ package ca.uhn.fhir.jpa.dao.index;
|
|||
|
||||
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
|
||||
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
|
||||
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
|
||||
import ca.uhn.fhir.jpa.cache.ResourcePersistentIdMap;
|
||||
import ca.uhn.fhir.jpa.cache.ResourceVersionSvcDaoImpl;
|
||||
import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
|
||||
|
@ -37,27 +38,12 @@ import static org.mockito.Mockito.when;
|
|||
@ExtendWith(MockitoExtension.class)
|
||||
public class ResourceVersionSvcTest {
|
||||
|
||||
// helper class to package up data for helper methods
|
||||
private class ResourceIdPackage {
|
||||
public IIdType MyResourceId;
|
||||
public ResourcePersistentId MyPid;
|
||||
public Long MyVersion;
|
||||
|
||||
public ResourceIdPackage(IIdType id,
|
||||
ResourcePersistentId pid,
|
||||
Long version) {
|
||||
MyResourceId = id;
|
||||
MyPid = pid;
|
||||
MyVersion = version;
|
||||
}
|
||||
}
|
||||
|
||||
@Mock
|
||||
DaoRegistry myDaoRegistry;
|
||||
@Mock
|
||||
IResourceTableDao myResourceTableDao;
|
||||
@Mock
|
||||
IdHelperService myIdHelperService;
|
||||
IIdHelperService myIdHelperService;
|
||||
|
||||
// TODO KHS move the methods that use this out to a separate test class
|
||||
@InjectMocks
|
||||
|
@ -66,13 +52,14 @@ public class ResourceVersionSvcTest {
|
|||
/**
|
||||
* Gets a ResourceTable record for getResourceVersionsForPid
|
||||
* Order matters!
|
||||
*
|
||||
* @param resourceType
|
||||
* @param pid
|
||||
* @param version
|
||||
* @return
|
||||
*/
|
||||
private Object[] getResourceTableRecordForResourceTypeAndPid(String resourceType, long pid, long version) {
|
||||
return new Object[] {
|
||||
return new Object[]{
|
||||
pid, // long
|
||||
resourceType, // string
|
||||
version // long
|
||||
|
@ -96,6 +83,7 @@ public class ResourceVersionSvcTest {
|
|||
* Helper function to mock out getIdsOfExistingResources
|
||||
* to return the matches and resources matching those provided
|
||||
* by parameters.
|
||||
*
|
||||
* @param theResourcePacks
|
||||
*/
|
||||
private void mockReturnsFor_getIdsOfExistingResources(ResourceIdPackage... theResourcePacks) {
|
||||
|
@ -115,8 +103,7 @@ public class ResourceVersionSvcTest {
|
|||
ResourcePersistentId first = resourcePersistentIds.remove(0);
|
||||
if (resourcePersistentIds.isEmpty()) {
|
||||
when(myIdHelperService.resolveResourcePersistentIdsWithCache(any(), any())).thenReturn(Collections.singletonList(first));
|
||||
}
|
||||
else {
|
||||
} else {
|
||||
when(myIdHelperService.resolveResourcePersistentIdsWithCache(any(), any())).thenReturn(resourcePersistentIds);
|
||||
}
|
||||
}
|
||||
|
@ -206,4 +193,19 @@ public class ResourceVersionSvcTest {
|
|||
assertEquals(1, outcome.getPartitionIds().get(0));
|
||||
}
|
||||
|
||||
// helper class to package up data for helper methods
|
||||
private class ResourceIdPackage {
|
||||
public IIdType MyResourceId;
|
||||
public ResourcePersistentId MyPid;
|
||||
public Long MyVersion;
|
||||
|
||||
public ResourceIdPackage(IIdType id,
|
||||
ResourcePersistentId pid,
|
||||
Long version) {
|
||||
MyResourceId = id;
|
||||
MyPid = pid;
|
||||
MyVersion = version;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -51,7 +51,7 @@ import ca.uhn.fhir.jpa.dao.data.ITermConceptPropertyDao;
|
|||
import ca.uhn.fhir.jpa.dao.data.ITermValueSetConceptDao;
|
||||
import ca.uhn.fhir.jpa.dao.data.ITermValueSetConceptDesignationDao;
|
||||
import ca.uhn.fhir.jpa.dao.data.ITermValueSetDao;
|
||||
import ca.uhn.fhir.jpa.dao.index.IdHelperService;
|
||||
import ca.uhn.fhir.jpa.dao.index.IJpaIdHelperService;
|
||||
import ca.uhn.fhir.jpa.entity.TermCodeSystem;
|
||||
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
|
||||
import ca.uhn.fhir.jpa.entity.TermConcept;
|
||||
|
@ -491,7 +491,7 @@ public abstract class BaseJpaR4Test extends BaseJpaTest implements ITestDataBuil
|
|||
@Autowired
|
||||
protected DaoRegistry myDaoRegistry;
|
||||
@Autowired
|
||||
protected IdHelperService myIdHelperService;
|
||||
protected IJpaIdHelperService myIdHelperService;
|
||||
@Autowired
|
||||
protected IBatchJobSubmitter myBatchJobSubmitter;
|
||||
@Autowired
|
||||
|
|
|
@ -11,6 +11,7 @@ import org.hl7.fhir.r4.model.Patient;
|
|||
import org.hl7.fhir.r4.model.StringType;
|
||||
import org.junit.jupiter.api.AfterAll;
|
||||
import org.junit.jupiter.api.BeforeAll;
|
||||
import org.junit.jupiter.api.Disabled;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
@ -140,6 +141,7 @@ public class FhirResourceDaoR4MetaTest extends BaseJpaR4Test {
|
|||
}
|
||||
|
||||
|
||||
@Disabled // TODO JA: This test fails regularly, need to get a dedicated connection pool for tag creation
|
||||
@Test
|
||||
public void testConcurrentAddTag() throws ExecutionException, InterruptedException {
|
||||
|
||||
|
|
|
@ -944,8 +944,8 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
|
|||
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
|
||||
// 1 lookup for the match URL only
|
||||
assertEquals(1, myCaptureQueriesListener.countSelectQueries());
|
||||
assertEquals(5, myCaptureQueriesListener.countInsertQueries());
|
||||
assertEquals(1, myCaptureQueriesListener.countUpdateQueries());
|
||||
assertEquals(19, myCaptureQueriesListener.countInsertQueries());
|
||||
assertEquals(2, myCaptureQueriesListener.countUpdateQueries());
|
||||
assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
|
||||
runInTransaction(() -> assertEquals(4, myResourceTableDao.count()));
|
||||
logAllResources();
|
||||
|
@ -956,8 +956,8 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
|
|||
mySystemDao.transaction(mySrd, createTransactionWithCreatesAndOneMatchUrl());
|
||||
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
|
||||
assertEquals(0, myCaptureQueriesListener.countSelectQueries());
|
||||
assertEquals(4, myCaptureQueriesListener.countInsertQueries());
|
||||
assertEquals(1, myCaptureQueriesListener.countUpdateQueries());
|
||||
assertEquals(16, myCaptureQueriesListener.countInsertQueries());
|
||||
assertEquals(2, myCaptureQueriesListener.countUpdateQueries());
|
||||
assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
|
||||
runInTransaction(() -> assertEquals(7, myResourceTableDao.count()));
|
||||
|
||||
|
@ -967,8 +967,8 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
|
|||
mySystemDao.transaction(mySrd, createTransactionWithCreatesAndOneMatchUrl());
|
||||
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
|
||||
assertEquals(0, myCaptureQueriesListener.countSelectQueries());
|
||||
assertEquals(4, myCaptureQueriesListener.countInsertQueries());
|
||||
assertEquals(1, myCaptureQueriesListener.countUpdateQueries());
|
||||
assertEquals(16, myCaptureQueriesListener.countInsertQueries());
|
||||
assertEquals(2, myCaptureQueriesListener.countUpdateQueries());
|
||||
assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
|
||||
runInTransaction(() -> assertEquals(10, myResourceTableDao.count()));
|
||||
|
||||
|
@ -1016,8 +1016,8 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
|
|||
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
|
||||
// 1 lookup for the match URL only
|
||||
assertEquals(1, myCaptureQueriesListener.countSelectQueries());
|
||||
assertEquals(5, myCaptureQueriesListener.countInsertQueries());
|
||||
assertEquals(1, myCaptureQueriesListener.countUpdateQueries());
|
||||
assertEquals(16, myCaptureQueriesListener.countInsertQueries());
|
||||
assertEquals(2, myCaptureQueriesListener.countUpdateQueries());
|
||||
assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
|
||||
runInTransaction(() -> assertEquals(4, myResourceTableDao.count(), () -> myResourceTableDao.findAll().stream().map(t -> t.getIdDt().toUnqualifiedVersionless().getValue()).collect(Collectors.joining(","))));
|
||||
|
||||
|
@ -1027,8 +1027,8 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
|
|||
mySystemDao.transaction(mySrd, createTransactionWithCreatesAndOneMatchUrl());
|
||||
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
|
||||
assertEquals(0, myCaptureQueriesListener.countSelectQueries());
|
||||
assertEquals(4, myCaptureQueriesListener.countInsertQueries());
|
||||
assertEquals(1, myCaptureQueriesListener.countUpdateQueries());
|
||||
assertEquals(16, myCaptureQueriesListener.countInsertQueries());
|
||||
assertEquals(2, myCaptureQueriesListener.countUpdateQueries());
|
||||
assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
|
||||
runInTransaction(() -> assertEquals(7, myResourceTableDao.count()));
|
||||
|
||||
|
@ -1038,8 +1038,8 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
|
|||
mySystemDao.transaction(mySrd, createTransactionWithCreatesAndOneMatchUrl());
|
||||
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
|
||||
assertEquals(0, myCaptureQueriesListener.countSelectQueries());
|
||||
assertEquals(4, myCaptureQueriesListener.countInsertQueries());
|
||||
assertEquals(1, myCaptureQueriesListener.countUpdateQueries());
|
||||
assertEquals(16, myCaptureQueriesListener.countInsertQueries());
|
||||
assertEquals(2, myCaptureQueriesListener.countUpdateQueries());
|
||||
assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
|
||||
runInTransaction(() -> assertEquals(10, myResourceTableDao.count()));
|
||||
|
||||
|
@ -1071,7 +1071,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
|
|||
myCaptureQueriesListener.logSelectQueries();
|
||||
assertEquals(0, myCaptureQueriesListener.countSelectQueries());
|
||||
myCaptureQueriesListener.logInsertQueries();
|
||||
assertEquals(3, myCaptureQueriesListener.countInsertQueries());
|
||||
assertEquals(8, myCaptureQueriesListener.countInsertQueries());
|
||||
myCaptureQueriesListener.logUpdateQueries();
|
||||
assertEquals(0, myCaptureQueriesListener.countUpdateQueries());
|
||||
assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
|
||||
|
@ -1118,7 +1118,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
|
|||
myCaptureQueriesListener.logSelectQueries();
|
||||
assertEquals(1, myCaptureQueriesListener.countSelectQueries());
|
||||
myCaptureQueriesListener.logInsertQueries();
|
||||
assertEquals(6, myCaptureQueriesListener.countInsertQueries());
|
||||
assertEquals(21, myCaptureQueriesListener.countInsertQueries());
|
||||
myCaptureQueriesListener.logUpdateQueries();
|
||||
assertEquals(0, myCaptureQueriesListener.countUpdateQueries());
|
||||
assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
|
||||
|
@ -1131,11 +1131,94 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
|
|||
outcome = mySystemDao.transaction(mySrd, input.get());
|
||||
ourLog.info("Resp: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome));
|
||||
myCaptureQueriesListener.logSelectQueries();
|
||||
assertEquals(10, myCaptureQueriesListener.countSelectQueries());
|
||||
assertEquals(5, myCaptureQueriesListener.countSelectQueries());
|
||||
myCaptureQueriesListener.logInsertQueries();
|
||||
assertEquals(1, myCaptureQueriesListener.countInsertQueries());
|
||||
assertEquals(2, myCaptureQueriesListener.countInsertQueries());
|
||||
myCaptureQueriesListener.logUpdateQueries();
|
||||
assertEquals(2, myCaptureQueriesListener.countUpdateQueries());
|
||||
assertEquals(4, myCaptureQueriesListener.countUpdateQueries());
|
||||
assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
|
||||
|
||||
/*
|
||||
* Third time with mass ingestion mode enabled
|
||||
*/
|
||||
myDaoConfig.setMassIngestionMode(true);
|
||||
|
||||
myCaptureQueriesListener.clear();
|
||||
outcome = mySystemDao.transaction(mySrd, input.get());
|
||||
ourLog.info("Resp: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome));
|
||||
myCaptureQueriesListener.logSelectQueries();
|
||||
assertEquals(5, myCaptureQueriesListener.countSelectQueries());
|
||||
myCaptureQueriesListener.logInsertQueries();
|
||||
assertEquals(2, myCaptureQueriesListener.countInsertQueries());
|
||||
myCaptureQueriesListener.logUpdateQueries();
|
||||
assertEquals(4, myCaptureQueriesListener.countUpdateQueries());
|
||||
assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
|
||||
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testTransactionWithMultipleUpdates_ResourcesHaveTags() {
|
||||
|
||||
AtomicInteger counter = new AtomicInteger(0);
|
||||
Supplier<Bundle> input = () -> {
|
||||
BundleBuilder bb = new BundleBuilder(myFhirContext);
|
||||
|
||||
Patient pt = new Patient();
|
||||
pt.setId("Patient/A");
|
||||
pt.getMeta().addTag("http://foo", "bar", "baz");
|
||||
pt.addIdentifier().setSystem("http://foo").setValue("123");
|
||||
bb.addTransactionUpdateEntry(pt);
|
||||
|
||||
int i = counter.incrementAndGet();
|
||||
|
||||
Observation obsA = new Observation();
|
||||
obsA.getMeta().addTag("http://foo", "bar" + i, "baz"); // changes every time
|
||||
obsA.setId("Observation/A");
|
||||
obsA.getCode().addCoding().setSystem("http://foo").setCode("bar");
|
||||
obsA.setValue(new Quantity(null, 1, "http://unitsofmeasure.org", "kg", "kg"));
|
||||
obsA.setEffective(new DateTimeType(new Date()));
|
||||
obsA.addNote().setText("Foo " + i); // changes every time
|
||||
bb.addTransactionUpdateEntry(obsA);
|
||||
|
||||
Observation obsB = new Observation();
|
||||
obsB.getMeta().addTag("http://foo", "bar", "baz" + i); // changes every time
|
||||
obsB.setId("Observation/B");
|
||||
obsB.getCode().addCoding().setSystem("http://foo").setCode("bar");
|
||||
obsB.setValue(new Quantity(null, 1, "http://unitsofmeasure.org", "kg", "kg"));
|
||||
obsB.setEffective(new DateTimeType(new Date()));
|
||||
obsB.addNote().setText("Foo " + i); // changes every time
|
||||
bb.addTransactionUpdateEntry(obsB);
|
||||
|
||||
return (Bundle) bb.getBundle();
|
||||
};
|
||||
|
||||
ourLog.info("About to start transaction");
|
||||
|
||||
myCaptureQueriesListener.clear();
|
||||
Bundle outcome = mySystemDao.transaction(mySrd, input.get());
|
||||
ourLog.info("Resp: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome));
|
||||
myCaptureQueriesListener.logSelectQueries();
|
||||
// Search for IDs and Search for tag definition
|
||||
assertEquals(3, myCaptureQueriesListener.countSelectQueries());
|
||||
myCaptureQueriesListener.logInsertQueries();
|
||||
assertEquals(29, myCaptureQueriesListener.countInsertQueries());
|
||||
myCaptureQueriesListener.logUpdateQueries();
|
||||
assertEquals(0, myCaptureQueriesListener.countUpdateQueries());
|
||||
assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
|
||||
|
||||
/*
|
||||
* Run a second time
|
||||
*/
|
||||
|
||||
myCaptureQueriesListener.clear();
|
||||
outcome = mySystemDao.transaction(mySrd, input.get());
|
||||
ourLog.info("Resp: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome));
|
||||
myCaptureQueriesListener.logSelectQueries();
|
||||
assertEquals(9, myCaptureQueriesListener.countSelectQueries());
|
||||
myCaptureQueriesListener.logInsertQueries();
|
||||
assertEquals(7, myCaptureQueriesListener.countInsertQueries());
|
||||
myCaptureQueriesListener.logUpdateQueries();
|
||||
assertEquals(4, myCaptureQueriesListener.countUpdateQueries());
|
||||
assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
|
||||
|
||||
/*
|
||||
|
@ -1149,9 +1232,9 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
|
|||
myCaptureQueriesListener.logSelectQueries();
|
||||
assertEquals(7, myCaptureQueriesListener.countSelectQueries());
|
||||
myCaptureQueriesListener.logInsertQueries();
|
||||
assertEquals(1, myCaptureQueriesListener.countInsertQueries());
|
||||
assertEquals(5, myCaptureQueriesListener.countInsertQueries());
|
||||
myCaptureQueriesListener.logUpdateQueries();
|
||||
assertEquals(2, myCaptureQueriesListener.countUpdateQueries());
|
||||
assertEquals(4, myCaptureQueriesListener.countUpdateQueries());
|
||||
assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
|
||||
|
||||
}
|
||||
|
@ -1409,9 +1492,9 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
|
|||
myCaptureQueriesListener.logSelectQueries();
|
||||
assertEquals(1, myCaptureQueriesListener.countSelectQueries());
|
||||
myCaptureQueriesListener.logInsertQueries();
|
||||
assertEquals(6, myCaptureQueriesListener.countInsertQueries());
|
||||
assertEquals(40, myCaptureQueriesListener.countInsertQueries());
|
||||
myCaptureQueriesListener.logUpdateQueries();
|
||||
assertEquals(1, myCaptureQueriesListener.countUpdateQueries());
|
||||
assertEquals(4, myCaptureQueriesListener.countUpdateQueries());
|
||||
assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
|
||||
|
||||
/*
|
||||
|
@ -1422,11 +1505,11 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
|
|||
outcome = mySystemDao.transaction(mySrd, input.get());
|
||||
ourLog.info("Resp: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome));
|
||||
myCaptureQueriesListener.logSelectQueries();
|
||||
assertEquals(11, myCaptureQueriesListener.countSelectQueries());
|
||||
assertEquals(8, myCaptureQueriesListener.countSelectQueries());
|
||||
myCaptureQueriesListener.logInsertQueries();
|
||||
assertEquals(1, myCaptureQueriesListener.countInsertQueries());
|
||||
assertEquals(4, myCaptureQueriesListener.countInsertQueries());
|
||||
myCaptureQueriesListener.logUpdateQueries();
|
||||
assertEquals(2, myCaptureQueriesListener.countUpdateQueries());
|
||||
assertEquals(8, myCaptureQueriesListener.countUpdateQueries());
|
||||
assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
|
||||
|
||||
/*
|
||||
|
@ -1439,11 +1522,11 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
|
|||
outcome = mySystemDao.transaction(mySrd, input.get());
|
||||
ourLog.info("Resp: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome));
|
||||
myCaptureQueriesListener.logSelectQueries();
|
||||
assertEquals(6, myCaptureQueriesListener.countSelectQueries());
|
||||
assertEquals(7, myCaptureQueriesListener.countSelectQueries());
|
||||
myCaptureQueriesListener.logInsertQueries();
|
||||
assertEquals(1, myCaptureQueriesListener.countInsertQueries());
|
||||
assertEquals(4, myCaptureQueriesListener.countInsertQueries());
|
||||
myCaptureQueriesListener.logUpdateQueries();
|
||||
assertEquals(2, myCaptureQueriesListener.countUpdateQueries());
|
||||
assertEquals(8, myCaptureQueriesListener.countUpdateQueries());
|
||||
assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
|
||||
|
||||
/*
|
||||
|
@ -1454,11 +1537,11 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
|
|||
outcome = mySystemDao.transaction(mySrd, input.get());
|
||||
ourLog.info("Resp: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome));
|
||||
myCaptureQueriesListener.logSelectQueries();
|
||||
assertEquals(5, myCaptureQueriesListener.countSelectQueries());
|
||||
assertEquals(6, myCaptureQueriesListener.countSelectQueries());
|
||||
myCaptureQueriesListener.logInsertQueries();
|
||||
assertEquals(1, myCaptureQueriesListener.countInsertQueries());
|
||||
assertEquals(4, myCaptureQueriesListener.countInsertQueries());
|
||||
myCaptureQueriesListener.logUpdateQueries();
|
||||
assertEquals(2, myCaptureQueriesListener.countUpdateQueries());
|
||||
assertEquals(8, myCaptureQueriesListener.countUpdateQueries());
|
||||
assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
|
||||
}
|
||||
|
||||
|
@ -1488,7 +1571,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
|
|||
myCaptureQueriesListener.clear();
|
||||
mySystemDao.transaction(mySrd, bundleCreator.get());
|
||||
assertEquals(1, myCaptureQueriesListener.countSelectQueries());
|
||||
assertEquals(5, myCaptureQueriesListener.countInsertQueries());
|
||||
assertEquals(8, myCaptureQueriesListener.countInsertQueries());
|
||||
assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
|
||||
|
||||
runInTransaction(() -> {
|
||||
|
@ -1502,7 +1585,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
|
|||
mySystemDao.transaction(mySrd, bundleCreator.get());
|
||||
myCaptureQueriesListener.logSelectQueries();
|
||||
assertEquals(0, myCaptureQueriesListener.countSelectQueries());
|
||||
assertEquals(3, myCaptureQueriesListener.countInsertQueries());
|
||||
assertEquals(4, myCaptureQueriesListener.countInsertQueries());
|
||||
assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
|
||||
|
||||
runInTransaction(() -> {
|
||||
|
@ -1515,7 +1598,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
|
|||
myCaptureQueriesListener.clear();
|
||||
mySystemDao.transaction(mySrd, bundleCreator.get());
|
||||
assertEquals(0, myCaptureQueriesListener.countSelectQueries());
|
||||
assertEquals(3, myCaptureQueriesListener.countInsertQueries());
|
||||
assertEquals(4, myCaptureQueriesListener.countInsertQueries());
|
||||
assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
|
||||
|
||||
runInTransaction(() -> {
|
||||
|
@ -1550,7 +1633,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
|
|||
mySystemDao.transaction(mySrd, bundleCreator.get());
|
||||
myCaptureQueriesListener.logSelectQueries();
|
||||
assertEquals(1, myCaptureQueriesListener.countSelectQueries());
|
||||
assertEquals(5, myCaptureQueriesListener.countInsertQueries());
|
||||
assertEquals(8, myCaptureQueriesListener.countInsertQueries());
|
||||
assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
|
||||
|
||||
runInTransaction(() -> {
|
||||
|
@ -1564,7 +1647,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
|
|||
mySystemDao.transaction(mySrd, bundleCreator.get());
|
||||
myCaptureQueriesListener.logSelectQueries();
|
||||
assertEquals(2, myCaptureQueriesListener.countSelectQueries());
|
||||
assertEquals(3, myCaptureQueriesListener.countInsertQueries());
|
||||
assertEquals(4, myCaptureQueriesListener.countInsertQueries());
|
||||
assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
|
||||
|
||||
// Make sure the match URL query uses a small limit
|
||||
|
@ -1582,7 +1665,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
|
|||
myCaptureQueriesListener.clear();
|
||||
mySystemDao.transaction(mySrd, bundleCreator.get());
|
||||
assertEquals(1, myCaptureQueriesListener.countSelectQueries());
|
||||
assertEquals(3, myCaptureQueriesListener.countInsertQueries());
|
||||
assertEquals(4, myCaptureQueriesListener.countInsertQueries());
|
||||
assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
|
||||
|
||||
runInTransaction(() -> {
|
||||
|
@ -2164,7 +2247,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
|
|||
|
||||
// Lookup the two existing IDs to make sure they are legit
|
||||
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
|
||||
assertEquals(3, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
|
||||
assertEquals(6, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
|
||||
assertEquals(3, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
|
||||
assertEquals(1, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
|
||||
assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
|
||||
|
@ -2223,7 +2306,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
|
|||
|
||||
// Lookup the two existing IDs to make sure they are legit
|
||||
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
|
||||
assertEquals(1, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
|
||||
assertEquals(4, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
|
||||
assertEquals(3, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
|
||||
assertEquals(1, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
|
||||
assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
|
||||
|
@ -2465,7 +2548,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
|
|||
myCaptureQueriesListener.clear();
|
||||
mySystemDao.transaction(new SystemRequestDetails(), supplier.get());
|
||||
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
|
||||
assertEquals(7, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
|
||||
assertEquals(8, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
|
||||
assertEquals(2, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
|
||||
assertEquals(3, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
|
||||
assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
|
||||
|
@ -2508,7 +2591,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
|
|||
myCaptureQueriesListener.clear();
|
||||
mySystemDao.transaction(new SystemRequestDetails(), loadResourceFromClasspath(Bundle.class, "r4/transaction-perf-bundle-smallchanges.json"));
|
||||
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
|
||||
assertEquals(6, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
|
||||
assertEquals(8, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
|
||||
assertEquals(1, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
|
||||
assertEquals(3, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
|
||||
assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
|
||||
|
|
|
@ -1174,7 +1174,7 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
|
|||
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
|
||||
assertEquals(4, myCaptureQueriesListener.countSelectQueries());
|
||||
// Batches of 30 are written for each query - so 9 inserts total
|
||||
assertEquals(9, myCaptureQueriesListener.countInsertQueries());
|
||||
assertEquals(221, myCaptureQueriesListener.logInsertQueries());
|
||||
assertEquals(1, myCaptureQueriesListener.countUpdateQueries());
|
||||
assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
|
||||
|
||||
|
|
|
@ -760,6 +760,7 @@ public class FhirResourceDaoR4SearchWithElasticSearchIT extends BaseJpaTest {
|
|||
DaoTestDataBuilder testDataBuilder = new DaoTestDataBuilder(myFhirCtx, myDaoRegistry, new SystemRequestDetails());
|
||||
return new TestDataBuilderFixture<>(testDataBuilder, myObservationDao);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -2743,9 +2743,9 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
|
|||
assertEquals(1, myCaptureQueriesListener.countSelectQueries());
|
||||
assertThat(myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false), containsString("resourcein0_.HASH_SYS_AND_VALUE='-4132452001562191669' and (resourcein0_.PARTITION_ID in ('1'))"));
|
||||
myCaptureQueriesListener.logInsertQueries();
|
||||
assertEquals(6, myCaptureQueriesListener.countInsertQueries());
|
||||
assertEquals(40, myCaptureQueriesListener.countInsertQueries());
|
||||
myCaptureQueriesListener.logUpdateQueries();
|
||||
assertEquals(1, myCaptureQueriesListener.countUpdateQueries());
|
||||
assertEquals(4, myCaptureQueriesListener.countUpdateQueries());
|
||||
assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
|
||||
|
||||
/*
|
||||
|
@ -2756,11 +2756,11 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
|
|||
outcome = mySystemDao.transaction(mySrd, input.get());
|
||||
ourLog.info("Resp: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome));
|
||||
myCaptureQueriesListener.logSelectQueries();
|
||||
assertEquals(11, myCaptureQueriesListener.countSelectQueries());
|
||||
assertEquals(8, myCaptureQueriesListener.countSelectQueries());
|
||||
myCaptureQueriesListener.logInsertQueries();
|
||||
assertEquals(1, myCaptureQueriesListener.countInsertQueries());
|
||||
assertEquals(4, myCaptureQueriesListener.countInsertQueries());
|
||||
myCaptureQueriesListener.logUpdateQueries();
|
||||
assertEquals(2, myCaptureQueriesListener.countUpdateQueries());
|
||||
assertEquals(8, myCaptureQueriesListener.countUpdateQueries());
|
||||
assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
|
||||
|
||||
/*
|
||||
|
@ -2773,11 +2773,11 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
|
|||
outcome = mySystemDao.transaction(mySrd, input.get());
|
||||
ourLog.info("Resp: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome));
|
||||
myCaptureQueriesListener.logSelectQueries();
|
||||
assertEquals(6, myCaptureQueriesListener.countSelectQueries());
|
||||
assertEquals(7, myCaptureQueriesListener.countSelectQueries());
|
||||
myCaptureQueriesListener.logInsertQueries();
|
||||
assertEquals(1, myCaptureQueriesListener.countInsertQueries());
|
||||
assertEquals(4, myCaptureQueriesListener.countInsertQueries());
|
||||
myCaptureQueriesListener.logUpdateQueries();
|
||||
assertEquals(2, myCaptureQueriesListener.countUpdateQueries());
|
||||
assertEquals(8, myCaptureQueriesListener.countUpdateQueries());
|
||||
assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
|
||||
|
||||
/*
|
||||
|
@ -2788,11 +2788,11 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
|
|||
outcome = mySystemDao.transaction(mySrd, input.get());
|
||||
ourLog.info("Resp: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome));
|
||||
myCaptureQueriesListener.logSelectQueries();
|
||||
assertEquals(5, myCaptureQueriesListener.countSelectQueries());
|
||||
assertEquals(6, myCaptureQueriesListener.countSelectQueries());
|
||||
myCaptureQueriesListener.logInsertQueries();
|
||||
assertEquals(1, myCaptureQueriesListener.countInsertQueries());
|
||||
assertEquals(4, myCaptureQueriesListener.countInsertQueries());
|
||||
myCaptureQueriesListener.logUpdateQueries();
|
||||
assertEquals(2, myCaptureQueriesListener.countUpdateQueries());
|
||||
assertEquals(8, myCaptureQueriesListener.countUpdateQueries());
|
||||
assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
|
||||
}
|
||||
|
||||
|
|
|
@ -16,6 +16,7 @@ import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.function.Supplier;
import java.util.stream.Stream;

@ -24,14 +25,13 @@ public abstract class BaseTest {

private static final String DATABASE_NAME = "DATABASE";
private static final Logger ourLog = LoggerFactory.getLogger(BaseTest.class);
private static int ourDatabaseUrl = 0;
private BasicDataSource myDataSource;
private String myUrl;
private FlywayMigrator myMigrator;
private DriverTypeEnum.ConnectionProperties myConnectionProperties;

public static Stream<Supplier<TestDatabaseDetails>> data() {
ourLog.info("H2: {}", org.h2.Driver.class.toString());
ourLog.info("H2: {}", org.h2.Driver.class);

ArrayList<Supplier<TestDatabaseDetails>> retVal = new ArrayList<>();

@ -39,7 +39,7 @@ public abstract class BaseTest {
retVal.add(new Supplier<TestDatabaseDetails>() {
@Override
public TestDatabaseDetails get() {
String url = "jdbc:h2:mem:" + DATABASE_NAME + ourDatabaseUrl++;
String url = "jdbc:h2:mem:" + DATABASE_NAME + UUID.randomUUID();
DriverTypeEnum.ConnectionProperties connectionProperties = DriverTypeEnum.H2_EMBEDDED.newConnectionProperties(url, "SA", "SA");
BasicDataSource dataSource = new BasicDataSource();
dataSource.setUrl(url);

@ -60,7 +60,7 @@ public abstract class BaseTest {
retVal.add(new Supplier<TestDatabaseDetails>() {
@Override
public TestDatabaseDetails get() {
String url = "jdbc:derby:memory:" + DATABASE_NAME + ourDatabaseUrl++ + ";create=true";
String url = "jdbc:derby:memory:" + DATABASE_NAME + UUID.randomUUID() + ";create=true";
DriverTypeEnum.ConnectionProperties connectionProperties = DriverTypeEnum.DERBY_EMBEDDED.newConnectionProperties(url, "SA", "SA");
BasicDataSource dataSource = new BasicDataSource();
dataSource.setUrl(url);

@ -589,19 +589,14 @@ public class NpmR4Test extends BaseJpaR4Test {
|
|||
|
||||
runInTransaction(() -> {
|
||||
NpmPackageMetadataJson metadata = myPackageCacheManager.loadPackageMetadata("hl7.fhir.uv.shorthand");
|
||||
try {
|
||||
ourLog.info(JsonUtil.serialize(metadata));
|
||||
ourLog.info(JsonUtil.serialize(metadata));
|
||||
|
||||
assertEquals("0.12.0", metadata.getDistTags().getLatest());
|
||||
assertEquals("0.12.0", metadata.getDistTags().getLatest());
|
||||
|
||||
assertThat(metadata.getVersions().keySet(), contains("0.12.0", "0.11.1"));
|
||||
assertThat(metadata.getVersions().keySet(), contains("0.12.0", "0.11.1"));
|
||||
|
||||
NpmPackageMetadataJson.Version version0120 = metadata.getVersions().get("0.12.0");
|
||||
assertEquals(3001, version0120.getBytes());
|
||||
|
||||
} catch (IOException e) {
|
||||
throw new InternalErrorException(e);
|
||||
}
|
||||
NpmPackageMetadataJson.Version version0120 = metadata.getVersions().get("0.12.0");
|
||||
assertEquals(3001, version0120.getBytes());
|
||||
});
|
||||
|
||||
}
|
||||
|
|
|
@ -3,7 +3,7 @@ package ca.uhn.fhir.jpa.provider.r4;
|
|||
import ca.uhn.fhir.interceptor.api.Pointcut;
|
||||
import ca.uhn.fhir.jpa.api.config.DaoConfig;
|
||||
import ca.uhn.fhir.jpa.api.dao.IDao;
|
||||
import ca.uhn.fhir.jpa.dao.index.IdHelperService;
|
||||
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
|
||||
import ca.uhn.fhir.rest.api.MethodOutcome;
|
||||
import ca.uhn.fhir.rest.server.provider.ProviderConstants;
|
||||
import org.hl7.fhir.instance.model.api.IAnyResource;
|
||||
|
@ -30,7 +30,7 @@ public class HookInterceptorR4Test extends BaseResourceProviderR4Test {
|
|||
private static final Logger ourLog = LoggerFactory.getLogger(HookInterceptorR4Test.class);
|
||||
|
||||
@Autowired
|
||||
IdHelperService myIdHelperService;
|
||||
IIdHelperService myIdHelperService;
|
||||
|
||||
@Override
|
||||
@BeforeEach
|
||||
|
@ -111,7 +111,7 @@ public class HookInterceptorR4Test extends BaseResourceProviderR4Test {
|
|||
});
|
||||
IIdType savedPatientId = myClient.create().resource(new Patient()).execute().getId();
|
||||
|
||||
runInTransaction(()-> {
|
||||
runInTransaction(() -> {
|
||||
Long savedPatientPid = myIdHelperService.resolveResourcePersistentIdsWithCache(null, Collections.singletonList(savedPatientId)).get(0).getIdAsLong();
|
||||
assertEquals(savedPatientPid.longValue(), pid.get());
|
||||
});
|
||||
|
@ -127,7 +127,7 @@ public class HookInterceptorR4Test extends BaseResourceProviderR4Test {
|
|||
pid.set(resourcePid);
|
||||
});
|
||||
IIdType savedPatientId = myClient.create().resource(new Patient()).execute().getId();
|
||||
Long savedPatientPid = runInTransaction(()->myIdHelperService.resolveResourcePersistentIdsWithCache(null, Collections.singletonList(savedPatientId)).get(0).getIdAsLong());
|
||||
Long savedPatientPid = runInTransaction(() -> myIdHelperService.resolveResourcePersistentIdsWithCache(null, Collections.singletonList(savedPatientId)).get(0).getIdAsLong());
|
||||
|
||||
myClient.delete().resourceById(savedPatientId).execute();
|
||||
Parameters parameters = new Parameters();
|
||||
|
@ -163,7 +163,7 @@ public class HookInterceptorR4Test extends BaseResourceProviderR4Test {
|
|||
});
|
||||
patient.setActive(true);
|
||||
myClient.update().resource(patient).execute();
|
||||
runInTransaction(()-> {
|
||||
runInTransaction(() -> {
|
||||
Long savedPatientPid = myIdHelperService.resolveResourcePersistentIdsWithCache(null, Collections.singletonList(savedPatientId)).get(0).getIdAsLong();
|
||||
assertEquals(savedPatientPid.longValue(), pidOld.get());
|
||||
assertEquals(savedPatientPid.longValue(), pidNew.get());
|
||||
|
@ -244,5 +244,4 @@ public class HookInterceptorR4Test extends BaseResourceProviderR4Test {
|
|||
}
|
||||
|
||||
|
||||
|
||||
}
|
||||
|
|
|
@ -162,6 +162,7 @@ import java.util.Set;
|
|||
import java.util.TreeSet;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import static ca.uhn.fhir.jpa.config.r4.FhirContextR4Config.DEFAULT_PRESERVE_VERSION_REFS;
|
||||
import static ca.uhn.fhir.jpa.util.TestUtil.sleepOneClick;
|
||||
import static ca.uhn.fhir.util.TestUtil.sleepAtLeast;
|
||||
import static org.apache.commons.lang3.StringUtils.isNotBlank;
|
||||
|
@ -228,6 +229,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
|
|||
public void before() throws Exception {
|
||||
super.before();
|
||||
myFhirContext.setParserErrorHandler(new StrictErrorHandler());
|
||||
myFhirContext.getParserOptions().setDontStripVersionsFromReferencesAtPaths(DEFAULT_PRESERVE_VERSION_REFS);
|
||||
|
||||
myDaoConfig.setAllowMultipleDelete(true);
|
||||
myClient.registerInterceptor(myCapturingInterceptor);
|
||||
|
|
|
@ -7,6 +7,8 @@ import ca.uhn.fhir.interceptor.model.ReadPartitionIdRequestDetails;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
import ca.uhn.fhir.jpa.cache.IResourceChangeListener;
import ca.uhn.fhir.jpa.cache.IResourceVersionSvc;
import ca.uhn.fhir.jpa.cache.ResourceChangeListenerCache;
@ -19,7 +21,6 @@ import ca.uhn.fhir.jpa.dao.JpaResourceDao;
import ca.uhn.fhir.jpa.dao.TransactionProcessor;
import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTableDao;
import ca.uhn.fhir.jpa.dao.index.DaoSearchParamSynchronizer;
import ca.uhn.fhir.jpa.dao.index.IdHelperService;
import ca.uhn.fhir.jpa.dao.index.SearchParamWithInlineReferencesExtractor;
import ca.uhn.fhir.jpa.dao.r4.FhirSystemDaoR4;
import ca.uhn.fhir.jpa.dao.r4.TransactionProcessorVersionAdapterR4;
@ -140,7 +141,7 @@ public class GiantTransactionPerfTest {
private SearchParamPresenceSvcImpl mySearchParamPresenceSvc;
private DaoSearchParamSynchronizer myDaoSearchParamSynchronizer;
@Mock
private IdHelperService myIdHelperService;
private IIdHelperService myIdHelperService;

@AfterEach
public void afterEach() {
@ -185,6 +186,7 @@ public class GiantTransactionPerfTest {
myTransactionProcessor.setPartitionSettingsForUnitTest(this.myPartitionSettings);
myTransactionProcessor.setIdHelperServiceForUnitTest(myIdHelperService);
myTransactionProcessor.setFhirContextForUnitTest(ourFhirContext);
myTransactionProcessor.setApplicationContextForUnitTest(myAppCtx);
myTransactionProcessor.start();

mySystemDao = new FhirSystemDaoR4();
@ -194,6 +196,7 @@ public class GiantTransactionPerfTest {
mySystemDao.start();

when(myAppCtx.getBean(eq(IInstanceValidatorModule.class))).thenReturn(myInstanceValidatorSvc);
when(myAppCtx.getBean(eq(IFhirSystemDao.class))).thenReturn(mySystemDao);

myInMemoryResourceMatcher = new InMemoryResourceMatcher();

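Reviewer note: GiantTransactionPerfTest now mocks the IIdHelperService interface rather than the IdHelperService implementation, which keeps the perf test decoupled from JPA internals. A generic, self-contained sketch of the mock-the-interface pattern (all names here are illustrative, not from this diff):

    import static org.mockito.Mockito.mock;
    import static org.mockito.Mockito.when;

    interface IdResolver {
        long resolve(String resourceId);
    }

    class IdConsumer {
        private final IdResolver myResolver;
        IdConsumer(IdResolver theResolver) { myResolver = theResolver; }
        long lookUp(String theId) { return myResolver.resolve(theId); }
    }

    // In a test: stub the interface, wire it in, and exercise the consumer
    // without touching the real implementation.
    IdResolver resolver = mock(IdResolver.class);
    when(resolver.resolve("Patient/123")).thenReturn(42L);
    IdConsumer consumer = new IdConsumer(resolver);
    // consumer.lookUp("Patient/123") now returns 42L
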
@ -2,6 +2,7 @@
package ca.uhn.fhir.jpa.subscription;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

import org.eclipse.jetty.websocket.api.Session;
@ -17,7 +18,7 @@ public class SocketImplementation {
private String myCriteria;
protected String myError;
protected boolean myGotBound;
private List<String> myMessages = new ArrayList<String>();
private List<String> myMessages = Collections.synchronizedList(new ArrayList<>());
protected int myPingCount;
protected String mySubsId;
private Session session;
@ -82,4 +83,4 @@ public class SocketImplementation {
myError = "Unexpected message: " + theMsg;
}
}
}
}

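Reviewer note: myMessages is now wrapped in Collections.synchronizedList because the WebSocket callback thread appends to it while the test thread reads it. A self-contained sketch of the pattern:

    import java.util.ArrayList;
    import java.util.Collections;
    import java.util.List;

    List<String> messages = Collections.synchronizedList(new ArrayList<>());

    // A writer thread (e.g. a socket callback) can add safely:
    new Thread(() -> messages.add("ping")).start();

    // Individual adds/reads are synchronized by the wrapper, but iteration still
    // needs an explicit lock on the list itself:
    synchronized (messages) {
        for (String msg : messages) {
            System.out.println(msg);
        }
    }
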
@ -32,6 +32,7 @@ import java.util.List;
import java.util.function.Consumer;

import static ca.uhn.fhir.jpa.subscription.resthook.RestHookTestDstu3Test.logAllInterceptors;
import static org.awaitility.Awaitility.await;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;

@ -102,12 +103,17 @@ public class EmailSubscriptionDstu3Test extends BaseResourceProviderDstu3Test {
}
}

int initialCount = mySubscriptionRegistry.getAll().size();

ourLog.info("About to create subscription...");
MethodOutcome methodOutcome = ourClient.create().resource(subscription).execute();
subscription.setId(methodOutcome.getId().getIdPart());
mySubscriptionIds.add(methodOutcome.getId());

waitForQueueToDrain();

await().until(()-> mySubscriptionRegistry.getAll().size() == initialCount + 1);

return subscription;
}

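Reviewer note: the subscription count is now polled with Awaitility rather than asserted immediately, since registration happens asynchronously. A self-contained sketch of that wait pattern (the AtomicInteger stands in for mySubscriptionRegistry.getAll().size()):

    import static org.awaitility.Awaitility.await;

    import java.time.Duration;
    import java.util.concurrent.atomic.AtomicInteger;

    AtomicInteger registeredCount = new AtomicInteger(0);
    int initialCount = registeredCount.get();

    // Something asynchronous bumps the counter later on another thread:
    new Thread(registeredCount::incrementAndGet).start();

    // Poll until the expected state is reached, or fail after the timeout.
    await().atMost(Duration.ofSeconds(10))
           .until(() -> registeredCount.get() == initialCount + 1);
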
@ -304,6 +304,7 @@ public class RestHookTestR4Test extends BaseSubscriptionsR4Test {
createSubscription(criteria2, payload);
waitForActivatedSubscriptionCount(2);

ourLog.info("Sending an Observation");
Observation obs = sendObservation(code, "SNOMED-CT");

// Should see 1 subscription notification
@ -313,6 +314,7 @@ public class RestHookTestR4Test extends BaseSubscriptionsR4Test {
assertEquals(Constants.CT_FHIR_JSON_NEW, ourRestfulServer.getRequestContentTypes().get(0));

// Send a meta-add
ourLog.info("Sending a meta-add");
obs.setId(obs.getIdElement().toUnqualifiedVersionless());
myClient.meta().add().onResource(obs.getIdElement()).meta(new Meta().addTag("http://blah", "blah", null)).execute();

@ -509,8 +511,8 @@ public class RestHookTestR4Test extends BaseSubscriptionsR4Test {
assertEquals(0, ourObservationProvider.getCountCreate());
ourObservationProvider.waitForUpdateCount(2);

Observation observation1 = ourObservationProvider.getResourceUpdates().get(0);
Observation observation2 = ourObservationProvider.getResourceUpdates().get(1);
Observation observation1 = ourObservationProvider.getResourceUpdates().stream().filter(t->t.getIdElement().getVersionIdPart().equals("1")).findFirst().orElseThrow(()->new IllegalArgumentException());
Observation observation2 = ourObservationProvider.getResourceUpdates().stream().filter(t->t.getIdElement().getVersionIdPart().equals("2")).findFirst().orElseThrow(()->new IllegalArgumentException());

assertEquals("1", observation1.getIdElement().getVersionIdPart());
assertNull(observation1.getNoteFirstRep().getText());

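Reviewer note: the update assertions now select notifications by version id instead of by position in the list, so the test no longer depends on delivery order. A small self-contained sketch of the same stream-filter idea on plain strings:

    import java.util.List;

    List<String> versions = List.of("2", "1"); // arrival order is not guaranteed

    // Pick each element by a stable key rather than by index.
    String first = versions.stream()
        .filter(v -> v.equals("1"))
        .findFirst()
        .orElseThrow(IllegalArgumentException::new);
    String second = versions.stream()
        .filter(v -> v.equals("2"))
        .findFirst()
        .orElseThrow(IllegalArgumentException::new);
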
@ -46,6 +46,7 @@ import javax.servlet.http.HttpServletRequest;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;

import static org.awaitility.Awaitility.await;
import static org.hamcrest.CoreMatchers.containsString;
@ -66,10 +67,10 @@ public class SubscriptionTriggeringDstu3Test extends BaseResourceProviderDstu3Te
private static String ourListenerServerBase;
private static final List<Observation> ourCreatedObservations = Collections.synchronizedList(Lists.newArrayList());
private static final List<Observation> ourUpdatedObservations = Collections.synchronizedList(Lists.newArrayList());
private static final List<Patient> ourCreatedPatients = Lists.newArrayList();
private static final List<Patient> ourUpdatedPatients = Lists.newArrayList();
private static final List<String> ourContentTypes = new ArrayList<>();
private final List<IIdType> mySubscriptionIds = new ArrayList<>();
private static final List<Patient> ourCreatedPatients = Collections.synchronizedList(Lists.newArrayList());
private static final List<Patient> ourUpdatedPatients = Collections.synchronizedList(Lists.newArrayList());
private static final List<String> ourContentTypes = Collections.synchronizedList(Lists.newArrayList());
private final List<IIdType> mySubscriptionIds = Collections.synchronizedList(Lists.newArrayList());

@Autowired
private SubscriptionTestUtil mySubscriptionTestUtil;
@ -261,7 +262,7 @@ public class SubscriptionTriggeringDstu3Test extends BaseResourceProviderDstu3Te
waitForSize(0, ourCreatedObservations);
waitForSize(0, ourCreatedPatients);
waitForSize(50, ourUpdatedPatients);

ourLog.info("Updated patients: {}", ourUpdatedPatients.stream().map(t->t.getIdElement().toUnqualifiedVersionless().getValue()).collect(Collectors.toList()));
}

@Test

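Reviewer note: the added log statement dumps the ids of the collected patients in one line, which helps diagnose the waitForSize timeout when this test is flaky. A generic sketch of collecting ids for a single SLF4J log call (names here are illustrative):

    import java.util.List;
    import java.util.stream.Collectors;

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    Logger log = LoggerFactory.getLogger("test");
    List<String> updatedIds = List.of("Patient/1", "Patient/2");

    // One log statement with all ids, instead of one line per element.
    log.info("Updated patients: {}", updatedIds.stream().sorted().collect(Collectors.toList()));
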
@ -8,6 +8,10 @@
</encoder>
</appender>

<!--<logger name="ca.uhn.fhir.jpa.subscription.match.matcher.subscriber.SubscriptionMatchingSubscriber" additivity="false" level="trace">
<appender-ref ref="STDOUT" />
</logger>-->

<logger name="org.springframework.web.socket.handler.ExceptionWebSocketHandlerDecorator" additivity="false" level="info">
<appender-ref ref="STDOUT" />
</logger>

Some files were not shown because too many files have changed in this diff.