Integrate Bulk Export (#1487)
* Start working on subscription processor
* Work on new scheduler
* Test fixes
* Scheduler refactoring
* Fix test failure
* One more test fix
* Updates to scheduler
* More scheduler work
* Tests now all passing
* Ongoing work on export
* Ongoing scheduler work
* Ongoing testing
* Work on export task
* Sync master
* Ongoing work
* Bump xml patch version
* Work on provider
* Work on bulk
* Work on export scheduler
* More test fixes
* More test fixes
* Compile fix
* Reduce logging
* Improve logging
* Reuse bulk export jobs
* Export provider
* Improve logging in bulk export
* Work on bulk export service
* One more bugfix
* Ongoing work on Bulk Data
* Add changelog
Parent: 882e0853df
Commit: 4a751cbfc5
@@ -8,6 +8,7 @@ import java.io.IOException;
 import java.nio.charset.StandardCharsets;
 import java.util.Date;
 
+import ca.uhn.fhir.rest.api.PreferHeader;
 import org.hl7.fhir.dstu3.model.*;
 import org.junit.*;
 import org.mockito.ArgumentCaptor;
@@ -20,7 +21,6 @@ import ca.uhn.fhir.context.FhirVersionEnum;
 import ca.uhn.fhir.parser.IParser;
 import ca.uhn.fhir.rest.api.Constants;
 import ca.uhn.fhir.rest.api.MethodOutcome;
-import ca.uhn.fhir.rest.api.PreferReturnEnum;
 import ca.uhn.fhir.rest.client.api.IGenericClient;
 import ca.uhn.fhir.rest.client.api.ServerValidationModeEnum;
 import ca.uhn.fhir.rest.client.exceptions.FhirClientConnectionException;
@@ -246,7 +246,7 @@ public class GenericClientDstu3IT {
 		Patient pt = new Patient();
 		pt.getText().setDivAsString("A PATIENT");
 
-		MethodOutcome outcome = client.create().resource(pt).prefer(PreferReturnEnum.REPRESENTATION).execute();
+		MethodOutcome outcome = client.create().resource(pt).prefer(PreferHeader.PreferReturnEnum.REPRESENTATION).execute();
 
 		assertNull(outcome.getOperationOutcome());
 		assertNotNull(outcome.getResource());
@@ -220,17 +220,31 @@ public class Constants {
 	public static final String CASCADE_DELETE = "delete";
 	public static final int MAX_RESOURCE_NAME_LENGTH = 100;
 	public static final String CACHE_CONTROL_PRIVATE = "private";
+	public static final String CT_FHIR_NDJSON = "application/fhir+ndjson";
+	public static final String CT_APP_NDJSON = "application/ndjson";
+	public static final String CT_NDJSON = "ndjson";
+	public static final Set<String> CTS_NDJSON;
+	public static final String HEADER_PREFER_RESPOND_ASYNC = "respond-async";
 	public static final int STATUS_HTTP_412_PAYLOAD_TOO_LARGE = 413;
 	public static final String OPERATION_NAME_GRAPHQL = "$graphql";
 	/**
 	 * Note that this constant is used in a number of places including DB column lengths! Be careful if you decide to change it.
 	 */
 	public static final int REQUEST_ID_LENGTH = 16;
+	public static final int STATUS_HTTP_202_ACCEPTED = 202;
+	public static final String HEADER_X_PROGRESS = "X-Progress";
+	public static final String HEADER_RETRY_AFTER = "Retry-After";
 
 	static {
 		CHARSET_UTF8 = StandardCharsets.UTF_8;
 		CHARSET_US_ASCII = StandardCharsets.ISO_8859_1;
 
+		HashSet<String> ctsNdjson = new HashSet<>();
+		ctsNdjson.add(CT_FHIR_NDJSON);
+		ctsNdjson.add(CT_APP_NDJSON);
+		ctsNdjson.add(CT_NDJSON);
+		CTS_NDJSON = Collections.unmodifiableSet(ctsNdjson);
+
 		HashMap<Integer, String> statusNames = new HashMap<>();
 		statusNames.put(200, "OK");
 		statusNames.put(201, "Created");
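For orientation, a hedged sketch (not code from this commit; class and values are hypothetical) of how the new constants combine when a server accepts an asynchronous bulk-export kickoff request:

import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import ca.uhn.fhir.rest.api.Constants;

public class AsyncKickoffSketch {

	public void kickoff(HttpServletRequest theRequest, HttpServletResponse theResponse) {
		// Bulk export kickoff requires the client to ask for async processing
		String prefer = theRequest.getHeader(Constants.HEADER_PREFER);
		if (prefer == null || !prefer.contains(Constants.HEADER_PREFER_RESPOND_ASYNC)) {
			theResponse.setStatus(400); // Bad Request
			return;
		}

		// Accept the job and tell the client how to poll for progress
		theResponse.setStatus(Constants.STATUS_HTTP_202_ACCEPTED);
		theResponse.setHeader(Constants.HEADER_X_PROGRESS, "queued");
		theResponse.setHeader(Constants.HEADER_RETRY_AFTER, "120");
	}
}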
@@ -0,0 +1,61 @@
+package ca.uhn.fhir.rest.api;
+
+import javax.annotation.Nullable;
+import java.util.HashMap;
+
+public class PreferHeader {
+
+	private PreferReturnEnum myReturn;
+	private boolean myRespondAsync;
+
+	public @Nullable
+	PreferReturnEnum getReturn() {
+		return myReturn;
+	}
+
+	public PreferHeader setReturn(PreferReturnEnum theReturn) {
+		myReturn = theReturn;
+		return this;
+	}
+
+	public boolean getRespondAsync() {
+		return myRespondAsync;
+	}
+
+	public PreferHeader setRespondAsync(boolean theRespondAsync) {
+		myRespondAsync = theRespondAsync;
+		return this;
+	}
+
+	/**
+	 * Represents values for "return" value as provided in the <a href="https://tools.ietf.org/html/rfc7240#section-4.2">HTTP Prefer header</a>.
+	 */
+	public enum PreferReturnEnum {
+
+		REPRESENTATION("representation"), MINIMAL("minimal"), OPERATION_OUTCOME("OperationOutcome");
+
+		private static HashMap<String, PreferReturnEnum> ourValues;
+		private String myHeaderValue;
+
+		PreferReturnEnum(String theHeaderValue) {
+			myHeaderValue = theHeaderValue;
+		}
+
+		public String getHeaderValue() {
+			return myHeaderValue;
+		}
+
+		public static PreferReturnEnum fromHeaderValue(String theHeaderValue) {
+			if (ourValues == null) {
+				HashMap<String, PreferReturnEnum> values = new HashMap<>();
+				for (PreferReturnEnum next : PreferReturnEnum.values()) {
+					values.put(next.getHeaderValue(), next);
+				}
+				ourValues = values;
+			}
+			return ourValues.get(theHeaderValue);
+		}
+	}
+}
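A minimal usage sketch of the new class (demo class name is hypothetical; the API is exactly what the new file above defines):

import ca.uhn.fhir.rest.api.PreferHeader;

public class PreferHeaderDemo {
	public static void main(String[] args) {
		// Parse the "return" token of a Prefer header value
		PreferHeader.PreferReturnEnum parsed =
			PreferHeader.PreferReturnEnum.fromHeaderValue("representation");

		// Bundle the parsed pieces into the new carrier object
		PreferHeader header = new PreferHeader()
			.setReturn(parsed)
			.setRespondAsync(true);

		System.out.println(header.getReturn().getHeaderValue()); // representation
		System.out.println(header.getRespondAsync());            // true
	}
}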
@@ -1,54 +0,0 @@
-package ca.uhn.fhir.rest.api;
-
-/*
- * #%L
- * HAPI FHIR - Core Library
- * %%
- * Copyright (C) 2014 - 2019 University Health Network
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-
-import java.util.HashMap;
-
-/**
- * Represents values for "return" value as provided in the <a href="https://tools.ietf.org/html/rfc7240#section-4.2">HTTP Prefer header</a>.
- */
-public enum PreferReturnEnum {
-
-	REPRESENTATION("representation"), MINIMAL("minimal"), OPERATION_OUTCOME("OperationOutcome");
-
-	private String myHeaderValue;
-	private static HashMap<String, PreferReturnEnum> ourValues;
-
-	private PreferReturnEnum(String theHeaderValue) {
-		myHeaderValue = theHeaderValue;
-	}
-
-	public static PreferReturnEnum fromHeaderValue(String theHeaderValue) {
-		if (ourValues == null) {
-			HashMap<String, PreferReturnEnum> values = new HashMap<String, PreferReturnEnum>();
-			for (PreferReturnEnum next : PreferReturnEnum.values()) {
-				values.put(next.getHeaderValue(), next);
-			}
-			ourValues = values;
-		}
-		return ourValues.get(theHeaderValue);
-	}
-
-	public String getHeaderValue() {
-		return myHeaderValue;
-	}
-
-}
@@ -21,7 +21,7 @@ package ca.uhn.fhir.rest.gclient;
  */
 
 import ca.uhn.fhir.rest.api.MethodOutcome;
-import ca.uhn.fhir.rest.api.PreferReturnEnum;
+import ca.uhn.fhir.rest.api.PreferHeader;
 
 public interface ICreateTyped extends IClientExecutable<ICreateTyped, MethodOutcome> {
 
@@ -47,6 +47,6 @@ public interface ICreateTyped extends IClientExecutable<ICreateTyped, MethodOutcome>
 	 *
 	 * @since HAPI 1.1
 	 */
-	ICreateTyped prefer(PreferReturnEnum theReturn);
+	ICreateTyped prefer(PreferHeader.PreferReturnEnum theReturn);
 
 }
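Call sites migrate mechanically from the old top-level enum to the nested one; a minimal sketch of the new spelling (demo class is hypothetical; assumes an initialized IGenericClient):

import ca.uhn.fhir.rest.api.MethodOutcome;
import ca.uhn.fhir.rest.api.PreferHeader;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import org.hl7.fhir.dstu3.model.Patient;

public class PreferMigrationSketch {
	static MethodOutcome createWithPrefer(IGenericClient theClient) {
		Patient patient = new Patient();
		// Was: .prefer(PreferReturnEnum.REPRESENTATION)
		return theClient.create()
			.resource(patient)
			.prefer(PreferHeader.PreferReturnEnum.REPRESENTATION)
			.execute();
	}
}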
@@ -21,7 +21,7 @@ package ca.uhn.fhir.rest.gclient;
  */
 
 import ca.uhn.fhir.rest.api.MethodOutcome;
-import ca.uhn.fhir.rest.api.PreferReturnEnum;
+import ca.uhn.fhir.rest.api.PreferHeader;
 
 public interface IPatchExecutable extends IClientExecutable<IPatchExecutable, MethodOutcome>{
 
@@ -32,6 +32,6 @@ public interface IPatchExecutable extends IClientExecutable<IPatchExecutable, MethodOutcome>
 	 *
 	 * @since HAPI 1.1
 	 */
-	IPatchExecutable prefer(PreferReturnEnum theReturn);
+	IPatchExecutable prefer(PreferHeader.PreferReturnEnum theReturn);
 
 }
@@ -21,7 +21,7 @@ package ca.uhn.fhir.rest.gclient;
  */
 
 import ca.uhn.fhir.rest.api.MethodOutcome;
-import ca.uhn.fhir.rest.api.PreferReturnEnum;
+import ca.uhn.fhir.rest.api.PreferHeader;
 
 public interface IUpdateExecutable extends IClientExecutable<IUpdateExecutable, MethodOutcome>{
 
@@ -32,6 +32,6 @@ public interface IUpdateExecutable extends IClientExecutable<IUpdateExecutable, MethodOutcome>
 	 *
 	 * @since HAPI 1.1
 	 */
-	IUpdateExecutable prefer(PreferReturnEnum theReturn);
+	IUpdateExecutable prefer(PreferHeader.PreferReturnEnum theReturn);
 
 }
@@ -0,0 +1,25 @@
+package ca.uhn.fhir.util;
+
+import java.util.Arrays;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+import static org.apache.commons.lang3.StringUtils.*;
+
+public class ArrayUtil {
+
+	/** Non instantiable */
+	private ArrayUtil() {}
+
+	/**
+	 * Takes in a list like "foo, bar,, baz" and returns a set containing only ["foo", "bar", "baz"]
+	 */
+	public static Set<String> commaSeparatedListToCleanSet(String theValueAsString) {
+		Set<String> resourceTypes;
+		resourceTypes = Arrays.stream(split(theValueAsString, ","))
+			.map(t->trim(t))
+			.filter(t->isNotBlank(t))
+			.collect(Collectors.toSet());
+		return resourceTypes;
+	}
+}
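A usage sketch for the new helper (demo class and input values are hypothetical), e.g. for cleaning a comma-separated _type parameter on a bulk export request:

import java.util.Set;
import ca.uhn.fhir.util.ArrayUtil;

public class ArrayUtilDemo {
	public static void main(String[] args) {
		Set<String> types = ArrayUtil.commaSeparatedListToCleanSet("Patient, Observation,, Encounter ");
		// Blank entries are dropped and whitespace is trimmed:
		System.out.println(types); // e.g. [Patient, Observation, Encounter]
	}
}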
@@ -63,6 +63,9 @@ public class StopWatch {
 		myStarted = theStart.getTime();
 	}
 
+	public StopWatch(long theL) {
+	}
+
 	private void addNewlineIfContentExists(StringBuilder theB) {
 		if (theB.length() > 0) {
 			theB.append("\n");
@@ -231,7 +234,12 @@ public class StopWatch {
 
 		double denominator = ((double) millisElapsed) / ((double) periodMillis);
 
-		return (double) theNumOperations / denominator;
+		double throughput = (double) theNumOperations / denominator;
+		if (throughput > theNumOperations) {
+			throughput = theNumOperations;
+		}
+
+		return throughput;
 	}
 
 	public void restart() {
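The change caps reported throughput at the operation count, which matters when the elapsed time is shorter than the requested period. A worked example with hypothetical numbers:

public class ThroughputCapDemo {
	public static void main(String[] args) {
		long millisElapsed = 500;  // watch has only run for half a second
		long periodMillis = 1000;  // caller asks for operations per second
		int numOperations = 10;

		double denominator = ((double) millisElapsed) / ((double) periodMillis); // 0.5
		double throughput = (double) numOperations / denominator;                // 20.0, extrapolated
		if (throughput > numOperations) {
			throughput = numOperations; // capped: never report more ops/period than actually occurred
		}
		System.out.println(throughput); // 10.0
	}
}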
@@ -5,9 +5,9 @@
 	if you are using this file as a basis for your own project. -->
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
-		<artifactId>hapi-fhir-cli</artifactId>
+		<artifactId>hapi-deployable-pom</artifactId>
 		<version>4.1.0-SNAPSHOT</version>
-		<relativePath>../pom.xml</relativePath>
+		<relativePath>../../hapi-deployable-pom</relativePath>
 	</parent>
 
 	<artifactId>hapi-fhir-cli-jpaserver</artifactId>
@@ -131,6 +131,10 @@
 				<artifactId>Saxon-HE</artifactId>
 				<groupId>net.sf.saxon</groupId>
 			</exclusion>
+			<exclusion>
+				<groupId>org.glassfish.jaxb</groupId>
+				<artifactId>jaxb-core</artifactId>
+			</exclusion>
 		</exclusions>
 	</dependency>
 </dependencies>
@@ -183,15 +187,6 @@
 				</configuration>
 			</plugin>
 
-			<!-- This plugin is just a part of the HAPI internal build process, you do not need to incude it in your own projects -->
-			<plugin>
-				<groupId>org.apache.maven.plugins</groupId>
-				<artifactId>maven-deploy-plugin</artifactId>
-				<configuration>
-					<skip>true</skip>
-				</configuration>
-			</plugin>
-
 			<!-- This is to run the integration tests -->
 			<plugin>
 				<groupId>org.apache.maven.plugins</groupId>
@@ -1,5 +1,25 @@
 package ca.uhn.fhir.jpa.demo;
 
+/*-
+ * #%L
+ * HAPI FHIR - Command Line Client - Server WAR
+ * %%
+ * Copyright (C) 2014 - 2019 University Health Network
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
 import ca.uhn.fhir.jpa.dao.DaoConfig;
 import ca.uhn.fhir.jpa.model.entity.ModelConfig;
 import ca.uhn.fhir.jpa.search.LuceneSearchMappingFactory;
@@ -1,5 +1,25 @@
 package ca.uhn.fhir.jpa.demo;
 
+/*-
+ * #%L
+ * HAPI FHIR - Command Line Client - Server WAR
+ * %%
+ * Copyright (C) 2014 - 2019 University Health Network
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
 import ca.uhn.fhir.context.FhirContext;
 import ca.uhn.fhir.jpa.dao.DaoConfig;
 import org.apache.commons.cli.ParseException;
@@ -1,5 +1,25 @@
 package ca.uhn.fhir.jpa.demo;
 
+/*-
+ * #%L
+ * HAPI FHIR - Command Line Client - Server WAR
+ * %%
+ * Copyright (C) 2014 - 2019 University Health Network
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
 import ca.uhn.fhir.jpa.config.BaseJavaConfigDstu2;
 import ca.uhn.fhir.jpa.dao.DaoConfig;
 import ca.uhn.fhir.jpa.util.SubscriptionsRequireManualActivationInterceptorDstu2;
@@ -1,5 +1,25 @@
 package ca.uhn.fhir.jpa.demo;
 
+/*-
+ * #%L
+ * HAPI FHIR - Command Line Client - Server WAR
+ * %%
+ * Copyright (C) 2014 - 2019 University Health Network
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
 import ca.uhn.fhir.jpa.config.BaseJavaConfigDstu3;
 import ca.uhn.fhir.jpa.dao.DaoConfig;
 import ca.uhn.fhir.jpa.model.entity.ModelConfig;
@@ -1,5 +1,25 @@
 package ca.uhn.fhir.jpa.demo;
 
+/*-
+ * #%L
+ * HAPI FHIR - Command Line Client - Server WAR
+ * %%
+ * Copyright (C) 2014 - 2019 University Health Network
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
 import ca.uhn.fhir.jpa.config.BaseJavaConfigR4;
 import ca.uhn.fhir.jpa.dao.DaoConfig;
 import ca.uhn.fhir.jpa.util.SubscriptionsRequireManualActivationInterceptorR4;
@@ -1,5 +1,25 @@
 package ca.uhn.fhir.jpa.demo;
 
+/*-
+ * #%L
+ * HAPI FHIR - Command Line Client - Server WAR
+ * %%
+ * Copyright (C) 2014 - 2019 University Health Network
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.Import;
@@ -1,5 +1,25 @@
 package ca.uhn.fhir.jpa.demo;
 
+/*-
+ * #%L
+ * HAPI FHIR - Command Line Client - Server WAR
+ * %%
+ * Copyright (C) 2014 - 2019 University Health Network
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
 import ca.uhn.fhir.context.FhirContext;
 import ca.uhn.fhir.context.FhirVersionEnum;
 import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
@@ -364,11 +364,11 @@ public class GenericOkHttpClientDstu2Test {
 		Patient p = new Patient();
 		p.addName().addFamily("FOOFAMILY");
 
-		client.create().resource(p).prefer(PreferReturnEnum.MINIMAL).execute();
+		client.create().resource(p).prefer(PreferHeader.PreferReturnEnum.MINIMAL).execute();
 		assertEquals(1, ourRequestHeaders.get(Constants.HEADER_PREFER).size());
 		assertEquals(Constants.HEADER_PREFER_RETURN + '=' + Constants.HEADER_PREFER_RETURN_MINIMAL, ourRequestHeaders.get(Constants.HEADER_PREFER).get(0).getValue());
 
-		client.create().resource(p).prefer(PreferReturnEnum.REPRESENTATION).execute();
+		client.create().resource(p).prefer(PreferHeader.PreferReturnEnum.REPRESENTATION).execute();
 		assertEquals(1, ourRequestHeaders.get(Constants.HEADER_PREFER).size());
 		assertEquals(Constants.HEADER_PREFER_RETURN + '=' + Constants.HEADER_PREFER_RETURN_REPRESENTATION, ourRequestHeaders.get(Constants.HEADER_PREFER).get(0).getValue());
 	}
@@ -1735,11 +1735,11 @@ public class GenericOkHttpClientDstu2Test {
 		p.setId(new IdDt("1"));
 		p.addName().addFamily("FOOFAMILY");
 
-		client.update().resource(p).prefer(PreferReturnEnum.MINIMAL).execute();
+		client.update().resource(p).prefer(PreferHeader.PreferReturnEnum.MINIMAL).execute();
 		assertEquals(1, ourRequestHeaders.get(Constants.HEADER_PREFER).size());
 		assertEquals(Constants.HEADER_PREFER_RETURN + '=' + Constants.HEADER_PREFER_RETURN_MINIMAL, ourRequestHeaders.get(Constants.HEADER_PREFER).get(0).getValue());
 
-		client.update().resource(p).prefer(PreferReturnEnum.REPRESENTATION).execute();
+		client.update().resource(p).prefer(PreferHeader.PreferReturnEnum.REPRESENTATION).execute();
 		assertEquals(1, ourRequestHeaders.get(Constants.HEADER_PREFER).size());
 		assertEquals(Constants.HEADER_PREFER_RETURN + '=' + Constants.HEADER_PREFER_RETURN_REPRESENTATION, ourRequestHeaders.get(Constants.HEADER_PREFER).get(0).getValue());
 	}
@@ -0,0 +1,20 @@
+package ca.uhn.fhir.rest.client.apache;
+
+import ca.uhn.fhir.context.FhirContext;
+import ca.uhn.fhir.rest.api.Constants;
+import org.apache.http.entity.ContentType;
+import org.apache.http.entity.StringEntity;
+import org.hl7.fhir.instance.model.api.IBaseResource;
+
+import java.nio.charset.UnsupportedCharsetException;
+
+/**
+ * Apache HttpClient request content entity where the body is a FHIR resource, that will
+ * be encoded as JSON by default
+ */
+public class ResourceEntity extends StringEntity {
+
+	public ResourceEntity(FhirContext theContext, IBaseResource theResource) throws UnsupportedCharsetException {
+		super(theContext.newJsonParser().encodeResourceToString(theResource), ContentType.parse(Constants.CT_FHIR_JSON_NEW));
+	}
+}
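A hedged usage sketch (not from this commit; the URL and demo class are hypothetical) showing the new entity class with plain Apache HttpClient:

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.client.apache.ResourceEntity;
import org.apache.http.client.methods.HttpPost;
import org.hl7.fhir.dstu3.model.Patient;

public class ResourceEntityDemo {
	public static void main(String[] args) {
		FhirContext ctx = FhirContext.forDstu3();
		Patient patient = new Patient();
		patient.addName().setFamily("Smith");

		HttpPost post = new HttpPost("http://example.com/fhir/Patient");
		// The entity serializes the resource as JSON and sets the FHIR content type
		post.setEntity(new ResourceEntity(ctx, patient));
		// ... execute with a CloseableHttpClient as usual
	}
}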
@@ -533,7 +533,7 @@ public class GenericClient extends BaseClient implements IGenericClient {
 	private class CreateInternal extends BaseSearch<ICreateTyped, ICreateWithQueryTyped, MethodOutcome> implements ICreate, ICreateTyped, ICreateWithQuery, ICreateWithQueryTyped {
 
 		private boolean myConditional;
-		private PreferReturnEnum myPrefer;
+		private PreferHeader.PreferReturnEnum myPrefer;
 		private IBaseResource myResource;
 		private String myResourceBody;
 		private String mySearchUrl;
@@ -580,7 +580,7 @@ public class GenericClient extends BaseClient implements IGenericClient {
 		}
 
 		@Override
-		public ICreateTyped prefer(PreferReturnEnum theReturn) {
+		public ICreateTyped prefer(PreferHeader.PreferReturnEnum theReturn) {
 			myPrefer = theReturn;
 			return this;
 		}
@@ -1380,13 +1380,13 @@ public class GenericClient extends BaseClient implements IGenericClient {
 	}
 
 	private final class OutcomeResponseHandler implements IClientResponseHandler<MethodOutcome> {
-		private PreferReturnEnum myPrefer;
+		private PreferHeader.PreferReturnEnum myPrefer;
 
 		private OutcomeResponseHandler() {
 			super();
 		}
 
-		private OutcomeResponseHandler(PreferReturnEnum thePrefer) {
+		private OutcomeResponseHandler(PreferHeader.PreferReturnEnum thePrefer) {
 			this();
 			myPrefer = thePrefer;
 		}
@@ -1396,7 +1396,7 @@ public class GenericClient extends BaseClient implements IGenericClient {
 			MethodOutcome response = MethodUtil.process2xxResponse(myContext, theResponseStatusCode, theResponseMimeType, theResponseInputStream, theHeaders);
 			response.setCreatedUsingStatusCode(theResponseStatusCode);
 
-			if (myPrefer == PreferReturnEnum.REPRESENTATION) {
+			if (myPrefer == PreferHeader.PreferReturnEnum.REPRESENTATION) {
 				if (response.getResource() == null) {
 					if (response.getId() != null && isNotBlank(response.getId().getValue()) && response.getId().hasBaseUrl()) {
 						ourLog.info("Server did not return resource for Prefer-representation, going to fetch: {}", response.getId().getValue());
@@ -1418,7 +1418,7 @@ public class GenericClient extends BaseClient implements IGenericClient {
 		private IIdType myId;
 		private String myPatchBody;
 		private PatchTypeEnum myPatchType;
-		private PreferReturnEnum myPrefer;
+		private PreferHeader.PreferReturnEnum myPrefer;
 		private String myResourceType;
 		private String mySearchUrl;
 
@@ -1476,7 +1476,7 @@ public class GenericClient extends BaseClient implements IGenericClient {
 		}
 
 		@Override
-		public IPatchExecutable prefer(PreferReturnEnum theReturn) {
+		public IPatchExecutable prefer(PreferHeader.PreferReturnEnum theReturn) {
 			myPrefer = theReturn;
 			return this;
 		}
@@ -2048,7 +2048,7 @@ public class GenericClient extends BaseClient implements IGenericClient {
 
 		private boolean myConditional;
 		private IIdType myId;
-		private PreferReturnEnum myPrefer;
+		private PreferHeader.PreferReturnEnum myPrefer;
 		private IBaseResource myResource;
 		private String myResourceBody;
 		private String mySearchUrl;
@@ -2102,7 +2102,7 @@ public class GenericClient extends BaseClient implements IGenericClient {
 		}
 
 		@Override
-		public IUpdateExecutable prefer(PreferReturnEnum theReturn) {
+		public IUpdateExecutable prefer(PreferHeader.PreferReturnEnum theReturn) {
 			myPrefer = theReturn;
 			return this;
 		}
@@ -2282,7 +2282,7 @@ public class GenericClient extends BaseClient implements IGenericClient {
 		params.get(parameterName).add(parameterValue);
 	}
 
-	private static void addPreferHeader(PreferReturnEnum thePrefer, BaseHttpClientInvocation theInvocation) {
+	private static void addPreferHeader(PreferHeader.PreferReturnEnum thePrefer, BaseHttpClientInvocation theInvocation) {
 		if (thePrefer != null) {
 			theInvocation.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RETURN + '=' + thePrefer.getHeaderValue());
 		}
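Concretely, this helper emits the standard RFC 7240 header line; a small illustration (demo class is hypothetical, the constants are HAPI's own):

import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.PreferHeader;

public class PreferHeaderLineDemo {
	public static void main(String[] args) {
		String value = Constants.HEADER_PREFER_RETURN + '='
			+ PreferHeader.PreferReturnEnum.REPRESENTATION.getHeaderValue();
		// Prints the request header line the client will send:
		System.out.println(Constants.HEADER_PREFER + ": " + value); // Prefer: return=representation
	}
}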
@@ -112,8 +112,8 @@ public abstract class AbstractJaxRsPageProvider extends AbstractJaxRsProvider im
 	}
 
 	@Override
-	public PreferReturnEnum getDefaultPreferReturn() {
-		return PreferReturnEnum.REPRESENTATION;
+	public PreferHeader.PreferReturnEnum getDefaultPreferReturn() {
+		return PreferHeader.PreferReturnEnum.REPRESENTATION;
 	}
 
 }
@@ -28,7 +28,6 @@ import javax.ws.rs.*;
 import javax.ws.rs.core.MediaType;
 import javax.ws.rs.core.Response;
 
-import ca.uhn.fhir.interceptor.api.IInterceptorService;
 import org.hl7.fhir.instance.model.api.IBaseResource;
 
 import ca.uhn.fhir.context.FhirContext;
@@ -371,8 +370,8 @@ implements IRestfulServer<JaxRsRequest>, IResourceProvider {
 	}
 
 	@Override
-	public PreferReturnEnum getDefaultPreferReturn() {
-		return PreferReturnEnum.REPRESENTATION;
+	public PreferHeader.PreferReturnEnum getDefaultPreferReturn() {
+		return PreferHeader.PreferReturnEnum.REPRESENTATION;
 	}
 
 	/**
@@ -301,12 +301,12 @@ public class GenericJaxRsClientDstu2Test {
 		Patient p = new Patient();
 		p.addName().addFamily("FOOFAMILY");
 
-		client.create().resource(p).prefer(PreferReturnEnum.MINIMAL).execute();
+		client.create().resource(p).prefer(PreferHeader.PreferReturnEnum.MINIMAL).execute();
 		assertEquals(1, ourRequestHeaders.get(Constants.HEADER_PREFER).size());
 		assertEquals(Constants.HEADER_PREFER_RETURN + '=' + Constants.HEADER_PREFER_RETURN_MINIMAL, ourRequestHeaders.get(Constants.HEADER_PREFER).get(0).getValue());
 
 
-		client.create().resource(p).prefer(PreferReturnEnum.REPRESENTATION).execute();
+		client.create().resource(p).prefer(PreferHeader.PreferReturnEnum.REPRESENTATION).execute();
 		assertEquals(1, ourRequestHeaders.get(Constants.HEADER_PREFER).size());
 		assertEquals(Constants.HEADER_PREFER_RETURN + '=' + Constants.HEADER_PREFER_RETURN_REPRESENTATION, ourRequestHeaders.get(Constants.HEADER_PREFER).get(0).getValue());
 
@@ -1927,12 +1927,12 @@ public class GenericJaxRsClientDstu2Test {
 		p.setId(new IdDt("1"));
 		p.addName().addFamily("FOOFAMILY");
 
-		client.update().resource(p).prefer(PreferReturnEnum.MINIMAL).execute();
+		client.update().resource(p).prefer(PreferHeader.PreferReturnEnum.MINIMAL).execute();
 		assertEquals(1, ourRequestHeaders.get(Constants.HEADER_PREFER).size());
 		assertEquals(Constants.HEADER_PREFER_RETURN + '=' + Constants.HEADER_PREFER_RETURN_MINIMAL, ourRequestHeaders.get(Constants.HEADER_PREFER).get(0).getValue());
 
 
-		client.update().resource(p).prefer(PreferReturnEnum.REPRESENTATION).execute();
+		client.update().resource(p).prefer(PreferHeader.PreferReturnEnum.REPRESENTATION).execute();
 		assertEquals(1, ourRequestHeaders.get(Constants.HEADER_PREFER).size());
 		assertEquals(Constants.HEADER_PREFER_RETURN + '=' + Constants.HEADER_PREFER_RETURN_REPRESENTATION, ourRequestHeaders.get(Constants.HEADER_PREFER).get(0).getValue());
 
@@ -321,12 +321,12 @@ public class GenericJaxRsClientDstu3Test {
 		Patient p = new Patient();
 		p.addName().setFamily("FOOFAMILY");
 
-		client.create().resource(p).prefer(PreferReturnEnum.MINIMAL).execute();
+		client.create().resource(p).prefer(PreferHeader.PreferReturnEnum.MINIMAL).execute();
 		assertEquals(1, ourRequestHeaders.get(Constants.HEADER_PREFER).size());
 		assertEquals(Constants.HEADER_PREFER_RETURN + '=' + Constants.HEADER_PREFER_RETURN_MINIMAL, ourRequestHeaders.get(Constants.HEADER_PREFER).get(0).getValue());
 
 
-		client.create().resource(p).prefer(PreferReturnEnum.REPRESENTATION).execute();
+		client.create().resource(p).prefer(PreferHeader.PreferReturnEnum.REPRESENTATION).execute();
 		assertEquals(1, ourRequestHeaders.get(Constants.HEADER_PREFER).size());
 		assertEquals(Constants.HEADER_PREFER_RETURN + '=' + Constants.HEADER_PREFER_RETURN_REPRESENTATION, ourRequestHeaders.get(Constants.HEADER_PREFER).get(0).getValue());
 
@@ -1980,12 +1980,12 @@ public class GenericJaxRsClientDstu3Test {
 		p.setId(new IdType("1"));
 		p.addName().setFamily("FOOFAMILY");
 
-		client.update().resource(p).prefer(PreferReturnEnum.MINIMAL).execute();
+		client.update().resource(p).prefer(PreferHeader.PreferReturnEnum.MINIMAL).execute();
 		assertEquals(1, ourRequestHeaders.get(Constants.HEADER_PREFER).size());
 		assertEquals(Constants.HEADER_PREFER_RETURN + '=' + Constants.HEADER_PREFER_RETURN_MINIMAL, ourRequestHeaders.get(Constants.HEADER_PREFER).get(0).getValue());
 
 
-		client.update().resource(p).prefer(PreferReturnEnum.REPRESENTATION).execute();
+		client.update().resource(p).prefer(PreferHeader.PreferReturnEnum.REPRESENTATION).execute();
 		assertEquals(1, ourRequestHeaders.get(Constants.HEADER_PREFER).size());
 		assertEquals(Constants.HEADER_PREFER_RETURN + '=' + Constants.HEADER_PREFER_RETURN_REPRESENTATION, ourRequestHeaders.get(Constants.HEADER_PREFER).get(0).getValue());
 
@@ -7,10 +7,7 @@ import ca.uhn.fhir.jaxrs.server.test.TestJaxRsConformanceRestProviderDstu3;
 import ca.uhn.fhir.jaxrs.server.test.TestJaxRsMockPageProviderDstu3;
 import ca.uhn.fhir.jaxrs.server.test.TestJaxRsMockPatientRestProviderDstu3;
 import ca.uhn.fhir.model.primitive.UriDt;
-import ca.uhn.fhir.rest.api.EncodingEnum;
-import ca.uhn.fhir.rest.api.MethodOutcome;
-import ca.uhn.fhir.rest.api.PreferReturnEnum;
-import ca.uhn.fhir.rest.api.SearchStyleEnum;
+import ca.uhn.fhir.rest.api.*;
 import ca.uhn.fhir.rest.client.api.IGenericClient;
 import ca.uhn.fhir.rest.client.api.ServerValidationModeEnum;
 import ca.uhn.fhir.rest.client.interceptor.LoggingInterceptor;
@@ -138,7 +135,7 @@ public class AbstractJaxRsResourceProviderDstu3Test {
 		client.setEncoding(EncodingEnum.JSON);
 
 		MethodOutcome response = client.create().resource(toCreate).conditional()
-			.where(Patient.IDENTIFIER.exactly().identifier("2")).prefer(PreferReturnEnum.REPRESENTATION).execute();
+			.where(Patient.IDENTIFIER.exactly().identifier("2")).prefer(PreferHeader.PreferReturnEnum.REPRESENTATION).execute();
 
 		assertEquals("myIdentifier", patientCaptor.getValue().getIdentifier().get(0).getValue());
 		IBaseResource resource = response.getResource();
@@ -161,7 +158,7 @@ public class AbstractJaxRsResourceProviderDstu3Test {
 
 		when(mock.create(patientCaptor.capture(), isNull(String.class))).thenReturn(outcome);
 		client.setEncoding(EncodingEnum.JSON);
-		final MethodOutcome response = client.create().resource(toCreate).prefer(PreferReturnEnum.REPRESENTATION)
+		final MethodOutcome response = client.create().resource(toCreate).prefer(PreferHeader.PreferReturnEnum.REPRESENTATION)
 			.execute();
 		IBaseResource resource = response.getResource();
 		compareResultId(1, resource);
@@ -13,10 +13,7 @@ import ca.uhn.fhir.model.primitive.DateDt;
 import ca.uhn.fhir.model.primitive.IdDt;
 import ca.uhn.fhir.model.primitive.StringDt;
 import ca.uhn.fhir.model.primitive.UriDt;
-import ca.uhn.fhir.rest.api.EncodingEnum;
-import ca.uhn.fhir.rest.api.MethodOutcome;
-import ca.uhn.fhir.rest.api.PreferReturnEnum;
-import ca.uhn.fhir.rest.api.SearchStyleEnum;
+import ca.uhn.fhir.rest.api.*;
 import ca.uhn.fhir.rest.client.api.IGenericClient;
 import ca.uhn.fhir.rest.client.api.ServerValidationModeEnum;
 import ca.uhn.fhir.rest.client.interceptor.LoggingInterceptor;
@@ -134,7 +131,7 @@ public class AbstractJaxRsResourceProviderTest {
 		client.setEncoding(EncodingEnum.JSON);
 
 		MethodOutcome response = client.create().resource(toCreate).conditional()
-			.where(Patient.IDENTIFIER.exactly().identifier("2")).prefer(PreferReturnEnum.REPRESENTATION).execute();
+			.where(Patient.IDENTIFIER.exactly().identifier("2")).prefer(PreferHeader.PreferReturnEnum.REPRESENTATION).execute();
 
 		assertEquals("myIdentifier", patientCaptor.getValue().getIdentifierFirstRep().getValue());
 		IResource resource = (IResource) response.getResource();
@@ -157,7 +154,7 @@ public class AbstractJaxRsResourceProviderTest {
 
 		when(mock.create(patientCaptor.capture(), isNull(String.class))).thenReturn(outcome);
 		client.setEncoding(EncodingEnum.JSON);
-		final MethodOutcome response = client.create().resource(toCreate).prefer(PreferReturnEnum.REPRESENTATION)
+		final MethodOutcome response = client.create().resource(toCreate).prefer(PreferHeader.PreferReturnEnum.REPRESENTATION)
 			.execute();
 		IResource resource = (IResource) response.getResource();
 		compareResultId(1, resource);
@@ -138,7 +138,7 @@ public class JaxRsPatientProviderDstu3Test {
 		existing.setId((IdType) null);
 		existing.getName().add(new HumanName().setFamily("Created Patient 54"));
 		client.setEncoding(EncodingEnum.JSON);
-		final MethodOutcome results = client.create().resource(existing).prefer(PreferReturnEnum.REPRESENTATION).execute();
+		final MethodOutcome results = client.create().resource(existing).prefer(PreferHeader.PreferReturnEnum.REPRESENTATION).execute();
 		System.out.println(results.getId());
 		final Patient patient = (Patient) results.getResource();
 		System.out.println(patient);
@@ -154,7 +154,7 @@ public class JaxRsPatientProviderDstu3Test {
 		existing.setId((IdType) null);
 		existing.getName().add(new HumanName().setFamily("Created Patient 54"));
 		client.setEncoding(EncodingEnum.XML);
-		final MethodOutcome results = client.create().resource(existing).prefer(PreferReturnEnum.REPRESENTATION).execute();
+		final MethodOutcome results = client.create().resource(existing).prefer(PreferHeader.PreferReturnEnum.REPRESENTATION).execute();
 		System.out.println(results.getId());
 		final Patient patient = (Patient) results.getResource();
 
@@ -187,7 +187,7 @@ public class JaxRsPatientProviderDstu3Test {
 	public void testDeletePatient() {
 		final Patient existing = new Patient();
 		existing.getName().add(new HumanName().setFamily("Created Patient XYZ"));
-		final MethodOutcome results = client.create().resource(existing).prefer(PreferReturnEnum.REPRESENTATION).execute();
+		final MethodOutcome results = client.create().resource(existing).prefer(PreferHeader.PreferReturnEnum.REPRESENTATION).execute();
 		System.out.println(results.getId());
 		final Patient patient = (Patient) results.getResource();
 		client.delete().resourceById(patient.getIdElement()).execute();
@@ -7,7 +7,7 @@ import ca.uhn.fhir.model.primitive.IdDt;
 import ca.uhn.fhir.model.valueset.BundleEntryTransactionMethodEnum;
 import ca.uhn.fhir.rest.api.EncodingEnum;
 import ca.uhn.fhir.rest.api.MethodOutcome;
-import ca.uhn.fhir.rest.api.PreferReturnEnum;
+import ca.uhn.fhir.rest.api.PreferHeader;
 import ca.uhn.fhir.rest.api.SearchStyleEnum;
 import ca.uhn.fhir.rest.client.api.IGenericClient;
 import ca.uhn.fhir.rest.client.api.ServerValidationModeEnum;
@@ -152,7 +152,7 @@ public class JaxRsPatientProviderR4Test {
 		existing.setId((IdDt) null);
 		existing.getNameFirstRep().setFamily("Created Patient 54");
 		client.setEncoding(EncodingEnum.JSON);
-		final MethodOutcome results = client.create().resource(existing).prefer(PreferReturnEnum.REPRESENTATION).execute();
+		final MethodOutcome results = client.create().resource(existing).prefer(PreferHeader.PreferReturnEnum.REPRESENTATION).execute();
 		System.out.println(results.getId());
 		final Patient patient = (Patient) results.getResource();
 		System.out.println(patient);
@@ -167,7 +167,7 @@ public class JaxRsPatientProviderR4Test {
 		existing.setId((IdDt) null);
 		existing.getNameFirstRep().setFamily("Created Patient 54");
 		client.setEncoding(EncodingEnum.XML);
-		final MethodOutcome results = client.create().resource(existing).prefer(PreferReturnEnum.REPRESENTATION).execute();
+		final MethodOutcome results = client.create().resource(existing).prefer(PreferHeader.PreferReturnEnum.REPRESENTATION).execute();
 		System.out.println(results.getId());
 		final Patient patient = (Patient) results.getResource();
 
@@ -199,7 +199,7 @@ public class JaxRsPatientProviderR4Test {
 	public void testDeletePatient() {
 		final Patient existing = new Patient();
 		existing.getNameFirstRep().setFamily("Created Patient XYZ");
-		final MethodOutcome results = client.create().resource(existing).prefer(PreferReturnEnum.REPRESENTATION).execute();
+		final MethodOutcome results = client.create().resource(existing).prefer(PreferHeader.PreferReturnEnum.REPRESENTATION).execute();
 		System.out.println(results.getId());
 		final Patient patient = (Patient) results.getResource();
 		client.delete().resource(patient).execute();
@@ -149,7 +149,7 @@ public class JaxRsPatientProviderTest {
 		existing.setId((IdDt) null);
 		existing.getNameFirstRep().addFamily("Created Patient 54");
 		client.setEncoding(EncodingEnum.JSON);
-		final MethodOutcome results = client.create().resource(existing).prefer(PreferReturnEnum.REPRESENTATION).execute();
+		final MethodOutcome results = client.create().resource(existing).prefer(PreferHeader.PreferReturnEnum.REPRESENTATION).execute();
 		System.out.println(results.getId());
 		final Patient patient = (Patient) results.getResource();
 		System.out.println(patient);
@@ -164,7 +164,7 @@ public class JaxRsPatientProviderTest {
 		existing.setId((IdDt) null);
 		existing.getNameFirstRep().addFamily("Created Patient 54");
 		client.setEncoding(EncodingEnum.XML);
-		final MethodOutcome results = client.create().resource(existing).prefer(PreferReturnEnum.REPRESENTATION).execute();
+		final MethodOutcome results = client.create().resource(existing).prefer(PreferHeader.PreferReturnEnum.REPRESENTATION).execute();
 		System.out.println(results.getId());
 		final Patient patient = (Patient) results.getResource();
 
@@ -196,7 +196,7 @@ public class JaxRsPatientProviderTest {
 	public void testDeletePatient() {
 		final Patient existing = new Patient();
 		existing.getNameFirstRep().addFamily("Created Patient XYZ");
-		final MethodOutcome results = client.create().resource(existing).prefer(PreferReturnEnum.REPRESENTATION).execute();
+		final MethodOutcome results = client.create().resource(existing).prefer(PreferHeader.PreferReturnEnum.REPRESENTATION).execute();
 		System.out.println(results.getId());
 		final Patient patient = (Patient) results.getResource();
 		client.delete().resourceById(patient.getId()).execute();
@@ -309,6 +309,11 @@
   <scope>provided</scope>
 </dependency>
 
+<dependency>
+   <groupId>org.quartz-scheduler</groupId>
+   <artifactId>quartz</artifactId>
+</dependency>
+
 <!-- <dependency> <groupId>org.hsqldb</groupId> <artifactId>hsqldb</artifactId> <version>2.3.2</version> </dependency> -->
 
@@ -700,7 +705,7 @@
 <configPackageBase>ca.uhn.fhir.jpa.config</configPackageBase>
 <packageBase>ca.uhn.fhir.jpa.rp.dstu2</packageBase>
 <targetResourceSpringBeansFile>hapi-fhir-server-resourceproviders-dstu2.xml</targetResourceSpringBeansFile>
-<baseResourceNames></baseResourceNames>
+<baseResourceNames/>
 <excludeResourceNames>
   <!-- <excludeResourceName>OperationDefinition</excludeResourceName> <excludeResourceName>OperationOutcome</excludeResourceName> -->
 </excludeResourceNames>
@@ -0,0 +1,158 @@
+package ca.uhn.fhir.jpa.bulk;
+
+import ca.uhn.fhir.context.FhirContext;
+import ca.uhn.fhir.jpa.model.util.JpaConstants;
+import ca.uhn.fhir.jpa.util.JsonUtil;
+import ca.uhn.fhir.rest.annotation.Operation;
+import ca.uhn.fhir.rest.annotation.OperationParam;
+import ca.uhn.fhir.rest.api.Constants;
+import ca.uhn.fhir.rest.api.PreferHeader;
+import ca.uhn.fhir.rest.server.RestfulServerUtils;
+import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
+import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
+import ca.uhn.fhir.util.ArrayUtil;
+import ca.uhn.fhir.util.OperationOutcomeUtil;
+import com.google.common.annotations.VisibleForTesting;
+import org.apache.commons.lang3.StringUtils;
+import org.hl7.fhir.instance.model.api.IBaseOperationOutcome;
+import org.hl7.fhir.instance.model.api.IPrimitiveType;
+import org.hl7.fhir.r4.model.InstantType;
+import org.springframework.beans.factory.annotation.Autowired;
+
+import javax.servlet.http.HttpServletResponse;
+import java.io.IOException;
+import java.util.Date;
+import java.util.Set;
+
+public class BulkDataExportProvider {
+
+   @Autowired
+   private IBulkDataExportSvc myBulkDataExportSvc;
+   @Autowired
+   private FhirContext myFhirContext;
+
+   @VisibleForTesting
+   public void setFhirContextForUnitTest(FhirContext theFhirContext) {
+      myFhirContext = theFhirContext;
+   }
+
+   @VisibleForTesting
+   public void setBulkDataExportSvcForUnitTests(IBulkDataExportSvc theBulkDataExportSvc) {
+      myBulkDataExportSvc = theBulkDataExportSvc;
+   }
+
+   /**
+    * $export
+    */
+   @Operation(name = JpaConstants.OPERATION_EXPORT, global = false /* set to true once we can handle this */, manualResponse = true, idempotent = true)
+   public void export(
+      @OperationParam(name = JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT, min = 0, max = 1, typeName = "string") IPrimitiveType<String> theOutputFormat,
+      @OperationParam(name = JpaConstants.PARAM_EXPORT_TYPE, min = 0, max = 1, typeName = "string") IPrimitiveType<String> theType,
+      @OperationParam(name = JpaConstants.PARAM_EXPORT_SINCE, min = 0, max = 1, typeName = "instant") IPrimitiveType<Date> theSince,
+      @OperationParam(name = JpaConstants.PARAM_EXPORT_TYPE_FILTER, min = 0, max = 1, typeName = "string") IPrimitiveType<String> theTypeFilter,
+      ServletRequestDetails theRequestDetails
+   ) {
+
+      String preferHeader = theRequestDetails.getHeader(Constants.HEADER_PREFER);
+      PreferHeader prefer = RestfulServerUtils.parsePreferHeader(null, preferHeader);
+      if (prefer.getRespondAsync() == false) {
+         throw new InvalidRequestException("Must request async processing for $export");
+      }
+
+      String outputFormat = theOutputFormat != null ? theOutputFormat.getValueAsString() : null;
+
+      Set<String> resourceTypes = null;
+      if (theType != null) {
+         resourceTypes = ArrayUtil.commaSeparatedListToCleanSet(theType.getValueAsString());
+      }
+
+      Date since = null;
+      if (theSince != null) {
+         since = theSince.getValue();
+      }
+
+      Set<String> filters = null;
+      if (theTypeFilter != null) {
+         filters = ArrayUtil.commaSeparatedListToCleanSet(theTypeFilter.getValueAsString());
+      }
+
+      IBulkDataExportSvc.JobInfo outcome = myBulkDataExportSvc.submitJob(outputFormat, resourceTypes, since, filters);
+
+      String serverBase = getServerBase(theRequestDetails);
+      String pollLocation = serverBase + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS + "?" + JpaConstants.PARAM_EXPORT_POLL_STATUS_JOB_ID + "=" + outcome.getJobId();
+
+      HttpServletResponse response = theRequestDetails.getServletResponse();
+
+      // Add standard headers
+      theRequestDetails.getServer().addHeadersToResponse(response);
+
+      // Successful 202 Accepted
+      response.addHeader(Constants.HEADER_CONTENT_LOCATION, pollLocation);
+      response.setStatus(Constants.STATUS_HTTP_202_ACCEPTED);
+   }
+
+   /**
+    * $export-poll-status
+    */
+   @Operation(name = JpaConstants.OPERATION_EXPORT_POLL_STATUS, manualResponse = true, idempotent = true)
+   public void exportPollStatus(
+      @OperationParam(name = JpaConstants.PARAM_EXPORT_POLL_STATUS_JOB_ID, typeName = "string", min = 0, max = 1) IPrimitiveType<String> theJobId,
+      ServletRequestDetails theRequestDetails
+   ) throws IOException {
+
+      HttpServletResponse response = theRequestDetails.getServletResponse();
+      theRequestDetails.getServer().addHeadersToResponse(response);
+
+      IBulkDataExportSvc.JobInfo status = myBulkDataExportSvc.getJobStatusOrThrowResourceNotFound(theJobId.getValueAsString());
+
+      switch (status.getStatus()) {
+         case SUBMITTED:
+         case BUILDING:
+
+            response.setStatus(Constants.STATUS_HTTP_202_ACCEPTED);
+            response.addHeader(Constants.HEADER_X_PROGRESS, "Build in progress - Status set to " + status.getStatus() + " at " + new InstantType(status.getStatusTime()).getValueAsString());
+            response.addHeader(Constants.HEADER_RETRY_AFTER, "120");
+            break;
+
+         case COMPLETE:
+
+            response.setStatus(Constants.STATUS_HTTP_200_OK);
+            response.setContentType(Constants.CT_JSON);
+
+            // Create a JSON response
+            BulkExportResponseJson bulkResponseDocument = new BulkExportResponseJson();
+            bulkResponseDocument.setTransactionTime(status.getStatusTime());
+            bulkResponseDocument.setRequest(status.getRequest());
+            for (IBulkDataExportSvc.FileEntry nextFile : status.getFiles()) {
+               String serverBase = getServerBase(theRequestDetails);
+               String nextUrl = serverBase + "/" + nextFile.getResourceId().toUnqualifiedVersionless().getValue();
+               bulkResponseDocument
+                  .addOutput()
+                  .setType(nextFile.getResourceType())
+                  .setUrl(nextUrl);
+            }
+            JsonUtil.serialize(bulkResponseDocument, response.getWriter());
+            response.getWriter().close();
+            break;
+
+         case ERROR:
+
+            response.setStatus(Constants.STATUS_HTTP_500_INTERNAL_ERROR);
+            response.setContentType(Constants.CT_FHIR_JSON);
+
+            // Create an OperationOutcome response
+            IBaseOperationOutcome oo = OperationOutcomeUtil.newInstance(myFhirContext);
+            OperationOutcomeUtil.addIssue(myFhirContext, oo, "error", status.getStatusMessage(), null, null);
+            myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToWriter(oo, response.getWriter());
+            response.getWriter().close();
+
+      }
+
+   }
+
+   private String getServerBase(ServletRequestDetails theRequestDetails) {
+      return StringUtils.removeEnd(theRequestDetails.getServerBaseForRequest(), "/");
+   }
+
+}
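For context, the kick-off contract the provider above implements can be exercised with a plain HTTP client. A minimal sketch using Apache HttpClient 4.x, which is an assumption here along with the localhost base URL; the `_type` and `_outputFormat` parameter names are the standard FHIR bulk-data names that the JpaConstants parameters map to:

import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;

public class ExportKickoffExample {
   public static void main(String[] args) throws Exception {
      // Assumed server base URL; adjust for your deployment
      String base = "http://localhost:8080/fhir";
      try (CloseableHttpClient client = HttpClients.createDefault()) {
         HttpGet get = new HttpGet(base + "/$export?_type=Patient,Observation&_outputFormat=application/fhir%2Bndjson");
         // Without this header the provider throws InvalidRequestException
         get.addHeader("Prefer", "respond-async");
         get.addHeader("Accept", "application/fhir+json");
         try (CloseableHttpResponse response = client.execute(get)) {
            // Expect 202 Accepted plus a Content-Location pointing at $export-poll-status
            System.out.println(response.getStatusLine().getStatusCode());
            System.out.println(response.getFirstHeader("Content-Location").getValue());
         }
      }
   }
}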
@@ -0,0 +1,447 @@
+package ca.uhn.fhir.jpa.bulk;
+
+import ca.uhn.fhir.context.FhirContext;
+import ca.uhn.fhir.jpa.dao.DaoRegistry;
+import ca.uhn.fhir.jpa.dao.IFhirResourceDao;
+import ca.uhn.fhir.jpa.dao.IResultIterator;
+import ca.uhn.fhir.jpa.dao.ISearchBuilder;
+import ca.uhn.fhir.jpa.dao.data.IBulkExportCollectionDao;
+import ca.uhn.fhir.jpa.dao.data.IBulkExportCollectionFileDao;
+import ca.uhn.fhir.jpa.dao.data.IBulkExportJobDao;
+import ca.uhn.fhir.jpa.entity.BulkExportCollectionEntity;
+import ca.uhn.fhir.jpa.entity.BulkExportCollectionFileEntity;
+import ca.uhn.fhir.jpa.entity.BulkExportJobEntity;
+import ca.uhn.fhir.jpa.model.sched.FireAtIntervalJob;
+import ca.uhn.fhir.jpa.model.sched.ISchedulerService;
+import ca.uhn.fhir.jpa.model.sched.ScheduledJobDefinition;
+import ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails;
+import ca.uhn.fhir.jpa.model.util.JpaConstants;
+import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
+import ca.uhn.fhir.jpa.util.ExpungeOptions;
+import ca.uhn.fhir.parser.IParser;
+import ca.uhn.fhir.rest.api.Constants;
+import ca.uhn.fhir.rest.param.DateRangeParam;
+import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
+import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
+import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
+import ca.uhn.fhir.util.BinaryUtil;
+import ca.uhn.fhir.util.StopWatch;
+import com.google.common.collect.Sets;
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.lang3.time.DateUtils;
+import org.hl7.fhir.instance.model.api.IBaseBinary;
+import org.hl7.fhir.instance.model.api.IBaseResource;
+import org.hl7.fhir.instance.model.api.IIdType;
+import org.hl7.fhir.r4.model.InstantType;
+import org.quartz.DisallowConcurrentExecution;
+import org.quartz.JobExecutionContext;
+import org.quartz.PersistJobDataAfterExecution;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.data.domain.PageRequest;
+import org.springframework.data.domain.Pageable;
+import org.springframework.data.domain.Slice;
+import org.springframework.transaction.PlatformTransactionManager;
+import org.springframework.transaction.support.TransactionTemplate;
+
+import javax.annotation.PostConstruct;
+import javax.transaction.Transactional;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.OutputStreamWriter;
+import java.util.*;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicInteger;
+
+import static ca.uhn.fhir.util.UrlUtil.escapeUrlParam;
+import static org.apache.commons.lang3.StringUtils.isNotBlank;
+
+public class BulkDataExportSvcImpl implements IBulkDataExportSvc {
+
+   private static final long REFRESH_INTERVAL = 10 * DateUtils.MILLIS_PER_SECOND;
+   private static final Logger ourLog = LoggerFactory.getLogger(BulkDataExportSvcImpl.class);
+   private int myReuseBulkExportForMillis = (int) (60 * DateUtils.MILLIS_PER_MINUTE);
+
+   @Autowired
+   private IBulkExportJobDao myBulkExportJobDao;
+   @Autowired
+   private IBulkExportCollectionDao myBulkExportCollectionDao;
+   @Autowired
+   private IBulkExportCollectionFileDao myBulkExportCollectionFileDao;
+   @Autowired
+   private ISchedulerService mySchedulerService;
+   @Autowired
+   private DaoRegistry myDaoRegistry;
+   @Autowired
+   private FhirContext myContext;
+   @Autowired
+   private PlatformTransactionManager myTxManager;
+   private TransactionTemplate myTxTemplate;
+
+   private long myFileMaxChars = 500 * FileUtils.ONE_KB;
+   private int myRetentionPeriod = (int) DateUtils.MILLIS_PER_DAY;
+
+   /**
+    * This method is called by the scheduler to run a pass of the generator
+    */
+   @Transactional(value = Transactional.TxType.NEVER)
+   @Override
+   public synchronized void buildExportFiles() {
+
+      Optional<BulkExportJobEntity> jobToProcessOpt = myTxTemplate.execute(t -> {
+         Pageable page = PageRequest.of(0, 1);
+         Slice<BulkExportJobEntity> submittedJobs = myBulkExportJobDao.findByStatus(page, BulkJobStatusEnum.SUBMITTED);
+         if (submittedJobs.isEmpty()) {
+            return Optional.empty();
+         }
+         return Optional.of(submittedJobs.getContent().get(0));
+      });
+
+      if (!jobToProcessOpt.isPresent()) {
+         return;
+      }
+
+      String jobUuid = jobToProcessOpt.get().getJobId();
+
+      try {
+         myTxTemplate.execute(t -> {
+            processJob(jobUuid);
+            return null;
+         });
+      } catch (Exception e) {
+         ourLog.error("Failure while preparing bulk export extract", e);
+         myTxTemplate.execute(t -> {
+            Optional<BulkExportJobEntity> submittedJobs = myBulkExportJobDao.findByJobId(jobUuid);
+            if (submittedJobs.isPresent()) {
+               BulkExportJobEntity jobEntity = submittedJobs.get();
+               jobEntity.setStatus(BulkJobStatusEnum.ERROR);
+               jobEntity.setStatusMessage(e.getMessage());
+               myBulkExportJobDao.save(jobEntity);
+            }
+            return null;
+         });
+      }
+
+   }
+
+   /**
+    * This method is called by the scheduler to purge export files whose expiry has passed
+    */
+   @Transactional(value = Transactional.TxType.NEVER)
+   @Override
+   public void purgeExpiredFiles() {
+      Optional<BulkExportJobEntity> jobToDelete = myTxTemplate.execute(t -> {
+         Pageable page = PageRequest.of(0, 1);
+         Slice<BulkExportJobEntity> submittedJobs = myBulkExportJobDao.findByExpiry(page, new Date());
+         if (submittedJobs.isEmpty()) {
+            return Optional.empty();
+         }
+         return Optional.of(submittedJobs.getContent().get(0));
+      });
+
+      if (jobToDelete.isPresent()) {
+
+         ourLog.info("Deleting bulk export job: {}", jobToDelete.get().getJobId());
+
+         myTxTemplate.execute(t -> {
+
+            BulkExportJobEntity job = myBulkExportJobDao.getOne(jobToDelete.get().getId());
+            for (BulkExportCollectionEntity nextCollection : job.getCollections()) {
+               for (BulkExportCollectionFileEntity nextFile : nextCollection.getFiles()) {
+
+                  ourLog.info("Purging bulk data file: {}", nextFile.getResourceId());
+                  getBinaryDao().delete(toId(nextFile.getResourceId()));
+                  getBinaryDao().forceExpungeInExistingTransaction(toId(nextFile.getResourceId()), new ExpungeOptions().setExpungeDeletedResources(true).setExpungeOldVersions(true), null);
+                  myBulkExportCollectionFileDao.delete(nextFile);
+
+               }
+
+               myBulkExportCollectionDao.delete(nextCollection);
+            }
+
+            myBulkExportJobDao.delete(job);
+            return null;
+         });
+
+      }
+
+   }
+
+   private void processJob(String theJobUuid) {
+
+      Optional<BulkExportJobEntity> jobOpt = myBulkExportJobDao.findByJobId(theJobUuid);
+      if (!jobOpt.isPresent()) {
+         ourLog.info("Job appears to be deleted");
+         return;
+      }
+
+      StopWatch jobStopwatch = new StopWatch();
+      AtomicInteger jobResourceCounter = new AtomicInteger();
+
+      BulkExportJobEntity job = jobOpt.get();
+      ourLog.info("Bulk export starting generation for batch export job: {}", job);
+
+      for (BulkExportCollectionEntity nextCollection : job.getCollections()) {
+
+         String nextType = nextCollection.getResourceType();
+         IFhirResourceDao dao = myDaoRegistry.getResourceDao(nextType);
+
+         ourLog.info("Bulk export assembling export of type {} for job {}", nextType, theJobUuid);
+
+         ISearchBuilder sb = dao.newSearchBuilder();
+         Class<? extends IBaseResource> nextTypeClass = myContext.getResourceDefinition(nextType).getImplementingClass();
+         sb.setType(nextTypeClass, nextType);
+
+         SearchParameterMap map = new SearchParameterMap();
+         map.setLoadSynchronous(true);
+         if (job.getSince() != null) {
+            map.setLastUpdated(new DateRangeParam(job.getSince(), null));
+         }
+
+         IResultIterator resultIterator = sb.createQuery(map, new SearchRuntimeDetails(null, theJobUuid), null);
+         storeResultsToFiles(nextCollection, sb, resultIterator, jobResourceCounter, jobStopwatch);
+
+      }
+
+      job.setStatus(BulkJobStatusEnum.COMPLETE);
+      updateExpiry(job);
+      myBulkExportJobDao.save(job);
+
+      ourLog.info("Bulk export completed job in {}: {}", jobStopwatch, job);
+
+   }
+
+   private void storeResultsToFiles(BulkExportCollectionEntity theExportCollection, ISearchBuilder theSearchBuilder, IResultIterator theResultIterator, AtomicInteger theJobResourceCounter, StopWatch theJobStopwatch) {
+
+      try (IResultIterator query = theResultIterator) {
+         if (!query.hasNext()) {
+            return;
+         }
+
+         AtomicInteger fileCounter = new AtomicInteger(0);
+         ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
+         OutputStreamWriter writer = new OutputStreamWriter(outputStream, Constants.CHARSET_UTF8);
+         IParser parser = myContext.newJsonParser().setPrettyPrint(false);
+
+         List<Long> pidsSpool = new ArrayList<>();
+         List<IBaseResource> resourcesSpool = new ArrayList<>();
+         while (query.hasNext()) {
+            pidsSpool.add(query.next());
+            fileCounter.incrementAndGet();
+            theJobResourceCounter.incrementAndGet();
+
+            if (pidsSpool.size() >= 10 || !query.hasNext()) {
+
+               theSearchBuilder.loadResourcesByPid(pidsSpool, Collections.emptyList(), resourcesSpool, false, null);
+
+               for (IBaseResource nextFileResource : resourcesSpool) {
+                  parser.encodeResourceToWriter(nextFileResource, writer);
+                  writer.append("\n");
+               }
+
+               pidsSpool.clear();
+               resourcesSpool.clear();
+
+               if (outputStream.size() >= myFileMaxChars || !query.hasNext()) {
+                  Optional<IIdType> createdId = flushToFiles(theExportCollection, fileCounter, outputStream);
+                  createdId.ifPresent(theIIdType -> ourLog.info("Created resource {} for bulk export file containing {} resources of type {} - Total {} resources ({}/sec)", theIIdType.toUnqualifiedVersionless().getValue(), fileCounter.get(), theExportCollection.getResourceType(), theJobResourceCounter.get(), theJobStopwatch.formatThroughput(theJobResourceCounter.get(), TimeUnit.SECONDS)));
+                  fileCounter.set(0);
+               }
+
+            }
+         }
+
+      } catch (IOException e) {
+         throw new InternalErrorException(e);
+      }
+   }
+
+   private Optional<IIdType> flushToFiles(BulkExportCollectionEntity theCollection, AtomicInteger theCounter, ByteArrayOutputStream theOutputStream) {
+      if (theOutputStream.size() > 0) {
+         IBaseBinary binary = BinaryUtil.newBinary(myContext);
+         binary.setContentType(Constants.CT_FHIR_NDJSON);
+         binary.setContent(theOutputStream.toByteArray());
+
+         IIdType createdId = getBinaryDao().create(binary).getResource().getIdElement();
+
+         BulkExportCollectionFileEntity file = new BulkExportCollectionFileEntity();
+         theCollection.getFiles().add(file);
+         file.setCollection(theCollection);
+         file.setResource(createdId.getIdPart());
+         myBulkExportCollectionFileDao.saveAndFlush(file);
+         theOutputStream.reset();
+
+         return Optional.of(createdId);
+      }
+
+      return Optional.empty();
+   }
+
+   @SuppressWarnings("unchecked")
+   private IFhirResourceDao<IBaseBinary> getBinaryDao() {
+      return myDaoRegistry.getResourceDao("Binary");
+   }
+
+   @PostConstruct
+   public void start() {
+      ourLog.info("Bulk export service starting with refresh interval {}", StopWatch.formatMillis(REFRESH_INTERVAL));
+      myTxTemplate = new TransactionTemplate(myTxManager);
+
+      ScheduledJobDefinition jobDetail = new ScheduledJobDefinition();
+      jobDetail.setId(BulkDataExportSvcImpl.class.getName());
+      jobDetail.setJobClass(BulkDataExportSvcImpl.SubmitJob.class);
+      mySchedulerService.scheduleFixedDelay(REFRESH_INTERVAL, true, jobDetail);
+   }
+
+   @Transactional
+   @Override
+   public JobInfo submitJob(String theOutputFormat, Set<String> theResourceTypes, Date theSince, Set<String> theFilters) {
+      String outputFormat = Constants.CT_FHIR_NDJSON;
+      if (isNotBlank(theOutputFormat)) {
+         outputFormat = theOutputFormat;
+      }
+      if (!Constants.CTS_NDJSON.contains(outputFormat)) {
+         throw new InvalidRequestException("Invalid output format: " + theOutputFormat);
+      }
+
+      StringBuilder requestBuilder = new StringBuilder();
+      requestBuilder.append("/").append(JpaConstants.OPERATION_EXPORT);
+      requestBuilder.append("?").append(JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT).append("=").append(escapeUrlParam(outputFormat));
+      Set<String> resourceTypes = theResourceTypes;
+      if (resourceTypes != null) {
+         requestBuilder.append("&").append(JpaConstants.PARAM_EXPORT_TYPE).append("=").append(String.join(",", resourceTypes));
+      }
+      Date since = theSince;
+      if (since != null) {
+         requestBuilder.append("&").append(JpaConstants.PARAM_EXPORT_SINCE).append("=").append(new InstantType(since).setTimeZoneZulu(true).getValueAsString());
+      }
+      if (theFilters != null && theFilters.size() > 0) {
+         requestBuilder.append("&").append(JpaConstants.PARAM_EXPORT_TYPE_FILTER).append("=").append(String.join(",", theFilters));
+      }
+      String request = requestBuilder.toString();
+
+      Date cutoff = DateUtils.addMilliseconds(new Date(), -myReuseBulkExportForMillis);
+      Pageable page = PageRequest.of(0, 10);
+      Slice<BulkExportJobEntity> existing = myBulkExportJobDao.findExistingJob(page, request, cutoff, BulkJobStatusEnum.ERROR);
+      if (existing.isEmpty() == false) {
+         return toSubmittedJobInfo(existing.iterator().next());
+      }
+
+      if (resourceTypes == null || resourceTypes.isEmpty()) {
+         // This is probably not a useful default, but having the default be "download the whole
+         // server" seems like a risky default too. We'll deal with that by having the default involve
+         // only returning a small time span
+         resourceTypes = myContext.getResourceNames();
+         if (since == null) {
+            since = DateUtils.addDays(new Date(), -1);
+         }
+      }
+
+      BulkExportJobEntity job = new BulkExportJobEntity();
+      job.setJobId(UUID.randomUUID().toString());
+      job.setStatus(BulkJobStatusEnum.SUBMITTED);
+      job.setSince(since);
+      job.setCreated(new Date());
+      job.setRequest(request);
+
+      updateExpiry(job);
+      myBulkExportJobDao.save(job);
+
+      for (String nextType : resourceTypes) {
+         if (!myDaoRegistry.isResourceTypeSupported(nextType)) {
+            throw new InvalidRequestException("Unknown or unsupported resource type: " + nextType);
+         }
+
+         BulkExportCollectionEntity collection = new BulkExportCollectionEntity();
+         collection.setJob(job);
+         collection.setResourceType(nextType);
+         job.getCollections().add(collection);
+         myBulkExportCollectionDao.save(collection);
+      }
+
+      ourLog.info("Bulk export job submitted: {}", job.toString());
+
+      return toSubmittedJobInfo(job);
+   }
+
+   private JobInfo toSubmittedJobInfo(BulkExportJobEntity theJob) {
+      return new JobInfo().setJobId(theJob.getJobId());
+   }
+
+   private void updateExpiry(BulkExportJobEntity theJob) {
+      theJob.setExpiry(DateUtils.addMilliseconds(new Date(), myRetentionPeriod));
+   }
+
+   @Transactional
+   @Override
+   public JobInfo getJobStatusOrThrowResourceNotFound(String theJobId) {
+      BulkExportJobEntity job = myBulkExportJobDao
+         .findByJobId(theJobId)
+         .orElseThrow(() -> new ResourceNotFoundException(theJobId));
+
+      JobInfo retVal = new JobInfo();
+      retVal.setJobId(theJobId);
+      retVal.setStatus(job.getStatus());
+      retVal.setStatusTime(job.getStatusTime());
+      retVal.setStatusMessage(job.getStatusMessage());
+      retVal.setRequest(job.getRequest());
+
+      if (job.getStatus() == BulkJobStatusEnum.COMPLETE) {
+         for (BulkExportCollectionEntity nextCollection : job.getCollections()) {
+            for (BulkExportCollectionFileEntity nextFile : nextCollection.getFiles()) {
+               retVal.addFile()
+                  .setResourceType(nextCollection.getResourceType())
+                  .setResourceId(toQualifiedBinaryId(nextFile.getResourceId()));
+            }
+         }
+      }
+
+      return retVal;
+   }
+
+   private IIdType toId(String theResourceId) {
+      IIdType retVal = myContext.getVersion().newIdType();
+      retVal.setValue(theResourceId);
+      return retVal;
+   }
+
+   private IIdType toQualifiedBinaryId(String theIdPart) {
+      IIdType retVal = myContext.getVersion().newIdType();
+      retVal.setParts(null, "Binary", theIdPart, null);
+
+      return retVal;
+   }
+
+   @Override
+   @Transactional
+   public synchronized void cancelAndPurgeAllJobs() {
+      myBulkExportCollectionFileDao.deleteAll();
+      myBulkExportCollectionDao.deleteAll();
+      myBulkExportJobDao.deleteAll();
+   }
+
+   @DisallowConcurrentExecution
+   @PersistJobDataAfterExecution
+   public static class SubmitJob extends FireAtIntervalJob {
+      @Autowired
+      private IBulkDataExportSvc myTarget;
+
+      public SubmitJob() {
+         super(REFRESH_INTERVAL);
+      }
+
+      @Override
+      protected void doExecute(JobExecutionContext theContext) {
+         myTarget.buildExportFiles();
+      }
+   }
+
+}
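The service writes each collection as newline-delimited JSON: resources are loaded in batches of ten PIDs, encoded one per line, and flushed into a new Binary resource whenever the buffer passes myFileMaxChars (500 KB by default). A minimal sketch of consuming one of the produced Binary payloads, assuming the raw NDJSON bytes have already been fetched from the server:

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.parser.IParser;
import org.hl7.fhir.instance.model.api.IBaseResource;

import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;

public class NdjsonReadExample {
   public static void readExportFile(FhirContext theContext, byte[] theNdjsonBytes) throws Exception {
      IParser parser = theContext.newJsonParser();
      try (BufferedReader reader = new BufferedReader(
         new InputStreamReader(new ByteArrayInputStream(theNdjsonBytes), StandardCharsets.UTF_8))) {
         String line;
         while ((line = reader.readLine()) != null) {
            // One complete resource per line, exactly as encodeResourceToWriter wrote it
            IBaseResource resource = parser.parseResource(line);
            System.out.println(resource.getIdElement().getValue());
         }
      }
   }
}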
@@ -0,0 +1,110 @@
+package ca.uhn.fhir.jpa.bulk;
+
+import ca.uhn.fhir.jpa.util.JsonDateDeserializer;
+import ca.uhn.fhir.jpa.util.JsonDateSerializer;
+import com.fasterxml.jackson.annotation.JsonAutoDetect;
+import com.fasterxml.jackson.annotation.JsonInclude;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
+import com.fasterxml.jackson.databind.annotation.JsonSerialize;
+
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.List;
+
+@JsonInclude(JsonInclude.Include.NON_DEFAULT)
+@JsonAutoDetect(creatorVisibility = JsonAutoDetect.Visibility.NONE, fieldVisibility = JsonAutoDetect.Visibility.NONE, getterVisibility = JsonAutoDetect.Visibility.NONE, isGetterVisibility = JsonAutoDetect.Visibility.NONE, setterVisibility = JsonAutoDetect.Visibility.NONE)
+public class BulkExportResponseJson {
+
+   @JsonProperty("transactionTime")
+   @JsonSerialize(using = JsonDateSerializer.class)
+   @JsonDeserialize(using = JsonDateDeserializer.class)
+   private Date myTransactionTime;
+
+   @JsonProperty("request")
+   private String myRequest;
+   @JsonProperty("requiresAccessToken")
+   private Boolean myRequiresAccessToken;
+   @JsonProperty("output")
+   private List<Output> myOutput;
+   @JsonProperty("error")
+   private List<Output> myError;
+
+   public Date getTransactionTime() {
+      return myTransactionTime;
+   }
+
+   public BulkExportResponseJson setTransactionTime(Date theTransactionTime) {
+      myTransactionTime = theTransactionTime;
+      return this;
+   }
+
+   public String getRequest() {
+      return myRequest;
+   }
+
+   public BulkExportResponseJson setRequest(String theRequest) {
+      myRequest = theRequest;
+      return this;
+   }
+
+   public Boolean getRequiresAccessToken() {
+      return myRequiresAccessToken;
+   }
+
+   public BulkExportResponseJson setRequiresAccessToken(Boolean theRequiresAccessToken) {
+      myRequiresAccessToken = theRequiresAccessToken;
+      return this;
+   }
+
+   public List<Output> getOutput() {
+      if (myOutput == null) {
+         myOutput = new ArrayList<>();
+      }
+      return myOutput;
+   }
+
+   public List<Output> getError() {
+      if (myError == null) {
+         myError = new ArrayList<>();
+      }
+      return myError;
+   }
+
+   public Output addOutput() {
+      Output retVal = new Output();
+      getOutput().add(retVal);
+      return retVal;
+   }
+
+   @JsonInclude(JsonInclude.Include.NON_NULL)
+   @JsonAutoDetect(creatorVisibility = JsonAutoDetect.Visibility.NONE, fieldVisibility = JsonAutoDetect.Visibility.NONE, getterVisibility = JsonAutoDetect.Visibility.NONE, isGetterVisibility = JsonAutoDetect.Visibility.NONE, setterVisibility = JsonAutoDetect.Visibility.NONE)
+   public static class Output {
+
+      @JsonProperty("type")
+      private String myType;
+      @JsonProperty("url")
+      private String myUrl;
+
+      public String getType() {
+         return myType;
+      }
+
+      public Output setType(String theType) {
+         myType = theType;
+         return this;
+      }
+
+      public String getUrl() {
+         return myUrl;
+      }
+
+      public Output setUrl(String theUrl) {
+         myUrl = theUrl;
+         return this;
+      }
+
+   }
+
+}
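For reference, serializing a completed job through this class yields a status document of roughly the following shape; all values here are illustrative, and requiresAccessToken is omitted when unset because of the NON_DEFAULT inclusion rule:

{
  "transactionTime": "2019-08-01T10:00:00.000+00:00",
  "request": "/$export?_outputFormat=application%2Ffhir%2Bndjson&_type=Patient",
  "output": [
    { "type": "Patient", "url": "http://localhost:8080/fhir/Binary/123" }
  ]
}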
@@ -0,0 +1,10 @@
+package ca.uhn.fhir.jpa.bulk;
+
+public enum BulkJobStatusEnum {
+
+   SUBMITTED,
+   BUILDING,
+   COMPLETE,
+   ERROR
+
+}
@@ -0,0 +1,114 @@
+package ca.uhn.fhir.jpa.bulk;
+
+import org.hl7.fhir.instance.model.api.IIdType;
+
+import javax.transaction.Transactional;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.List;
+import java.util.Set;
+
+public interface IBulkDataExportSvc {
+   void buildExportFiles();
+
+   @Transactional(value = Transactional.TxType.NEVER)
+   void purgeExpiredFiles();
+
+   JobInfo submitJob(String theOutputFormat, Set<String> theResourceTypes, Date theSince, Set<String> theFilters);
+
+   JobInfo getJobStatusOrThrowResourceNotFound(String theJobId);
+
+   void cancelAndPurgeAllJobs();
+
+   class JobInfo {
+      private String myJobId;
+      private BulkJobStatusEnum myStatus;
+      private List<FileEntry> myFiles;
+      private String myRequest;
+      private Date myStatusTime;
+      private String myStatusMessage;
+
+      public String getRequest() {
+         return myRequest;
+      }
+
+      public void setRequest(String theRequest) {
+         myRequest = theRequest;
+      }
+
+      public Date getStatusTime() {
+         return myStatusTime;
+      }
+
+      public JobInfo setStatusTime(Date theStatusTime) {
+         myStatusTime = theStatusTime;
+         return this;
+      }
+
+      public String getJobId() {
+         return myJobId;
+      }
+
+      public JobInfo setJobId(String theJobId) {
+         myJobId = theJobId;
+         return this;
+      }
+
+      public List<FileEntry> getFiles() {
+         if (myFiles == null) {
+            myFiles = new ArrayList<>();
+         }
+         return myFiles;
+      }
+
+      public BulkJobStatusEnum getStatus() {
+         return myStatus;
+      }
+
+      public JobInfo setStatus(BulkJobStatusEnum theStatus) {
+         myStatus = theStatus;
+         return this;
+      }
+
+      public String getStatusMessage() {
+         return myStatusMessage;
+      }
+
+      public JobInfo setStatusMessage(String theStatusMessage) {
+         myStatusMessage = theStatusMessage;
+         return this;
+      }
+
+      public FileEntry addFile() {
+         FileEntry retVal = new FileEntry();
+         getFiles().add(retVal);
+         return retVal;
+      }
+   }
+
+   class FileEntry {
+      private String myResourceType;
+      private IIdType myResourceId;
+
+      public String getResourceType() {
+         return myResourceType;
+      }
+
+      public FileEntry setResourceType(String theResourceType) {
+         myResourceType = theResourceType;
+         return this;
+      }
+
+      public IIdType getResourceId() {
+         return myResourceId;
+      }
+
+      public FileEntry setResourceId(IIdType theResourceId) {
+         myResourceId = theResourceId;
+         return this;
+      }
+   }
+
+}
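A minimal sketch of driving this contract directly, for example from a test; the injected myBulkDataExportSvc field and the Guava Sets helper are assumptions, and the explicit buildExportFiles() call stands in for the quartz SubmitJob that normally triggers it:

import ca.uhn.fhir.rest.api.Constants;
import com.google.common.collect.Sets;

// Submit a Patient export, force one build pass, then poll for the result
IBulkDataExportSvc.JobInfo job = myBulkDataExportSvc.submitJob(
   Constants.CT_FHIR_NDJSON, Sets.newHashSet("Patient"), null, null);

myBulkDataExportSvc.buildExportFiles();

IBulkDataExportSvc.JobInfo status = myBulkDataExportSvc.getJobStatusOrThrowResourceNotFound(job.getJobId());
if (status.getStatus() == BulkJobStatusEnum.COMPLETE) {
   for (IBulkDataExportSvc.FileEntry next : status.getFiles()) {
      System.out.println(next.getResourceType() + " -> " + next.getResourceId().getValue());
   }
}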
@@ -6,11 +6,17 @@ import ca.uhn.fhir.interceptor.api.IInterceptorService;
 import ca.uhn.fhir.interceptor.executor.InterceptorService;
 import ca.uhn.fhir.jpa.binstore.BinaryAccessProvider;
 import ca.uhn.fhir.jpa.binstore.BinaryStorageInterceptor;
+import ca.uhn.fhir.jpa.bulk.BulkDataExportSvcImpl;
+import ca.uhn.fhir.jpa.bulk.BulkDataExportProvider;
+import ca.uhn.fhir.jpa.bulk.IBulkDataExportSvc;
 import ca.uhn.fhir.jpa.dao.DaoRegistry;
 import ca.uhn.fhir.jpa.graphql.JpaStorageServices;
 import ca.uhn.fhir.jpa.interceptor.JpaConsentContextServices;
+import ca.uhn.fhir.jpa.model.sched.ISchedulerService;
 import ca.uhn.fhir.jpa.provider.SubscriptionTriggeringProvider;
 import ca.uhn.fhir.jpa.provider.TerminologyUploaderProvider;
+import ca.uhn.fhir.jpa.sched.AutowiringSpringBeanJobFactory;
+import ca.uhn.fhir.jpa.sched.SchedulerServiceImpl;
 import ca.uhn.fhir.jpa.search.DatabaseBackedPagingProvider;
 import ca.uhn.fhir.jpa.search.IStaleSearchDeletingSvc;
 import ca.uhn.fhir.jpa.search.StaleSearchDeletingSvcImpl;
@@ -29,7 +35,6 @@ import ca.uhn.fhir.jpa.subscription.module.matcher.InMemorySubscriptionMatcher;
 import ca.uhn.fhir.rest.server.interceptor.consent.IConsentContextServices;
 import org.hibernate.jpa.HibernatePersistenceProvider;
 import org.hl7.fhir.utilities.graphql.IGraphQLStorageServices;
-import org.springframework.beans.factory.annotation.Autowire;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.context.annotation.*;
 import org.springframework.core.env.Environment;
@@ -38,15 +43,10 @@ import org.springframework.dao.annotation.PersistenceExceptionTranslationPostProcessor;
 import org.springframework.data.jpa.repository.config.EnableJpaRepositories;
 import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean;
 import org.springframework.scheduling.TaskScheduler;
-import org.springframework.scheduling.annotation.EnableScheduling;
-import org.springframework.scheduling.annotation.SchedulingConfigurer;
 import org.springframework.scheduling.concurrent.ConcurrentTaskScheduler;
 import org.springframework.scheduling.concurrent.ScheduledExecutorFactoryBean;
-import org.springframework.scheduling.config.ScheduledTaskRegistrar;
 import org.springframework.web.socket.config.annotation.WebSocketConfigurer;
 
-import javax.annotation.Nonnull;
-
 /*
  * #%L
  * HAPI FHIR JPA Server
@@ -69,7 +69,6 @@ import javax.annotation.Nonnull;
 
 
 @Configuration
-@EnableScheduling
 @EnableJpaRepositories(basePackages = "ca.uhn.fhir.jpa.dao.data")
 @ComponentScan(basePackages = "ca.uhn.fhir.jpa", excludeFilters = {
    @ComponentScan.Filter(type = FilterType.ASSIGNABLE_TYPE, value = BaseConfig.class),
@@ -77,8 +76,7 @@ import javax.annotation.Nonnull;
    @ComponentScan.Filter(type = FilterType.REGEX, pattern = ".*\\.test\\..*"),
    @ComponentScan.Filter(type = FilterType.REGEX, pattern = ".*Test.*"),
    @ComponentScan.Filter(type = FilterType.REGEX, pattern = "ca.uhn.fhir.jpa.subscription.module.standalone.*")})
-public abstract class BaseConfig implements SchedulingConfigurer {
+public abstract class BaseConfig {
 
    public static final String TASK_EXECUTOR_NAME = "hapiJpaTaskExecutor";
    public static final String GRAPHQL_PROVIDER_NAME = "myGraphQLProvider";
@@ -86,18 +84,12 @@ public abstract class BaseConfig implements SchedulingConfigurer {
    @Autowired
    protected Environment myEnv;
 
-   @Override
-   public void configureTasks(@Nonnull ScheduledTaskRegistrar theTaskRegistrar) {
-      theTaskRegistrar.setTaskScheduler(taskScheduler());
-   }
-
    @Bean("myDaoRegistry")
    public DaoRegistry daoRegistry() {
       return new DaoRegistry();
    }
 
-   @Bean(autowire = Autowire.BY_TYPE)
+   @Bean
    public DatabaseBackedPagingProvider databaseBackedPagingProvider() {
       return new DatabaseBackedPagingProvider();
    }
@@ -226,7 +218,7 @@ public abstract class BaseConfig implements SchedulingConfigurer {
     * Subclasses may override
     */
    protected boolean isSupported(String theResourceType) {
-      return daoRegistry().getResourceDaoIfExists(theResourceType) != null;
+      return daoRegistry().getResourceDaoOrNull(theResourceType) != null;
    }
 
    @Bean
@@ -241,6 +233,30 @@ public abstract class BaseConfig implements SchedulingConfigurer {
       return retVal;
    }
 
+   @Bean
+   public ISchedulerService schedulerService() {
+      return new SchedulerServiceImpl();
+   }
+
+   @Bean
+   public AutowiringSpringBeanJobFactory schedulerJobFactory() {
+      return new AutowiringSpringBeanJobFactory();
+   }
+
+   @Bean
+   @Lazy
+   public IBulkDataExportSvc bulkDataExportSvc() {
+      return new BulkDataExportSvcImpl();
+   }
+
+   @Bean
+   @Lazy
+   public BulkDataExportProvider bulkDataExportProvider() {
+      return new BulkDataExportProvider();
+   }
+
    public static void configureEntityManagerFactory(LocalContainerEntityManagerFactoryBean theFactory, FhirContext theCtx) {
       theFactory.setJpaDialect(hibernateJpaDialect(theCtx.getLocalizer()));
       theFactory.setPackagesToScan("ca.uhn.fhir.jpa.model.entity", "ca.uhn.fhir.jpa.entity");
@@ -80,7 +80,7 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank;
 @Transactional(propagation = Propagation.REQUIRED)
 public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends BaseHapiFhirDao<T> implements IFhirResourceDao<T> {
 
-   private static final Logger ourLog = LoggerFactory.getLogger(BaseHapiFhirResourceDao.class);
+   private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(BaseHapiFhirResourceDao.class);
 
    @Autowired
    protected PlatformTransactionManager myPlatformTransactionManager;
@@ -551,10 +551,22 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends BaseHapiFhirDao<T> implements IFhirResourceDao<T> {
       myEntityManager.merge(entity);
    }
 
+   private void validateExpungeEnabled() {
+      if (!myDaoConfig.isExpungeEnabled()) {
+         throw new MethodNotAllowedException("$expunge is not enabled on this server");
+      }
+   }
+
    @Override
    @Transactional(propagation = Propagation.NEVER)
    public ExpungeOutcome expunge(IIdType theId, ExpungeOptions theExpungeOptions, RequestDetails theRequest) {
+      validateExpungeEnabled();
+      return forceExpungeInExistingTransaction(theId, theExpungeOptions, theRequest);
+   }
+
+   @Override
+   @Transactional(propagation = Propagation.SUPPORTS)
+   public ExpungeOutcome forceExpungeInExistingTransaction(IIdType theId, ExpungeOptions theExpungeOptions, RequestDetails theRequest) {
       TransactionTemplate txTemplate = new TransactionTemplate(myPlatformTransactionManager);
 
       BaseHasResource entity = txTemplate.execute(t -> readEntity(theId, theRequest));
@@ -41,6 +41,9 @@ public class DaoRegistry implements ApplicationContextAware, IDaoRegistry {
 
    @Autowired
    private FhirContext myContext;
+   private volatile Map<String, IFhirResourceDao<?>> myResourceNameToResourceDao;
+   private volatile IFhirSystemDao<?, ?> mySystemDao;
+   private Set<String> mySupportedResourceTypes;
 
    /**
    * Constructor
@@ -49,11 +52,6 @@ public class DaoRegistry implements ApplicationContextAware, IDaoRegistry {
       super();
    }
 
-   private volatile Map<String, IFhirResourceDao<?>> myResourceNameToResourceDao;
-   private volatile IFhirSystemDao<?, ?> mySystemDao;
-
-   private Set<String> mySupportedResourceTypes;
-
    public void setSupportedResourceTypes(Collection<String> theSupportedResourceTypes) {
       HashSet<String> supportedResourceTypes = new HashSet<>();
       if (theSupportedResourceTypes != null) {
@@ -138,7 +136,10 @@ public class DaoRegistry implements ApplicationContextAware, IDaoRegistry {
 
    @Override
    public boolean isResourceTypeSupported(String theResourceType) {
-      return mySupportedResourceTypes == null || mySupportedResourceTypes.contains(theResourceType);
+      if (mySupportedResourceTypes == null) {
+         return getResourceDaoOrNull(theResourceType) != null;
+      }
+      return mySupportedResourceTypes.contains(theResourceType);
    }
 
    private void init() {
@@ -116,6 +116,8 @@ public interface IFhirResourceDao<T extends IBaseResource> extends IDao {
 
    ExpungeOutcome expunge(IIdType theIIdType, ExpungeOptions theExpungeOptions, RequestDetails theRequest);
 
+   ExpungeOutcome forceExpungeInExistingTransaction(IIdType theId, ExpungeOptions theExpungeOptions, RequestDetails theRequest);
+
    Class<T> getResourceType();
 
    IBundleProvider history(Date theSince, Date theUntil, RequestDetails theRequestDetails);
@@ -185,9 +185,9 @@ public class TransactionProcessor<BUNDLE extends IBaseBundle, BUNDLEENTRY> {
       if (theRequestDetails != null) {
         if (outcome.getResource() != null) {
           String prefer = theRequestDetails.getHeader(Constants.HEADER_PREFER);
-          PreferReturnEnum preferReturn = RestfulServerUtils.parsePreferHeader(null, prefer);
+          PreferHeader.PreferReturnEnum preferReturn = RestfulServerUtils.parsePreferHeader(null, prefer).getReturn();
           if (preferReturn != null) {
-            if (preferReturn == PreferReturnEnum.REPRESENTATION) {
+            if (preferReturn == PreferHeader.PreferReturnEnum.REPRESENTATION) {
               outcome.fireResourceViewCallbacks();
               myVersionAdapter.setResource(newEntry, outcome.getResource());
             }
@@ -211,7 +211,10 @@ public class TransactionProcessor<BUNDLE extends IBaseBundle, BUNDLEENTRY> {
       String nextReplacementIdPart = nextReplacementId.getValueAsString();
       if (isUrn(nextTemporaryId) && nextTemporaryIdPart.length() > URN_PREFIX.length()) {
         matchUrl = matchUrl.replace(nextTemporaryIdPart, nextReplacementIdPart);
-        matchUrl = matchUrl.replace(UrlUtil.escapeUrlParam(nextTemporaryIdPart), nextReplacementIdPart);
+        String escapedUrlParam = UrlUtil.escapeUrlParam(nextTemporaryIdPart);
+        if (isNotBlank(escapedUrlParam)) {
+          matchUrl = matchUrl.replace(escapedUrlParam, nextReplacementIdPart);
+        }
       }
     }
   }
@@ -0,0 +1,34 @@
+package ca.uhn.fhir.jpa.dao.data;
+
+import ca.uhn.fhir.jpa.entity.BulkExportCollectionEntity;
+import ca.uhn.fhir.jpa.entity.BulkExportJobEntity;
+import org.springframework.data.domain.Slice;
+import org.springframework.data.jpa.repository.JpaRepository;
+import org.springframework.data.jpa.repository.Query;
+import org.springframework.data.repository.query.Param;
+
+import java.util.Optional;
+
+/*
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2019 University Health Network
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+public interface IBulkExportCollectionDao extends JpaRepository<BulkExportCollectionEntity, Long> {
+   // nothing currently
+}
@@ -0,0 +1,28 @@
+package ca.uhn.fhir.jpa.dao.data;
+
+import ca.uhn.fhir.jpa.entity.BulkExportCollectionFileEntity;
+import org.springframework.data.jpa.repository.JpaRepository;
+
+/*
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2019 University Health Network
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+public interface IBulkExportCollectionFileDao extends JpaRepository<BulkExportCollectionFileEntity, Long> {
+   // nothing currently
+}
@@ -0,0 +1,47 @@
+package ca.uhn.fhir.jpa.dao.data;
+
+import ca.uhn.fhir.jpa.bulk.BulkJobStatusEnum;
+import ca.uhn.fhir.jpa.entity.BulkExportJobEntity;
+import org.springframework.data.domain.Pageable;
+import org.springframework.data.domain.Slice;
+import org.springframework.data.jpa.repository.JpaRepository;
+import org.springframework.data.jpa.repository.Query;
+import org.springframework.data.repository.query.Param;
+
+import java.util.Date;
+import java.util.Optional;
+
+/*
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2019 University Health Network
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+public interface IBulkExportJobDao extends JpaRepository<BulkExportJobEntity, Long> {
+
+   @Query("SELECT j FROM BulkExportJobEntity j WHERE j.myJobId = :jobid")
+   Optional<BulkExportJobEntity> findByJobId(@Param("jobid") String theUuid);
+
+   @Query("SELECT j FROM BulkExportJobEntity j WHERE j.myStatus = :status")
+   Slice<BulkExportJobEntity> findByStatus(Pageable thePage, @Param("status") BulkJobStatusEnum theSubmitted);
+
+   @Query("SELECT j FROM BulkExportJobEntity j WHERE j.myExpiry < :cutoff")
+   Slice<BulkExportJobEntity> findByExpiry(Pageable thePage, @Param("cutoff") Date theCutoff);
+
+   @Query("SELECT j FROM BulkExportJobEntity j WHERE j.myRequest = :request AND j.myCreated > :createdAfter AND j.myStatus <> :status")
+   Slice<BulkExportJobEntity> findExistingJob(Pageable thePage, @Param("request") String theRequest, @Param("createdAfter") Date theCreatedAfter, @Param("status") BulkJobStatusEnum theNotStatus);
+}
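A hypothetical sketch of how a worker might poll this repository for newly submitted jobs. It assumes a BulkJobStatusEnum.SUBMITTED constant from the referenced ca.uhn.fhir.jpa.bulk package and a Spring-injected DAO, neither of which is shown in this diff:

import ca.uhn.fhir.jpa.bulk.BulkJobStatusEnum;
import ca.uhn.fhir.jpa.entity.BulkExportJobEntity;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Slice;

public class BulkExportPollerSketch {
   private IBulkExportJobDao myJobDao; // injected by Spring in real code

   void pollOnce() {
      // Fetch at most one SUBMITTED job per pass
      Slice<BulkExportJobEntity> page =
         myJobDao.findByStatus(PageRequest.of(0, 1), BulkJobStatusEnum.SUBMITTED);
      page.forEach(job -> {
         // mark the job as in progress, then build its collection files
      });
   }
}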
@@ -37,11 +37,11 @@ public interface ISearchDao extends JpaRepository<Search, Long> {
 @Query("SELECT s FROM Search s LEFT OUTER JOIN FETCH s.myIncludes WHERE s.myUuid = :uuid")
 Optional<Search> findByUuidAndFetchIncludes(@Param("uuid") String theUuid);

-@Query("SELECT s.myId FROM Search s WHERE s.mySearchLastReturned < :cutoff")
-Slice<Long> findWhereLastReturnedBefore(@Param("cutoff") Date theCutoff, Pageable thePage);
+@Query("SELECT s.myId FROM Search s WHERE (s.mySearchLastReturned < :cutoff) AND (s.myExpiryOrNull IS NULL OR s.myExpiryOrNull < :now)")
+Slice<Long> findWhereLastReturnedBefore(@Param("cutoff") Date theCutoff, @Param("now") Date theNow, Pageable thePage);

-@Query("SELECT s FROM Search s WHERE s.myResourceType = :type AND mySearchQueryStringHash = :hash AND s.myCreated > :cutoff AND s.myDeleted = false")
-Collection<Search> find(@Param("type") String theResourceType, @Param("hash") int theHashCode, @Param("cutoff") Date theCreatedCutoff);
+@Query("SELECT s FROM Search s WHERE s.myResourceType = :type AND mySearchQueryStringHash = :hash AND (s.myCreated > :cutoff) AND s.myDeleted = false")
+Collection<Search> findWithCutoffOrExpiry(@Param("type") String theResourceType, @Param("hash") int theHashCode, @Param("cutoff") Date theCreatedCutoff);

 @Modifying
 @Query("UPDATE Search s SET s.mySearchLastReturned = :last WHERE s.myId = :pid")
@@ -49,10 +49,6 @@ public abstract class ExpungeService {
 public ExpungeOutcome expunge(String theResourceName, Long theResourceId, Long theVersion, ExpungeOptions theExpungeOptions, RequestDetails theRequest) {
    ourLog.info("Expunge: ResourceName[{}] Id[{}] Version[{}] Options[{}]", theResourceName, theResourceId, theVersion, theExpungeOptions);

-   if (!myConfig.isExpungeEnabled()) {
-      throw new MethodNotAllowedException("$expunge is not enabled on this server");
-   }
-
    if (theExpungeOptions.getLimit() < 1) {
       throw new InvalidRequestException("Expunge limit may not be less than 1. Received expunge limit " + theExpungeOptions.getLimit() + ".");
    }
@@ -55,6 +55,16 @@ public class PartitionRunner {
       return;
    }

+   if (callableTasks.size() == 1) {
+      try {
+         callableTasks.get(0).call();
+         return;
+      } catch (Exception e) {
+         ourLog.error("Error while expunging.", e);
+         throw new InternalErrorException(e);
+      }
+   }
+
    ExecutorService executorService = buildExecutor(callableTasks.size());
    try {
       List<Future<Void>> futures = executorService.invokeAll(callableTasks);
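The added branch is a fast path: with exactly one task there is nothing to parallelize, so the executor (and its thread-creation cost) is skipped and the task runs on the calling thread. A standalone illustration of the same pattern, under no assumptions beyond the JDK:

import java.util.List;
import java.util.concurrent.Callable;

public class SingleTaskFastPathDemo {
   static void run(List<Callable<Void>> tasks) throws Exception {
      if (tasks.size() == 1) {
         tasks.get(0).call(); // run inline on the calling thread
         return;
      }
      // ... otherwise build an ExecutorService and invokeAll(tasks)
   }
}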
@@ -0,0 +1,65 @@
+package ca.uhn.fhir.jpa.entity;
+
+import ca.uhn.fhir.jpa.model.entity.ResourceTable;
+
+import javax.persistence.*;
+import java.util.ArrayList;
+import java.util.Collection;
+
+@Entity
+@Table(name = "HFJ_BLK_EXPORT_COLLECTION")
+public class BulkExportCollectionEntity {
+
+   @Id
+   @GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_BLKEXCOL_PID")
+   @SequenceGenerator(name = "SEQ_BLKEXCOL_PID", sequenceName = "SEQ_BLKEXCOL_PID")
+   @Column(name = "PID")
+   private Long myId;
+   @ManyToOne
+   @JoinColumn(name = "JOB_PID", referencedColumnName = "PID", nullable = false, foreignKey = @ForeignKey(name="FK_BLKEXCOL_JOB"))
+   private BulkExportJobEntity myJob;
+   @Column(name = "RES_TYPE", length = ResourceTable.RESTYPE_LEN, nullable = false)
+   private String myResourceType;
+   @Column(name = "TYPE_FILTER", length = 1000, nullable = true)
+   private String myFilter;
+   @Version
+   @Column(name = "OPTLOCK", nullable = false)
+   private int myVersion;
+   @OneToMany(fetch = FetchType.LAZY, mappedBy = "myCollection")
+   private Collection<BulkExportCollectionFileEntity> myFiles;
+
+   public void setJob(BulkExportJobEntity theJob) {
+      myJob = theJob;
+   }
+
+   public String getResourceType() {
+      return myResourceType;
+   }
+
+   public void setResourceType(String theResourceType) {
+      myResourceType = theResourceType;
+   }
+
+   public String getFilter() {
+      return myFilter;
+   }
+
+   public void setFilter(String theFilter) {
+      myFilter = theFilter;
+   }
+
+   public int getVersion() {
+      return myVersion;
+   }
+
+   public void setVersion(int theVersion) {
+      myVersion = theVersion;
+   }
+
+   public Collection<BulkExportCollectionFileEntity> getFiles() {
+      if (myFiles == null) {
+         myFiles = new ArrayList<>();
+      }
+      return myFiles;
+   }
+}
@@ -0,0 +1,33 @@
+package ca.uhn.fhir.jpa.entity;
+
+import ca.uhn.fhir.jpa.model.entity.ForcedId;
+
+import javax.persistence.*;
+
+@Entity
+@Table(name = "HFJ_BLK_EXPORT_COLFILE")
+public class BulkExportCollectionFileEntity {
+
+   @Id
+   @GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_BLKEXCOLFILE_PID")
+   @SequenceGenerator(name = "SEQ_BLKEXCOLFILE_PID", sequenceName = "SEQ_BLKEXCOLFILE_PID")
+   @Column(name = "PID")
+   private Long myId;
+   @ManyToOne(fetch = FetchType.LAZY)
+   @JoinColumn(name = "COLLECTION_PID", referencedColumnName = "PID", nullable = false, foreignKey = @ForeignKey(name="FK_BLKEXCOLFILE_COLLECT"))
+   private BulkExportCollectionEntity myCollection;
+   @Column(name = "RES_ID", length = ForcedId.MAX_FORCED_ID_LENGTH, nullable = false)
+   private String myResourceId;
+
+   public void setCollection(BulkExportCollectionEntity theCollection) {
+      myCollection = theCollection;
+   }
+
+   public void setResource(String theResourceId) {
+      myResourceId = theResourceId;
+   }
+
+   public String getResourceId() {
+      return myResourceId;
+   }
+}
@@ -0,0 +1,157 @@
+package ca.uhn.fhir.jpa.entity;
+
+import ca.uhn.fhir.jpa.bulk.BulkJobStatusEnum;
+import org.apache.commons.lang3.builder.ToStringBuilder;
+import org.hl7.fhir.r5.model.InstantType;
+
+import javax.persistence.*;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Date;
+
+import static org.apache.commons.lang3.StringUtils.isNotBlank;
+
+@Entity
+@Table(name = "HFJ_BLK_EXPORT_JOB", uniqueConstraints = {
+   @UniqueConstraint(name = "IDX_BLKEX_JOB_ID", columnNames = "JOB_ID")
+}, indexes = {
+   @Index(name = "IDX_BLKEX_EXPTIME", columnList = "EXP_TIME")
+})
+public class BulkExportJobEntity {
+
+   public static final int REQUEST_LENGTH = 500;
+   public static final int STATUS_MESSAGE_LEN = 500;
+   @Id
+   @GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_BLKEXJOB_PID")
+   @SequenceGenerator(name = "SEQ_BLKEXJOB_PID", sequenceName = "SEQ_BLKEXJOB_PID")
+   @Column(name = "PID")
+   private Long myId;
+
+   @Column(name = "JOB_ID", length = Search.UUID_COLUMN_LENGTH, nullable = false)
+   private String myJobId;
+
+   @Enumerated(EnumType.STRING)
+   @Column(name = "JOB_STATUS", length = 10, nullable = false)
+   private BulkJobStatusEnum myStatus;
+   @Temporal(TemporalType.TIMESTAMP)
+   @Column(name = "CREATED_TIME", nullable = false)
+   private Date myCreated;
+   @Temporal(TemporalType.TIMESTAMP)
+   @Column(name = "STATUS_TIME", nullable = false)
+   private Date myStatusTime;
+   @Temporal(TemporalType.TIMESTAMP)
+   @Column(name = "EXP_TIME", nullable = false)
+   private Date myExpiry;
+   @Column(name = "REQUEST", nullable = false, length = REQUEST_LENGTH)
+   private String myRequest;
+   @OneToMany(fetch = FetchType.LAZY, mappedBy = "myJob")
+   private Collection<BulkExportCollectionEntity> myCollections;
+   @Version
+   @Column(name = "OPTLOCK", nullable = false)
+   private int myVersion;
+   @Temporal(TemporalType.TIMESTAMP)
+   @Column(name = "EXP_SINCE", nullable = true)
+   private Date mySince;
+   @Column(name = "STATUS_MESSAGE", nullable = true, length = STATUS_MESSAGE_LEN)
+   private String myStatusMessage;
+
+   public Date getCreated() {
+      return myCreated;
+   }
+
+   public void setCreated(Date theCreated) {
+      myCreated = theCreated;
+   }
+
+   public String getStatusMessage() {
+      return myStatusMessage;
+   }
+
+   public void setStatusMessage(String theStatusMessage) {
+      myStatusMessage = theStatusMessage;
+   }
+
+   public String getRequest() {
+      return myRequest;
+   }
+
+   public void setRequest(String theRequest) {
+      myRequest = theRequest;
+   }
+
+   public void setExpiry(Date theExpiry) {
+      myExpiry = theExpiry;
+   }
+
+   public Collection<BulkExportCollectionEntity> getCollections() {
+      if (myCollections == null) {
+         myCollections = new ArrayList<>();
+      }
+      return myCollections;
+   }
+
+   public String getJobId() {
+      return myJobId;
+   }
+
+   public void setJobId(String theJobId) {
+      myJobId = theJobId;
+   }
+
+   @Override
+   public String toString() {
+      ToStringBuilder b = new ToStringBuilder(this);
+      if (isNotBlank(myJobId)) {
+         b.append("jobId", myJobId);
+      }
+      if (myStatus != null) {
+         b.append("status", myStatus + " " + new InstantType(myStatusTime).getValueAsString());
+      }
+      b.append("created", new InstantType(myCreated).getValueAsString());
+      b.append("expiry", new InstantType(myExpiry).getValueAsString());
+      b.append("request", myRequest);
+      b.append("since", mySince);
+      if (isNotBlank(myStatusMessage)) {
+         b.append("statusMessage", myStatusMessage);
+      }
+      return b.toString();
+   }
+
+   public BulkJobStatusEnum getStatus() {
+      return myStatus;
+   }
+
+   public void setStatus(BulkJobStatusEnum theStatus) {
+      if (myStatus != theStatus) {
+         myStatusTime = new Date();
+         myStatus = theStatus;
+      }
+   }
+
+   public Date getStatusTime() {
+      return myStatusTime;
+   }
+
+   public int getVersion() {
+      return myVersion;
+   }
+
+   public void setVersion(int theVersion) {
+      myVersion = theVersion;
+   }
+
+   public Long getId() {
+      return myId;
+   }
+
+   public Date getSince() {
+      if (mySince != null) {
+         return new Date(mySince.getTime());
+      }
+      return null;
+   }
+
+   public void setSince(Date theSince) {
+      mySince = theSince;
+   }
+}
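Note the setStatus() contract above: the STATUS_TIME column is refreshed only on a real status transition, so repeated writes of the same status do not reset the timestamp. A small sketch of that behavior (BulkJobStatusEnum values such as SUBMITTED and COMPLETE are assumed, not shown in this diff):

import java.util.Date;

public class StatusTimeDemo {
   public static void main(String[] args) throws InterruptedException {
      BulkExportJobEntity job = new BulkExportJobEntity();
      job.setStatus(BulkJobStatusEnum.SUBMITTED); // first transition stamps STATUS_TIME
      Date first = job.getStatusTime();
      Thread.sleep(5);
      job.setStatus(BulkJobStatusEnum.SUBMITTED); // same status: timestamp untouched
      assert job.getStatusTime().equals(first);
      job.setStatus(BulkJobStatusEnum.COMPLETE);  // real transition: timestamp refreshed
   }
}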
@@ -59,6 +59,9 @@ public class Search implements ICachedSearchDetails, Serializable {
 private Integer myFailureCode;
 @Column(name = "FAILURE_MESSAGE", length = FAILURE_MESSAGE_LENGTH, nullable = true)
 private String myFailureMessage;
+@Temporal(TemporalType.TIMESTAMP)
+@Column(name = "EXPIRY_OR_NULL", nullable = true)
+private Date myExpiryOrNull;
 @Id
 @GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_SEARCH")
 @SequenceGenerator(name = "SEQ_SEARCH", sequenceName = "SEQ_SEARCH")
|
@ -108,7 +111,6 @@ public class Search implements ICachedSearchDetails, Serializable {
|
||||||
@Lob
|
@Lob
|
||||||
@Column(name = "SEARCH_PARAM_MAP", nullable = true)
|
@Column(name = "SEARCH_PARAM_MAP", nullable = true)
|
||||||
private byte[] mySearchParameterMap;
|
private byte[] mySearchParameterMap;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Constructor
|
* Constructor
|
||||||
*/
|
*/
|
||||||
|
@ -116,6 +118,14 @@ public class Search implements ICachedSearchDetails, Serializable {
|
||||||
super();
|
super();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public Date getExpiryOrNull() {
|
||||||
|
return myExpiryOrNull;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void setExpiryOrNull(Date theExpiryOrNull) {
|
||||||
|
myExpiryOrNull = theExpiryOrNull;
|
||||||
|
}
|
||||||
|
|
||||||
public Boolean getDeleted() {
|
public Boolean getDeleted() {
|
||||||
return myDeleted;
|
return myDeleted;
|
||||||
}
|
}
|
||||||
|
@@ -230,11 +240,15 @@ public class Search implements ICachedSearchDetails, Serializable {
 }

 public void setSearchQueryString(String theSearchQueryString) {
-   if (theSearchQueryString != null && theSearchQueryString.length() > MAX_SEARCH_QUERY_STRING) {
-      mySearchQueryString = null;
+   if (theSearchQueryString == null || theSearchQueryString.length() > MAX_SEARCH_QUERY_STRING) {
+      // We want this field to always have a wide distribution of values in order
+      // to avoid optimizers avoiding using it if it has lots of nulls, so in the
+      // case of null, just put a value that will never be hit
+      mySearchQueryString = UUID.randomUUID().toString();
    } else {
       mySearchQueryString = theSearchQueryString;
    }
+   mySearchQueryStringHash = mySearchQueryString.hashCode();
 }

 public SearchTypeEnum getSearchType() {
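The sentinel trick above is worth spelling out: rather than storing NULL (which would cluster many rows on one hash value and tempt the query planner away from the SEARCH_QUERY_STRING_HASH index), an unmatchable random string keeps the hash column uniformly distributed while never colliding with a real cached query. A standalone sketch:

import java.util.UUID;

public class QueryStringHashDemo {
   public static void main(String[] args) {
      String queryString = null; // e.g. a query too long to cache
      String stored = (queryString == null)
         ? UUID.randomUUID().toString() // sentinel no real query string equals
         : queryString;
      int hash = stored.hashCode(); // always non-null, well spread
      System.out.println(hash);
   }
}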
@@ -0,0 +1,29 @@
+package ca.uhn.fhir.jpa.sched;
+
+import org.quartz.spi.TriggerFiredBundle;
+import org.springframework.beans.factory.config.AutowireCapableBeanFactory;
+import org.springframework.context.ApplicationContext;
+import org.springframework.context.ApplicationContextAware;
+import org.springframework.scheduling.quartz.SpringBeanJobFactory;
+
+public class AutowiringSpringBeanJobFactory extends SpringBeanJobFactory implements ApplicationContextAware {
+
+   private transient AutowireCapableBeanFactory myBeanFactory;
+   private ApplicationContext myAppCtx;
+
+   @Override
+   public void setApplicationContext(final ApplicationContext theApplicationContext) {
+      myAppCtx = theApplicationContext;
+      myBeanFactory = theApplicationContext.getAutowireCapableBeanFactory();
+   }
+
+   @Override
+   protected Object createJobInstance(final TriggerFiredBundle bundle) throws Exception {
+      Object job = super.createJobInstance(bundle);
+      myBeanFactory.autowireBean(job);
+      if (job instanceof ApplicationContextAware) {
+         ((ApplicationContextAware) job).setApplicationContext(myAppCtx);
+      }
+      return job;
+   }
+}
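A hedged sketch of the typical wiring for such a factory: hand it to a Quartz scheduler so that every Job instance it creates gets its @Autowired fields populated from the Spring context. In this commit the equivalent call lives in SchedulerServiceImpl#configureSchedulerCommon below; the standalone version here is only illustrative:

import org.quartz.Scheduler;
import org.quartz.SchedulerException;
import org.quartz.impl.StdSchedulerFactory;
import org.springframework.context.ApplicationContext;

public class JobFactoryWiringSketch {
   static Scheduler build(ApplicationContext ctx) throws SchedulerException {
      AutowiringSpringBeanJobFactory jobFactory = new AutowiringSpringBeanJobFactory();
      jobFactory.setApplicationContext(ctx);
      Scheduler scheduler = StdSchedulerFactory.getDefaultScheduler();
      scheduler.setJobFactory(jobFactory); // jobs created from now on are Spring-aware
      return scheduler;
   }
}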
@@ -0,0 +1,18 @@
+package ca.uhn.fhir.jpa.sched;
+
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean;
+
+import javax.annotation.PostConstruct;
+
+public class QuartzTableSeeder {
+
+   @Autowired
+   private LocalContainerEntityManagerFactoryBean myEntityManagerFactory;
+
+   @PostConstruct
+   public void start() {
+
+   }
+
+}
@@ -0,0 +1,559 @@
+package ca.uhn.fhir.jpa.sched;
+
+import ca.uhn.fhir.jpa.model.sched.ISchedulerService;
+import ca.uhn.fhir.jpa.model.sched.ScheduledJobDefinition;
+import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
+import com.google.common.collect.Sets;
+import org.apache.commons.lang3.Validate;
+import org.quartz.Calendar;
+import org.quartz.*;
+import org.quartz.impl.JobDetailImpl;
+import org.quartz.impl.StdSchedulerFactory;
+import org.quartz.impl.matchers.GroupMatcher;
+import org.quartz.spi.JobFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.context.event.ContextRefreshedEvent;
+import org.springframework.context.event.EventListener;
+import org.springframework.core.env.Environment;
+
+import javax.annotation.PostConstruct;
+import javax.annotation.PreDestroy;
+import java.util.*;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.stream.Collectors;
+
+import static org.quartz.impl.StdSchedulerFactory.PROP_SCHED_INSTANCE_NAME;
+
+/**
+ * This class provides task scheduling for the entire module using the Quartz library.
+ * Inside here, we have two schedulers:
+ * <ul>
+ * <li>
+ * The <b>Local Scheduler</b> handles tasks that need to execute locally. This
+ * typically means things that should happen on all nodes in a clustered
+ * environment.
+ * </li>
+ * <li>
+ * The <b>Cluster Scheduler</b> handles tasks that are distributed and should be
+ * handled by only one node in the cluster (assuming a clustered server). If the
+ * server is not clustered, this scheduler acts the same way as the
+ * local scheduler.
+ * </li>
+ * </ul>
+ */
+public class SchedulerServiceImpl implements ISchedulerService {
+   public static final String SCHEDULING_DISABLED = "scheduling_disabled";
+   public static final String SCHEDULING_DISABLED_EQUALS_TRUE = SCHEDULING_DISABLED + "=true";
+
+   private static final Logger ourLog = LoggerFactory.getLogger(SchedulerServiceImpl.class);
+   private static int ourNextSchedulerId = 0;
+   private Scheduler myLocalScheduler;
+   private Scheduler myClusteredScheduler;
+   private String myThreadNamePrefix;
+   private boolean myLocalSchedulingEnabled;
+   private boolean myClusteredSchedulingEnabled;
+   @Autowired
+   private AutowiringSpringBeanJobFactory mySpringBeanJobFactory;
+   private AtomicBoolean myStopping = new AtomicBoolean(false);
+   @Autowired
+   private Environment myEnvironment;
+
+   /**
+    * Constructor
+    */
+   public SchedulerServiceImpl() {
+      setThreadNamePrefix("hapi-fhir-jpa-scheduler");
+      setLocalSchedulingEnabled(true);
+      setClusteredSchedulingEnabled(true);
+   }
+
+   public boolean isLocalSchedulingEnabled() {
+      return myLocalSchedulingEnabled;
+   }
+
+   public void setLocalSchedulingEnabled(boolean theLocalSchedulingEnabled) {
+      myLocalSchedulingEnabled = theLocalSchedulingEnabled;
+   }
+
+   public boolean isClusteredSchedulingEnabled() {
+      return myClusteredSchedulingEnabled;
+   }
+
+   public void setClusteredSchedulingEnabled(boolean theClusteredSchedulingEnabled) {
+      myClusteredSchedulingEnabled = theClusteredSchedulingEnabled;
+   }
+
+   public String getThreadNamePrefix() {
+      return myThreadNamePrefix;
+   }
+
+   public void setThreadNamePrefix(String theThreadNamePrefix) {
+      myThreadNamePrefix = theThreadNamePrefix;
+   }
+
+   @PostConstruct
+   public void start() throws SchedulerException {
+      myLocalScheduler = createLocalScheduler();
+      myClusteredScheduler = createClusteredScheduler();
+      myStopping.set(false);
+   }
+
+   /**
+    * We defer startup of executing started tasks until we're sure we're ready for it
+    * and the startup is completely done
+    */
+   @EventListener
+   public void contextStarted(ContextRefreshedEvent theEvent) throws SchedulerException {
+      try {
+         ourLog.info("Starting task schedulers for context {}", theEvent != null ? theEvent.getApplicationContext().getId() : "null");
+         if (myLocalScheduler != null) {
+            myLocalScheduler.start();
+         }
+         if (myClusteredScheduler != null) {
+            myClusteredScheduler.start();
+         }
+      } catch (Exception e) {
+         ourLog.error("Failed to start context", e);
+         throw new SchedulerException(e);
+      }
+   }
+
+   private Scheduler createLocalScheduler() throws SchedulerException {
+      if (!isLocalSchedulingEnabled() || isSchedulingDisabledForUnitTests()) {
+         return new NullScheduler();
+      }
+      Properties localProperties = new Properties();
+      localProperties.setProperty(PROP_SCHED_INSTANCE_NAME, "local-" + ourNextSchedulerId++);
+      quartzPropertiesCommon(localProperties);
+      quartzPropertiesLocal(localProperties);
+      StdSchedulerFactory factory = new StdSchedulerFactory();
+      factory.initialize(localProperties);
+      Scheduler scheduler = factory.getScheduler();
+      configureSchedulerCommon(scheduler);
+      scheduler.standby();
+      return scheduler;
+   }
+
+   private Scheduler createClusteredScheduler() throws SchedulerException {
+      if (!isClusteredSchedulingEnabled() || isSchedulingDisabledForUnitTests()) {
+         return new NullScheduler();
+      }
+      Properties clusteredProperties = new Properties();
+      clusteredProperties.setProperty(PROP_SCHED_INSTANCE_NAME, "clustered-" + ourNextSchedulerId++);
+      quartzPropertiesCommon(clusteredProperties);
+      quartzPropertiesClustered(clusteredProperties);
+      StdSchedulerFactory factory = new StdSchedulerFactory();
+      factory.initialize(clusteredProperties);
+      Scheduler scheduler = factory.getScheduler();
+      configureSchedulerCommon(scheduler);
+      scheduler.standby();
+      return scheduler;
+   }
+
+   private void configureSchedulerCommon(Scheduler theScheduler) throws SchedulerException {
+      theScheduler.setJobFactory(mySpringBeanJobFactory);
+   }
+
+   @PreDestroy
+   public void stop() throws SchedulerException {
+      ourLog.info("Shutting down task scheduler...");
+
+      myStopping.set(true);
+      myLocalScheduler.shutdown(true);
+      myClusteredScheduler.shutdown(true);
+   }
+
+   @Override
+   public void purgeAllScheduledJobsForUnitTest() throws SchedulerException {
+      myLocalScheduler.clear();
+      myClusteredScheduler.clear();
+   }
+
+   @Override
+   public void logStatus() {
+      try {
+         Set<JobKey> keys = myLocalScheduler.getJobKeys(GroupMatcher.anyGroup());
+         String keysString = keys.stream().map(t -> t.getName()).collect(Collectors.joining(", "));
+         ourLog.info("Local scheduler has jobs: {}", keysString);
+
+         keys = myClusteredScheduler.getJobKeys(GroupMatcher.anyGroup());
+         keysString = keys.stream().map(t -> t.getName()).collect(Collectors.joining(", "));
+         ourLog.info("Clustered scheduler has jobs: {}", keysString);
+      } catch (SchedulerException e) {
+         throw new InternalErrorException(e);
+      }
+   }
+
+   @Override
+   public void scheduleFixedDelay(long theIntervalMillis, boolean theClusteredTask, ScheduledJobDefinition theJobDefinition) {
+      Validate.isTrue(theIntervalMillis >= 100);
+
+      Validate.notNull(theJobDefinition);
+      Validate.notNull(theJobDefinition.getJobClass());
+      Validate.notBlank(theJobDefinition.getId());
+
+      JobKey jobKey = new JobKey(theJobDefinition.getId());
+
+      JobDetailImpl jobDetail = new NonConcurrentJobDetailImpl();
+      jobDetail.setJobClass(theJobDefinition.getJobClass());
+      jobDetail.setKey(jobKey);
+      jobDetail.setName(theJobDefinition.getId());
+      jobDetail.setJobDataMap(new JobDataMap(theJobDefinition.getJobData()));
+
+      ScheduleBuilder<? extends Trigger> schedule = SimpleScheduleBuilder
+         .simpleSchedule()
+         .withIntervalInMilliseconds(theIntervalMillis)
+         .repeatForever();
+
+      Trigger trigger = TriggerBuilder.newTrigger()
+         .forJob(jobDetail)
+         .startNow()
+         .withSchedule(schedule)
+         .build();
+
+      Set<? extends Trigger> triggers = Sets.newHashSet(trigger);
+      try {
+         Scheduler scheduler;
+         if (theClusteredTask) {
+            scheduler = myClusteredScheduler;
+         } else {
+            scheduler = myLocalScheduler;
+         }
+         scheduler.scheduleJob(jobDetail, triggers, true);
+      } catch (SchedulerException e) {
+         ourLog.error("Failed to schedule job", e);
+         throw new InternalErrorException(e);
+      }
+
+   }
+
+   @Override
+   public boolean isStopping() {
+      return myStopping.get();
+   }
+
+   /**
+    * Properties for the local scheduler (see the class docs to learn what this means)
+    */
+   protected void quartzPropertiesLocal(Properties theProperties) {
+      // nothing
+   }
+
+   /**
+    * Properties for the cluster scheduler (see the class docs to learn what this means)
+    */
+   protected void quartzPropertiesClustered(Properties theProperties) {
+      // theProperties.put("org.quartz.jobStore.tablePrefix", "QRTZHFJC_");
+   }
+
+   protected void quartzPropertiesCommon(Properties theProperties) {
+      theProperties.put("org.quartz.threadPool.threadCount", "4");
+      theProperties.put("org.quartz.threadPool.threadNamePrefix", getThreadNamePrefix() + "-" + theProperties.get(PROP_SCHED_INSTANCE_NAME));
+   }
+
+   private boolean isSchedulingDisabledForUnitTests() {
+      String schedulingDisabled = myEnvironment.getProperty(SCHEDULING_DISABLED);
+      return "true".equals(schedulingDisabled);
+   }
+
+   private static class NonConcurrentJobDetailImpl extends JobDetailImpl {
+      private static final long serialVersionUID = 5716197221121989740L;
+
+      // All HAPI FHIR jobs shouldn't allow concurrent execution
+      @Override
+      public boolean isConcurrentExectionDisallowed() {
+         return true;
+      }
+   }
+
+   private static class NullScheduler implements Scheduler {
+      @Override
+      public String getSchedulerName() {
+         return null;
+      }
+
+      @Override
+      public String getSchedulerInstanceId() {
+         return null;
+      }
+
+      @Override
+      public SchedulerContext getContext() {
+         return null;
+      }
+
+      @Override
+      public void start() {
+      }
+
+      @Override
+      public void startDelayed(int seconds) {
+      }
+
+      @Override
+      public boolean isStarted() {
+         return false;
+      }
+
+      @Override
+      public void standby() {
+      }
+
+      @Override
+      public boolean isInStandbyMode() {
+         return false;
+      }
+
+      @Override
+      public void shutdown() {
+      }
+
+      @Override
+      public void shutdown(boolean waitForJobsToComplete) {
+      }
+
+      @Override
+      public boolean isShutdown() {
+         return false;
+      }
+
+      @Override
+      public SchedulerMetaData getMetaData() {
+         return null;
+      }
+
+      @Override
+      public List<JobExecutionContext> getCurrentlyExecutingJobs() {
+         return null;
+      }
+
+      @Override
+      public void setJobFactory(JobFactory factory) {
+      }
+
+      @Override
+      public ListenerManager getListenerManager() {
+         return null;
+      }
+
+      @Override
+      public Date scheduleJob(JobDetail jobDetail, Trigger trigger) {
+         return null;
+      }
+
+      @Override
+      public Date scheduleJob(Trigger trigger) {
+         return null;
+      }
+
+      @Override
+      public void scheduleJobs(Map<JobDetail, Set<? extends Trigger>> triggersAndJobs, boolean replace) {
+      }
+
+      @Override
+      public void scheduleJob(JobDetail jobDetail, Set<? extends Trigger> triggersForJob, boolean replace) {
+      }
+
+      @Override
+      public boolean unscheduleJob(TriggerKey triggerKey) {
+         return false;
+      }
+
+      @Override
+      public boolean unscheduleJobs(List<TriggerKey> triggerKeys) {
+         return false;
+      }
+
+      @Override
+      public Date rescheduleJob(TriggerKey triggerKey, Trigger newTrigger) {
+         return null;
+      }
+
+      @Override
+      public void addJob(JobDetail jobDetail, boolean replace) {
+      }
+
+      @Override
+      public void addJob(JobDetail jobDetail, boolean replace, boolean storeNonDurableWhileAwaitingScheduling) {
+      }
+
+      @Override
+      public boolean deleteJob(JobKey jobKey) {
+         return false;
+      }
+
+      @Override
+      public boolean deleteJobs(List<JobKey> jobKeys) {
+         return false;
+      }
+
+      @Override
+      public void triggerJob(JobKey jobKey) {
+      }
+
+      @Override
+      public void triggerJob(JobKey jobKey, JobDataMap data) {
+      }
+
+      @Override
+      public void pauseJob(JobKey jobKey) {
+      }
+
+      @Override
+      public void pauseJobs(GroupMatcher<JobKey> matcher) {
+      }
+
+      @Override
+      public void pauseTrigger(TriggerKey triggerKey) {
+      }
+
+      @Override
+      public void pauseTriggers(GroupMatcher<TriggerKey> matcher) {
+      }
+
+      @Override
+      public void resumeJob(JobKey jobKey) {
+      }
+
+      @Override
+      public void resumeJobs(GroupMatcher<JobKey> matcher) {
+      }
+
+      @Override
+      public void resumeTrigger(TriggerKey triggerKey) {
+      }
+
+      @Override
+      public void resumeTriggers(GroupMatcher<TriggerKey> matcher) {
+      }
+
+      @Override
+      public void pauseAll() {
+      }
+
+      @Override
+      public void resumeAll() {
+      }
+
+      @Override
+      public List<String> getJobGroupNames() {
+         return null;
+      }
+
+      @Override
+      public Set<JobKey> getJobKeys(GroupMatcher<JobKey> matcher) {
+         return null;
+      }
+
+      @Override
+      public List<? extends Trigger> getTriggersOfJob(JobKey jobKey) {
+         return null;
+      }
+
+      @Override
+      public List<String> getTriggerGroupNames() {
+         return null;
+      }
+
+      @Override
+      public Set<TriggerKey> getTriggerKeys(GroupMatcher<TriggerKey> matcher) {
+         return null;
+      }
+
+      @Override
+      public Set<String> getPausedTriggerGroups() {
+         return null;
+      }
+
+      @Override
+      public JobDetail getJobDetail(JobKey jobKey) {
+         return null;
+      }
+
+      @Override
+      public Trigger getTrigger(TriggerKey triggerKey) {
+         return null;
+      }
+
+      @Override
+      public Trigger.TriggerState getTriggerState(TriggerKey triggerKey) {
+         return null;
+      }
+
+      @Override
+      public void resetTriggerFromErrorState(TriggerKey triggerKey) {
+      }
+
+      @Override
+      public void addCalendar(String calName, Calendar calendar, boolean replace, boolean updateTriggers) {
+      }
+
+      @Override
+      public boolean deleteCalendar(String calName) {
+         return false;
+      }
+
+      @Override
+      public Calendar getCalendar(String calName) {
+         return null;
+      }
+
+      @Override
+      public List<String> getCalendarNames() {
+         return null;
+      }
+
+      @Override
+      public boolean interrupt(JobKey jobKey) throws UnableToInterruptJobException {
+         return false;
+      }
+
+      @Override
+      public boolean interrupt(String fireInstanceId) throws UnableToInterruptJobException {
+         return false;
+      }
+
+      @Override
+      public boolean checkExists(JobKey jobKey) {
+         return false;
+      }
+
+      @Override
+      public boolean checkExists(TriggerKey triggerKey) {
+         return false;
+      }
+
+      @Override
+      public void clear() {
+      }
+   }
+
+}
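The services changed below all register themselves with this scheduler through the same pattern: a @PostConstruct hook builds a ScheduledJobDefinition, and a static Quartz Job shim delegates back to the autowired Spring bean (which works because of AutowiringSpringBeanJobFactory above). A generic sketch of that pattern; MyService and doWork are placeholders, not part of this commit:

import ca.uhn.fhir.jpa.model.sched.ISchedulerService;
import ca.uhn.fhir.jpa.model.sched.ScheduledJobDefinition;
import org.apache.commons.lang3.time.DateUtils;
import org.quartz.Job;
import org.quartz.JobExecutionContext;
import org.springframework.beans.factory.annotation.Autowired;

import javax.annotation.PostConstruct;

public class MySchedulingService {
   @Autowired
   private ISchedulerService mySchedulerService;

   @PostConstruct
   public void registerScheduledJob() {
      ScheduledJobDefinition jobDetail = new ScheduledJobDefinition();
      jobDetail.setId(MySchedulingService.class.getName());
      jobDetail.setJobClass(SubmitJob.class);
      // true = clustered task, i.e. run by only one node of a cluster at a time
      mySchedulerService.scheduleFixedDelay(10 * DateUtils.MILLIS_PER_SECOND, true, jobDetail);
   }

   public void doWork() {
      // the actual periodic work
   }

   public static class SubmitJob implements Job {
      @Autowired
      private MySchedulingService myTarget; // wired by AutowiringSpringBeanJobFactory

      @Override
      public void execute(JobExecutionContext theContext) {
         myTarget.doWork();
      }
   }
}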
@@ -21,12 +21,17 @@ package ca.uhn.fhir.jpa.search;
  */

 import ca.uhn.fhir.jpa.dao.DaoConfig;
+import ca.uhn.fhir.jpa.model.sched.ISchedulerService;
+import ca.uhn.fhir.jpa.model.sched.ScheduledJobDefinition;
 import ca.uhn.fhir.jpa.search.cache.ISearchCacheSvc;
+import org.quartz.Job;
+import org.quartz.JobExecutionContext;
 import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.scheduling.annotation.Scheduled;
 import org.springframework.transaction.annotation.Propagation;
 import org.springframework.transaction.annotation.Transactional;

+import javax.annotation.PostConstruct;
+
 import static ca.uhn.fhir.jpa.search.cache.DatabaseSearchCacheSvcImpl.DEFAULT_CUTOFF_SLACK;

 /**
@@ -43,6 +48,8 @@ public class StaleSearchDeletingSvcImpl implements IStaleSearchDeletingSvc {
 private DaoConfig myDaoConfig;
 @Autowired
 private ISearchCacheSvc mySearchCacheSvc;
+@Autowired
+private ISchedulerService mySchedulerService;

 @Override
 @Transactional(propagation = Propagation.NEVER)
@@ -50,7 +57,14 @@ public class StaleSearchDeletingSvcImpl implements IStaleSearchDeletingSvc {
    mySearchCacheSvc.pollForStaleSearchesAndDeleteThem();
 }

-@Scheduled(fixedDelay = DEFAULT_CUTOFF_SLACK)
+@PostConstruct
+public void registerScheduledJob() {
+   ScheduledJobDefinition jobDetail = new ScheduledJobDefinition();
+   jobDetail.setId(StaleSearchDeletingSvcImpl.class.getName());
+   jobDetail.setJobClass(StaleSearchDeletingSvcImpl.SubmitJob.class);
+   mySchedulerService.scheduleFixedDelay(DEFAULT_CUTOFF_SLACK, true, jobDetail);
+}
+
 @Transactional(propagation = Propagation.NEVER)
 @Override
 public synchronized void schedulePollForStaleSearches() {
@@ -58,4 +72,14 @@ public class StaleSearchDeletingSvcImpl implements IStaleSearchDeletingSvc {
       pollForStaleSearchesAndDeleteThem();
    }
 }
+
+public static class SubmitJob implements Job {
+   @Autowired
+   private IStaleSearchDeletingSvc myTarget;
+
+   @Override
+   public void execute(JobExecutionContext theContext) {
+      myTarget.schedulePollForStaleSearches();
+   }
+}
 }
@@ -0,0 +1,8 @@
+package ca.uhn.fhir.jpa.search;
+
+public class WarmSearchDefinition {
+
+   private String mySearchUrl;
+   private long myRefreshPeriodMillis;
+
+}
@@ -21,12 +21,16 @@ package ca.uhn.fhir.jpa.search.cache;
  */

 import ca.uhn.fhir.jpa.entity.Search;
+import ca.uhn.fhir.jpa.model.sched.ISchedulerService;
+import ca.uhn.fhir.jpa.model.sched.ScheduledJobDefinition;
 import org.apache.commons.lang3.time.DateUtils;
+import org.quartz.Job;
+import org.quartz.JobExecutionContext;
 import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.scheduling.annotation.Scheduled;
 import org.springframework.transaction.PlatformTransactionManager;
 import org.springframework.transaction.support.TransactionTemplate;

+import javax.annotation.PostConstruct;
 import java.util.Date;
 import java.util.Iterator;
 import java.util.Map;
@@ -36,6 +40,8 @@ public abstract class BaseSearchCacheSvcImpl implements ISearchCacheSvc {

 @Autowired
 private PlatformTransactionManager myTxManager;
+@Autowired
+private ISchedulerService mySchedulerService;

 private ConcurrentHashMap<Long, Date> myUnsyncedLastUpdated = new ConcurrentHashMap<>();
@@ -44,11 +50,18 @@ public abstract class BaseSearchCacheSvcImpl implements ISearchCacheSvc {
    myUnsyncedLastUpdated.put(theSearch.getId(), theDate);
 }

+@PostConstruct
+public void registerScheduledJob() {
+   ScheduledJobDefinition jobDetail = new ScheduledJobDefinition();
+   jobDetail.setId(BaseSearchCacheSvcImpl.class.getName());
+   jobDetail.setJobClass(BaseSearchCacheSvcImpl.SubmitJob.class);
+   mySchedulerService.scheduleFixedDelay(10 * DateUtils.MILLIS_PER_SECOND, false, jobDetail);
+}
+
 @Override
-@Scheduled(fixedDelay = 10 * DateUtils.MILLIS_PER_SECOND)
 public void flushLastUpdated() {
    TransactionTemplate txTemplate = new TransactionTemplate(myTxManager);
-   txTemplate.execute(t->{
+   txTemplate.execute(t -> {
       for (Iterator<Map.Entry<Long, Date>> iter = myUnsyncedLastUpdated.entrySet().iterator(); iter.hasNext(); ) {
          Map.Entry<Long, Date> next = iter.next();
          flushLastUpdated(next.getKey(), next.getValue());
@@ -60,5 +73,15 @@ public abstract class BaseSearchCacheSvcImpl implements ISearchCacheSvc {

 protected abstract void flushLastUpdated(Long theSearchId, Date theLastUpdated);

+public static class SubmitJob implements Job {
+   @Autowired
+   private ISearchCacheSvc myTarget;
+
+   @Override
+   public void execute(JobExecutionContext theContext) {
+      myTarget.flushLastUpdated();
+   }
+}
+
 }
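Worth noting the write-behind design here: hot-path callers just drop a "last returned" timestamp into a ConcurrentHashMap, and the scheduled job drains the map in one transaction, turning per-request UPDATEs into a periodic batch. A standalone sketch of the same pattern, using only the JDK:

import java.util.Date;
import java.util.Iterator;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public class WriteBehindSketch {
   private final ConcurrentHashMap<Long, Date> myUnsynced = new ConcurrentHashMap<>();

   public void touch(Long searchId) {
      myUnsynced.put(searchId, new Date()); // cheap, lock-free on the request path
   }

   public void flush() { // invoked every ~10s by the scheduler
      for (Iterator<Map.Entry<Long, Date>> it = myUnsynced.entrySet().iterator(); it.hasNext(); ) {
         Map.Entry<Long, Date> next = it.next();
         // write next.getKey()/next.getValue() to the database here
         it.remove();
      }
   }
}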
@@ -136,7 +136,7 @@ public class DatabaseSearchCacheSvcImpl extends BaseSearchCacheSvcImpl {
 @Override
 public Collection<Search> findCandidatesForReuse(String theResourceType, String theQueryString, int theQueryStringHash, Date theCreatedAfter) {
    int hashCode = theQueryString.hashCode();
-   return mySearchDao.find(theResourceType, hashCode, theCreatedAfter);
+   return mySearchDao.findWithCutoffOrExpiry(theResourceType, hashCode, theCreatedAfter);
 }
@@ -166,7 +166,7 @@ public class DatabaseSearchCacheSvcImpl extends BaseSearchCacheSvcImpl {

 TransactionTemplate tt = new TransactionTemplate(myTxManager);
 final Slice<Long> toDelete = tt.execute(theStatus ->
-   mySearchDao.findWhereLastReturnedBefore(cutoff, PageRequest.of(0, 2000))
+   mySearchDao.findWhereLastReturnedBefore(cutoff, new Date(), PageRequest.of(0, 2000))
 );
 for (final Long nextSearchToDelete : toDelete) {
    ourLog.debug("Deleting search with PID {}", nextSearchToDelete);
@@ -36,11 +36,6 @@ public interface IResourceReindexingSvc {
  */
 Long markAllResourcesForReindexing(String theType);

-/**
- * Called automatically by the job scheduler
- */
-void scheduleReindexingPass();
-
 /**
  * @return Returns null if the system did not attempt to perform a pass because one was
  * already proceeding. Otherwise, returns the number of resources affected.
@@ -33,6 +33,8 @@ import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
 import ca.uhn.fhir.jpa.entity.ResourceReindexJobEntity;
 import ca.uhn.fhir.jpa.model.entity.ForcedId;
 import ca.uhn.fhir.jpa.model.entity.ResourceTable;
+import ca.uhn.fhir.jpa.model.sched.ISchedulerService;
+import ca.uhn.fhir.jpa.model.sched.ScheduledJobDefinition;
 import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistry;
 import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
 import ca.uhn.fhir.rest.server.exceptions.ResourceVersionConflictException;
@@ -44,12 +46,13 @@ import org.apache.commons.lang3.time.DateUtils;
 import org.hibernate.search.util.impl.Executors;
 import org.hl7.fhir.instance.model.api.IBaseResource;
 import org.hl7.fhir.r4.model.InstantType;
+import org.quartz.Job;
+import org.quartz.JobExecutionContext;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.data.domain.PageRequest;
 import org.springframework.data.domain.Slice;
-import org.springframework.scheduling.annotation.Scheduled;
 import org.springframework.transaction.PlatformTransactionManager;
 import org.springframework.transaction.TransactionDefinition;
 import org.springframework.transaction.support.TransactionCallback;
@@ -101,6 +104,8 @@ public class ResourceReindexingSvcImpl implements IResourceReindexingSvc {
 private EntityManager myEntityManager;
 @Autowired
 private ISearchParamRegistry mySearchParamRegistry;
+@Autowired
+private ISchedulerService mySchedulerService;

 @VisibleForTesting
 void setReindexJobDaoForUnitTest(IResourceReindexJobDao theReindexJobDao) {
@@ -182,11 +187,12 @@ public class ResourceReindexingSvcImpl implements IResourceReindexingSvc {
    return job.getId();
 }

-@Override
-@Transactional(Transactional.TxType.NEVER)
-@Scheduled(fixedDelay = 10 * DateUtils.MILLIS_PER_SECOND)
-public void scheduleReindexingPass() {
-   runReindexingPass();
+@PostConstruct
+public void registerScheduledJob() {
+   ScheduledJobDefinition jobDetail = new ScheduledJobDefinition();
+   jobDetail.setId(ResourceReindexingSvcImpl.class.getName());
+   jobDetail.setJobClass(ResourceReindexingSvcImpl.SubmitJob.class);
+   mySchedulerService.scheduleFixedDelay(10 * DateUtils.MILLIS_PER_SECOND, true, jobDetail);
 }

 @Override
@@ -223,6 +229,8 @@ public class ResourceReindexingSvcImpl implements IResourceReindexingSvc {
 @Override
 public void cancelAndPurgeAllJobs() {
    ourLog.info("Cancelling and purging all resource reindexing jobs");
+   myIndexingLock.lock();
+   try {
    myTxTemplate.execute(t -> {
       myReindexJobDao.markAllOfTypeAsDeleted();
       return null;
@@ -232,6 +240,9 @@ public class ResourceReindexingSvcImpl implements IResourceReindexingSvc {
    initExecutor();

    expungeJobsMarkedAsDeleted();
+   } finally {
+      myIndexingLock.unlock();
+   }
 }

 private int runReindexJobs() {
@@ -277,7 +288,7 @@ public class ResourceReindexingSvcImpl implements IResourceReindexingSvc {
 }

 @VisibleForTesting
-public void setSearchParamRegistryForUnitTest(ISearchParamRegistry theSearchParamRegistry) {
+void setSearchParamRegistryForUnitTest(ISearchParamRegistry theSearchParamRegistry) {
    mySearchParamRegistry = theSearchParamRegistry;
 }

@@ -306,7 +317,7 @@ public class ResourceReindexingSvcImpl implements IResourceReindexingSvc {
 Date low = theJob.getThresholdLow() != null ? theJob.getThresholdLow() : BEGINNING_OF_TIME;
 Date high = theJob.getThresholdHigh();

-// SqlQuery for resources within threshold
+// Query for resources within threshold
 StopWatch pageSw = new StopWatch();
 Slice<Long> range = myTxTemplate.execute(t -> {
    PageRequest page = PageRequest.of(0, PASS_SIZE);
@@ -529,4 +540,14 @@ public class ResourceReindexingSvcImpl implements IResourceReindexingSvc {
       return myUpdated;
    }
 }
+
+public static class SubmitJob implements Job {
+   @Autowired
+   private IResourceReindexingSvc myTarget;
+
+   @Override
+   public void execute(JobExecutionContext theContext) {
+      myTarget.runReindexingPass();
+   }
+}
 }
@@ -26,22 +26,29 @@ import ca.uhn.fhir.context.RuntimeResourceDefinition;
 import ca.uhn.fhir.jpa.dao.DaoConfig;
 import ca.uhn.fhir.jpa.dao.DaoRegistry;
 import ca.uhn.fhir.jpa.dao.IFhirResourceDao;
+import ca.uhn.fhir.jpa.model.sched.FireAtIntervalJob;
+import ca.uhn.fhir.jpa.model.sched.ISchedulerService;
+import ca.uhn.fhir.jpa.model.sched.ScheduledJobDefinition;
 import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
 import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
 import ca.uhn.fhir.util.UrlUtil;
+import org.apache.commons.lang3.time.DateUtils;
+import org.quartz.DisallowConcurrentExecution;
+import org.quartz.JobExecutionContext;
+import org.quartz.PersistJobDataAfterExecution;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.scheduling.annotation.Scheduled;
 import org.springframework.stereotype.Component;
 
 import javax.annotation.PostConstruct;
-import java.util.ArrayList;
+import java.util.*;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Map;
 
 @Component
 public class CacheWarmingSvcImpl implements ICacheWarmingSvc {
 
+	public static final long SCHEDULED_JOB_INTERVAL = 10 * DateUtils.MILLIS_PER_SECOND;
+	private static final Logger ourLog = LoggerFactory.getLogger(CacheWarmingSvcImpl.class);
 	@Autowired
 	private DaoConfig myDaoConfig;
 	private Map<WarmCacheEntry, Long> myCacheEntryToNextRefresh = new LinkedHashMap<>();
@@ -51,10 +58,12 @@ public class CacheWarmingSvcImpl implements ICacheWarmingSvc {
 	private DaoRegistry myDaoRegistry;
 	@Autowired
 	private MatchUrlService myMatchUrlService;
+	@Autowired
+	private ISchedulerService mySchedulerService;
 
 	@Override
-	@Scheduled(fixedDelay = 1000)
 	public synchronized void performWarmingPass() {
+		ourLog.trace("Starting cache warming pass for {} tasks", myCacheEntryToNextRefresh.size());
 
 		for (WarmCacheEntry nextCacheEntry : new ArrayList<>(myCacheEntryToNextRefresh.keySet())) {
 
@@ -74,6 +83,14 @@ public class CacheWarmingSvcImpl implements ICacheWarmingSvc {
 
 	}
 
+	@PostConstruct
+	public void registerScheduledJob() {
+		ScheduledJobDefinition jobDetail = new ScheduledJobDefinition();
+		jobDetail.setId(CacheWarmingSvcImpl.class.getName());
+		jobDetail.setJobClass(CacheWarmingSvcImpl.SubmitJob.class);
+		mySchedulerService.scheduleFixedDelay(SCHEDULED_JOB_INTERVAL, true, jobDetail);
+	}
+
 	private void refreshNow(WarmCacheEntry theCacheEntry) {
 		String nextUrl = theCacheEntry.getUrl();
 
@@ -98,7 +115,7 @@ public class CacheWarmingSvcImpl implements ICacheWarmingSvc {
 		initCacheMap();
 	}
 
-	public synchronized void initCacheMap() {
+	public synchronized Set<WarmCacheEntry> initCacheMap() {
 
 		myCacheEntryToNextRefresh.clear();
 		List<WarmCacheEntry> warmCacheEntries = myDaoConfig.getWarmCacheEntries();
@@ -110,6 +127,24 @@ public class CacheWarmingSvcImpl implements ICacheWarmingSvc {
 
 			myCacheEntryToNextRefresh.put(next, 0L);
 		}
 
+		return Collections.unmodifiableSet(myCacheEntryToNextRefresh.keySet());
+
+	}
+
+	@DisallowConcurrentExecution
+	@PersistJobDataAfterExecution
+	public static class SubmitJob extends FireAtIntervalJob {
+		@Autowired
+		private ICacheWarmingSvc myTarget;
+
+		public SubmitJob() {
+			super(SCHEDULED_JOB_INTERVAL);
+		}
+
+		@Override
+		protected void doExecute(JobExecutionContext theContext) {
+			myTarget.performWarmingPass();
+		}
 	}
 }
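One detail worth calling out in the hunk above: initCacheMap() now returns Collections.unmodifiableSet(myCacheEntryToNextRefresh.keySet()), a read-only view over the live key set rather than a copy. Callers (presumably tests) can observe what got registered but cannot mutate the cache through the returned reference. A short sketch of the resulting behavior, assuming an injected myCacheWarmingSvc:

   Set<WarmCacheEntry> entries = myCacheWarmingSvc.initCacheMap();
   int count = entries.size(); // reads are fine, and the view tracks later changes to the map
   entries.clear();            // throws UnsupportedOperationException: the view rejects mutation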
@@ -20,9 +20,6 @@ package ca.uhn.fhir.jpa.search.warm;
  * #L%
  */
 
-import org.springframework.scheduling.annotation.Scheduled;
-
 public interface ICacheWarmingSvc {
-	@Scheduled(fixedDelay = 1000)
 	void performWarmingPass();
 }
@@ -30,4 +30,6 @@ import java.util.List;
 
 public interface ISubscriptionTriggeringSvc {
 	IBaseParameters triggerSubscription(List<UriParam> theResourceIds, List<StringParam> theSearchUrls, @IdParam IIdType theSubscriptionId);
+
+	void runDeliveryPass();
 }
@@ -25,6 +25,9 @@ import ca.uhn.fhir.context.RuntimeResourceDefinition;
 import ca.uhn.fhir.jpa.dao.DaoConfig;
 import ca.uhn.fhir.jpa.dao.DaoRegistry;
 import ca.uhn.fhir.jpa.dao.IFhirResourceDao;
+import ca.uhn.fhir.jpa.model.sched.FireAtIntervalJob;
+import ca.uhn.fhir.jpa.model.sched.ISchedulerService;
+import ca.uhn.fhir.jpa.model.sched.ScheduledJobDefinition;
 import ca.uhn.fhir.jpa.provider.SubscriptionTriggeringProvider;
 import ca.uhn.fhir.jpa.search.ISearchCoordinatorSvc;
 import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
@@ -53,10 +56,12 @@ import org.hl7.fhir.instance.model.api.IBaseParameters;
 import org.hl7.fhir.instance.model.api.IBaseResource;
 import org.hl7.fhir.instance.model.api.IIdType;
 import org.hl7.fhir.instance.model.api.IPrimitiveType;
+import org.quartz.DisallowConcurrentExecution;
+import org.quartz.JobExecutionContext;
+import org.quartz.PersistJobDataAfterExecution;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.scheduling.annotation.Scheduled;
 import org.springframework.stereotype.Service;
 
 import javax.annotation.PostConstruct;
@@ -73,10 +78,10 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank;
 
 @Service
 public class SubscriptionTriggeringSvcImpl implements ISubscriptionTriggeringSvc {
+	public static final long SCHEDULE_DELAY = DateUtils.MILLIS_PER_SECOND;
 	private static final Logger ourLog = LoggerFactory.getLogger(SubscriptionTriggeringProvider.class);
 
 	private static final int DEFAULT_MAX_SUBMIT = 10000;
+	private final List<SubscriptionTriggeringJobDetails> myActiveJobs = new ArrayList<>();
 	@Autowired
 	private FhirContext myFhirContext;
 	@Autowired
@@ -89,10 +94,10 @@ public class SubscriptionTriggeringSvcImpl implements ISubscriptionTriggeringSvc
 	private MatchUrlService myMatchUrlService;
 	@Autowired
 	private IResourceModifiedConsumer myResourceModifiedConsumer;
 
-	private final List<SubscriptionTriggeringJobDetails> myActiveJobs = new ArrayList<>();
 	private int myMaxSubmitPerPass = DEFAULT_MAX_SUBMIT;
 	private ExecutorService myExecutorService;
+	@Autowired
+	private ISchedulerService mySchedulerService;
 
 	@Override
 	public IBaseParameters triggerSubscription(List<UriParam> theResourceIds, List<StringParam> theSearchUrls, @IdParam IIdType theSubscriptionId) {
@@ -143,8 +148,8 @@ public class SubscriptionTriggeringSvcImpl implements ISubscriptionTriggeringSvc
 		// Submit job for processing
 		synchronized (myActiveJobs) {
 			myActiveJobs.add(jobDetails);
+			ourLog.info("Subscription triggering requested for {} resource and {} search - Gave job ID: {} and have {} jobs", resourceIds.size(), searchUrls.size(), jobDetails.getJobId(), myActiveJobs.size());
 		}
-		ourLog.info("Subscription triggering requested for {} resource and {} search - Gave job ID: {}", resourceIds.size(), searchUrls.size(), jobDetails.getJobId());
 
 		// Create a parameters response
 		IBaseParameters retVal = ParametersUtil.newInstance(myFhirContext);
@@ -154,10 +159,19 @@ public class SubscriptionTriggeringSvcImpl implements ISubscriptionTriggeringSvc
 		return retVal;
 	}
 
-	@Scheduled(fixedDelay = DateUtils.MILLIS_PER_SECOND)
+	@PostConstruct
+	public void registerScheduledJob() {
+		ScheduledJobDefinition jobDetail = new ScheduledJobDefinition();
+		jobDetail.setId(SubscriptionTriggeringSvcImpl.class.getName());
+		jobDetail.setJobClass(SubscriptionTriggeringSvcImpl.SubmitJob.class);
+		mySchedulerService.scheduleFixedDelay(SCHEDULE_DELAY, false, jobDetail);
+	}
+
+	@Override
 	public void runDeliveryPass() {
 
 		synchronized (myActiveJobs) {
 
 			if (myActiveJobs.isEmpty()) {
 				return;
 			}
@@ -305,7 +319,7 @@ public class SubscriptionTriggeringSvcImpl implements ISubscriptionTriggeringSvc
 		return myExecutorService.submit(() -> {
 			for (int i = 0; ; i++) {
 				try {
 					myResourceModifiedConsumer.submitResourceModified(msg);
 					break;
 				} catch (Exception e) {
 					if (i >= 3) {
@@ -375,6 +389,22 @@ public class SubscriptionTriggeringSvcImpl implements ISubscriptionTriggeringSvc
 
 	}
 
+	@DisallowConcurrentExecution
+	@PersistJobDataAfterExecution
+	public static class SubmitJob extends FireAtIntervalJob {
+		@Autowired
+		private ISubscriptionTriggeringSvc myTarget;
+
+		public SubmitJob() {
+			super(SCHEDULE_DELAY);
+		}
+
+		@Override
+		protected void doExecute(JobExecutionContext theContext) {
+			myTarget.runDeliveryPass();
+		}
+	}
+
 	private static class SubscriptionTriggeringJobDetails {
 
 		private String myJobId;
@@ -28,6 +28,9 @@ import ca.uhn.fhir.jpa.dao.data.*;
 import ca.uhn.fhir.jpa.entity.*;
 import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink.RelationshipTypeEnum;
 import ca.uhn.fhir.jpa.model.entity.ResourceTable;
+import ca.uhn.fhir.jpa.model.sched.ISchedulerService;
+import ca.uhn.fhir.jpa.model.sched.ScheduledJobDefinition;
+import ca.uhn.fhir.jpa.search.SearchCoordinatorSvcImpl;
 import ca.uhn.fhir.jpa.util.ScrollableResultsIterator;
 import ca.uhn.fhir.rest.api.server.RequestDetails;
 import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
@@ -61,6 +64,8 @@ import org.hl7.fhir.instance.model.api.IBaseResource;
 import org.hl7.fhir.instance.model.api.IIdType;
 import org.hl7.fhir.instance.model.api.IPrimitiveType;
 import org.hl7.fhir.r4.model.*;
+import org.quartz.Job;
+import org.quartz.JobExecutionContext;
 import org.hl7.fhir.r4.model.codesystems.ConceptSubsumptionOutcome;
 import org.springframework.beans.BeansException;
 import org.springframework.beans.factory.annotation.Autowired;
@@ -162,6 +167,8 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
 	private PlatformTransactionManager myTxManager;
 	@Autowired
 	private ITermValueSetConceptViewDao myTermValueSetConceptViewDao;
+	@Autowired
+	private ISchedulerService mySchedulerService;
 
 	private void addCodeIfNotAlreadyAdded(IValueSetConceptAccumulator theValueSetCodeAccumulator, Set<String> theAddedCodes, TermConcept theConcept, boolean theAdd, AtomicInteger theCodeCounter) {
 		String codeSystem = theConcept.getCodeSystemVersion().getCodeSystem().getCodeSystemUri();
@@ -1529,7 +1536,6 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
 		}
 	}
 
-	@Scheduled(fixedRate = 5000)
 	@Transactional(propagation = Propagation.NEVER)
 	@Override
 	public synchronized void saveDeferred() {
@@ -1616,6 +1622,24 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
 		myTxTemplate = new TransactionTemplate(myTransactionManager);
 	}
 
+	@PostConstruct
+	public void registerScheduledJob() {
+		// Register scheduled job to save deferred concepts
+		// In the future it would be great to make this a cluster-aware task somehow
+		ScheduledJobDefinition jobDefinition = new ScheduledJobDefinition();
+		jobDefinition.setId(BaseHapiTerminologySvcImpl.class.getName() + "_saveDeferred");
+		jobDefinition.setJobClass(SaveDeferredJob.class);
+		mySchedulerService.scheduleFixedDelay(5000, false, jobDefinition);
+
+		// Register scheduled job to pre-expand deferred ValueSets
+		// In the future it would be great to make this a cluster-aware task somehow
+		ScheduledJobDefinition vsJobDefinition = new ScheduledJobDefinition();
+		vsJobDefinition.setId(BaseHapiTerminologySvcImpl.class.getName() + "_preExpandValueSets");
+		vsJobDefinition.setJobClass(PreExpandValueSetsJob.class);
+		mySchedulerService.scheduleFixedDelay(10 * DateUtils.MILLIS_PER_MINUTE, true, vsJobDefinition);
+
+	}
+
 	@Override
 	@Transactional(propagation = Propagation.REQUIRED)
 	public void storeNewCodeSystemVersion(Long theCodeSystemResourcePid, String theSystemUri, String theSystemName, String theSystemVersionId, TermCodeSystemVersion theCodeSystemVersion) {
@@ -1696,7 +1720,7 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
 
 		ourLog.info("Saving {} concepts...", totalCodeCount);
 
-		IdentityHashMap<TermConcept, Object> conceptsStack2 = new IdentityHashMap<TermConcept, Object>();
+		IdentityHashMap<TermConcept, Object> conceptsStack2 = new IdentityHashMap<>();
 		for (TermConcept next : theCodeSystemVersion.getConcepts()) {
 			persistChildren(next, codeSystemVersion, conceptsStack2, totalCodeCount);
 		}
@@ -1939,7 +1963,6 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
 		ourLog.info("Done storing TermConceptMap[{}]", termConceptMap.getId());
 	}
 
-	@Scheduled(fixedDelay = 600000) // 10 minutes.
 	@Override
 	public synchronized void preExpandDeferredValueSetsToTerminologyTables() {
 		if (isNotSafeToPreExpandValueSets()) {
@@ -2497,6 +2520,28 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
 		return new VersionIndependentConcept(system, code);
 	}
 
+	public static class SaveDeferredJob implements Job {
+
+		@Autowired
+		private IHapiTerminologySvc myTerminologySvc;
+
+		@Override
+		public void execute(JobExecutionContext theContext) {
+			myTerminologySvc.saveDeferred();
+		}
+	}
+
+	public static class PreExpandValueSetsJob implements Job {
+
+		@Autowired
+		private IHapiTerminologySvc myTerminologySvc;
+
+		@Override
+		public void execute(JobExecutionContext theContext) {
+			myTerminologySvc.preExpandDeferredValueSetsToTerminologyTables();
+		}
+	}
+
 	/**
 	 * This method is present only for unit tests, do not call from client code
 	 */
@@ -0,0 +1,75 @@
+package ca.uhn.fhir.jpa.util;
+
+/*-
+ * #%L
+ * Smile CDR - CDR
+ * %%
+ * Copyright (C) 2016 - 2018 Simpatico Intelligent Systems Inc
+ * %%
+ * All rights reserved.
+ * #L%
+ */
+
+import com.fasterxml.jackson.annotation.JsonInclude;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.SerializationFeature;
+
+import javax.annotation.Nonnull;
+import java.io.IOException;
+import java.io.StringWriter;
+import java.io.Writer;
+
+public class JsonUtil {
+
+	private static final ObjectMapper ourMapperPrettyPrint;
+	private static final ObjectMapper ourMapperNonPrettyPrint;
+
+	static {
+		ourMapperPrettyPrint = new ObjectMapper();
+		ourMapperPrettyPrint.setSerializationInclusion(JsonInclude.Include.NON_NULL);
+		ourMapperPrettyPrint.enable(SerializationFeature.INDENT_OUTPUT);
+
+		ourMapperNonPrettyPrint = new ObjectMapper();
+		ourMapperNonPrettyPrint.setSerializationInclusion(JsonInclude.Include.NON_NULL);
+		ourMapperNonPrettyPrint.disable(SerializationFeature.INDENT_OUTPUT);
+	}
+
+	/**
+	 * Parse JSON
+	 */
+	public static <T> T deserialize(@Nonnull String theInput, @Nonnull Class<T> theType) throws IOException {
+		return ourMapperPrettyPrint.readerFor(theType).readValue(theInput);
+	}
+
+	/**
+	 * Encode JSON
+	 */
+	public static String serialize(@Nonnull Object theInput) throws IOException {
+		return serialize(theInput, true);
+	}
+
+	/**
+	 * Encode JSON
+	 */
+	public static String serialize(@Nonnull Object theInput, boolean thePrettyPrint) throws IOException {
+		StringWriter sw = new StringWriter();
+		if (thePrettyPrint) {
+			ourMapperPrettyPrint.writeValue(sw, theInput);
+		} else {
+			ourMapperNonPrettyPrint.writeValue(sw, theInput);
+		}
+		return sw.toString();
+	}
+
+	/**
+	 * Encode JSON
+	 */
+	public static void serialize(@Nonnull Object theInput, @Nonnull Writer theWriter) throws IOException {
+		// Note: We append a string here rather than having ourMapper write directly
+		// to the Writer, because ourMapper closes the Writer when it finishes writing.
+		// There may be a way of preventing that, but it isn't a big deal here.
+		theWriter.append(serialize(theInput));
+	}
+
+}
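The new JsonUtil helper wraps two pre-configured Jackson ObjectMappers (null fields suppressed; pretty-printed or compact output). A round trip looks like the sketch below, where ExamplePoint is a trivial hypothetical DTO standing in for any Jackson-mappable class such as the BulkExportResponseJson used later in this commit:

   public class ExamplePoint { // hypothetical DTO; Jackson picks up the public fields
      public double myX;
      public double myY;
   }

   ExamplePoint point = new ExamplePoint();
   point.myX = 1.5;
   String pretty = JsonUtil.serialize(point);         // indented output, null fields omitted
   String compact = JsonUtil.serialize(point, false); // single-line output
   ExamplePoint parsed = JsonUtil.deserialize(pretty, ExamplePoint.class);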
@@ -20,14 +20,106 @@ package ca.uhn.fhir.jpa.util;
  * #L%
  */
 
+import ca.uhn.fhir.jpa.model.sched.ISchedulerService;
+import ca.uhn.fhir.jpa.model.sched.ScheduledJobDefinition;
+import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
+import com.google.common.annotations.VisibleForTesting;
+import org.apache.commons.lang3.time.DateUtils;
+import org.quartz.Job;
+import org.quartz.JobExecutionContext;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+
+import javax.annotation.PostConstruct;
 import java.util.Map;
 import java.util.concurrent.Callable;
+import java.util.concurrent.atomic.AtomicReference;
+
+public class ResourceCountCache {
+
+	private static final Logger ourLog = LoggerFactory.getLogger(ResourceCountCache.class);
+	private static Long ourNowForUnitTest;
+	private final Callable<Map<String, Long>> myFetcher;
+	private volatile long myCacheMillis;
+	private AtomicReference<Map<String, Long>> myCapabilityStatement = new AtomicReference<>();
+	private long myLastFetched;
+	@Autowired
+	private ISchedulerService mySchedulerService;
 
-public class ResourceCountCache extends SingleItemLoadingCache<Map<String, Long>> {
 	/**
 	 * Constructor
 	 */
 	public ResourceCountCache(Callable<Map<String, Long>> theFetcher) {
-		super(theFetcher);
+		myFetcher = theFetcher;
 	}
 
+	public synchronized void clear() {
+		ourLog.info("Clearing cache");
+		myCapabilityStatement.set(null);
+		myLastFetched = 0;
+	}
+
+	public synchronized Map<String, Long> get() {
+		return myCapabilityStatement.get();
+	}
+
+	private Map<String, Long> refresh() {
+		Map<String, Long> retVal;
+		try {
+			retVal = myFetcher.call();
+		} catch (Exception e) {
+			throw new InternalErrorException(e);
+		}
+
+		myCapabilityStatement.set(retVal);
+		myLastFetched = now();
+		return retVal;
+	}
+
+	public void setCacheMillis(long theCacheMillis) {
+		myCacheMillis = theCacheMillis;
+	}
+
+	public void update() {
+		if (myCacheMillis > 0) {
+			long now = now();
+			long expiry = now - myCacheMillis;
+			if (myLastFetched < expiry) {
+				refresh();
+			}
+		}
+	}
+
+	@PostConstruct
+	public void registerScheduledJob() {
+		ScheduledJobDefinition jobDetail = new ScheduledJobDefinition();
+		jobDetail.setId(ResourceCountCache.class.getName());
+		jobDetail.setJobClass(ResourceCountCache.SubmitJob.class);
+		mySchedulerService.scheduleFixedDelay(10 * DateUtils.MILLIS_PER_MINUTE, false, jobDetail);
+	}
+
+	public static class SubmitJob implements Job {
+		@Autowired
+		private ResourceCountCache myTarget;
+
+		@Override
+		public void execute(JobExecutionContext theContext) {
+			myTarget.update();
+		}
+	}
+
+	private static long now() {
+		if (ourNowForUnitTest != null) {
+			return ourNowForUnitTest;
+		}
+		return System.currentTimeMillis();
+	}
+
+	@VisibleForTesting
+	static void setNowForUnitTest(Long theNowForUnitTest) {
+		ourNowForUnitTest = theNowForUnitTest;
+	}
+
+
 }
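ResourceCountCache keeps the expiry logic of the SingleItemLoadingCache it replaces (deleted below), specialized to Map<String, Long> and driven by the scheduler rather than @Scheduled. The arithmetic in update() means a refresh only happens once the last fetch is older than the configured TTL, and with myCacheMillis left at 0 a scheduled pass never refreshes. A usage sketch, where the fetcher lambda is purely illustrative:

   ResourceCountCache cache = new ResourceCountCache(() -> mySystemDao.getResourceCounts()); // hypothetical fetcher
   cache.setCacheMillis(10 * DateUtils.MILLIS_PER_MINUTE);
   cache.update();                         // myLastFetched (0) is older than now - 10min, so this refreshes
   Map<String, Long> counts = cache.get(); // returns the freshly fetched map
   cache.update();                         // still inside the 10-minute window: no refresh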
@@ -1,101 +0,0 @@
-package ca.uhn.fhir.jpa.util;
-
-/*-
- * #%L
- * HAPI FHIR JPA Server
- * %%
- * Copyright (C) 2014 - 2019 University Health Network
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-
-import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
-import com.google.common.annotations.VisibleForTesting;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.scheduling.annotation.Scheduled;
-
-import java.util.concurrent.Callable;
-import java.util.concurrent.atomic.AtomicReference;
-
-/**
- * This is a simple cache for CapabilityStatement resources to
- * be returned as server metadata.
- */
-public class SingleItemLoadingCache<T> {
-	private static final Logger ourLog = LoggerFactory.getLogger(SingleItemLoadingCache.class);
-	private static Long ourNowForUnitTest;
-	private final Callable<T> myFetcher;
-	private volatile long myCacheMillis;
-	private AtomicReference<T> myCapabilityStatement = new AtomicReference<>();
-	private long myLastFetched;
-
-	/**
-	 * Constructor
-	 */
-	public SingleItemLoadingCache(Callable<T> theFetcher) {
-		myFetcher = theFetcher;
-	}
-
-	public synchronized void clear() {
-		ourLog.info("Clearing cache");
-		myCapabilityStatement.set(null);
-		myLastFetched = 0;
-	}
-
-	public synchronized T get() {
-		return myCapabilityStatement.get();
-	}
-
-	private T refresh() {
-		T retVal;
-		try {
-			retVal = myFetcher.call();
-		} catch (Exception e) {
-			throw new InternalErrorException(e);
-		}
-
-		myCapabilityStatement.set(retVal);
-		myLastFetched = now();
-		return retVal;
-	}
-
-	public void setCacheMillis(long theCacheMillis) {
-		myCacheMillis = theCacheMillis;
-	}
-
-	@Scheduled(fixedDelay = 60000)
-	public void update() {
-		if (myCacheMillis > 0) {
-			long now = now();
-			long expiry = now - myCacheMillis;
-			if (myLastFetched < expiry) {
-				refresh();
-			}
-		}
-	}
-
-	private static long now() {
-		if (ourNowForUnitTest != null) {
-			return ourNowForUnitTest;
-		}
-		return System.currentTimeMillis();
-	}
-
-	@VisibleForTesting
-	static void setNowForUnitTest(Long theNowForUnitTest) {
-		ourNowForUnitTest = theNowForUnitTest;
-	}
-
-}
@@ -259,7 +259,7 @@ public class TestUtil {
 		}
 	}
 
-	public static void sleepAtLeast(int theMillis) {
+	public static void sleepAtLeast(long theMillis) {
 		long start = System.currentTimeMillis();
 		while (System.currentTimeMillis() <= start + theMillis) {
 			try {
@@ -0,0 +1,242 @@
+package ca.uhn.fhir.jpa.bulk;
+
+import ca.uhn.fhir.context.FhirContext;
+import ca.uhn.fhir.jpa.model.util.JpaConstants;
+import ca.uhn.fhir.jpa.util.JsonUtil;
+import ca.uhn.fhir.rest.api.Constants;
+import ca.uhn.fhir.rest.client.apache.ResourceEntity;
+import ca.uhn.fhir.rest.server.RestfulServer;
+import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
+import ca.uhn.fhir.test.utilities.JettyUtil;
+import com.google.common.base.Charsets;
+import org.apache.commons.io.IOUtils;
+import org.apache.http.client.methods.CloseableHttpResponse;
+import org.apache.http.client.methods.HttpGet;
+import org.apache.http.client.methods.HttpPost;
+import org.apache.http.impl.client.CloseableHttpClient;
+import org.apache.http.impl.client.HttpClientBuilder;
+import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
+import org.eclipse.jetty.server.Server;
+import org.eclipse.jetty.servlet.ServletHandler;
+import org.eclipse.jetty.servlet.ServletHolder;
+import org.hl7.fhir.r4.model.IdType;
+import org.hl7.fhir.r4.model.InstantType;
+import org.hl7.fhir.r4.model.Parameters;
+import org.hl7.fhir.r4.model.StringType;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.ArgumentCaptor;
+import org.mockito.Captor;
+import org.mockito.Mock;
+import org.mockito.junit.MockitoJUnitRunner;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.util.Date;
+import java.util.Set;
+import java.util.concurrent.TimeUnit;
+
+import static org.hamcrest.Matchers.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertThat;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.*;
+
+@RunWith(MockitoJUnitRunner.class)
+public class BulkDataExportProviderTest {
+
+	private static final String A_JOB_ID = "0000000-AAAAAA";
+	private static final Logger ourLog = LoggerFactory.getLogger(BulkDataExportProviderTest.class);
+	private Server myServer;
+	private FhirContext myCtx = FhirContext.forR4();
+	private int myPort;
+	@Mock
+	private IBulkDataExportSvc myBulkDataExportSvc;
+	private CloseableHttpClient myClient;
+	@Captor
+	private ArgumentCaptor<String> myOutputFormatCaptor;
+	@Captor
+	private ArgumentCaptor<Set<String>> myResourceTypesCaptor;
+	@Captor
+	private ArgumentCaptor<Date> mySinceCaptor;
+	@Captor
+	private ArgumentCaptor<Set<String>> myFiltersCaptor;
+
+	@After
+	public void after() throws Exception {
+		JettyUtil.closeServer(myServer);
+		myClient.close();
+	}
+
+	@Before
+	public void start() throws Exception {
+		myServer = new Server(0);
+
+		BulkDataExportProvider provider = new BulkDataExportProvider();
+		provider.setBulkDataExportSvcForUnitTests(myBulkDataExportSvc);
+		provider.setFhirContextForUnitTest(myCtx);
+
+		ServletHandler proxyHandler = new ServletHandler();
+		RestfulServer servlet = new RestfulServer(myCtx);
+		servlet.registerProvider(provider);
+		ServletHolder servletHolder = new ServletHolder(servlet);
+		proxyHandler.addServletWithMapping(servletHolder, "/*");
+		myServer.setHandler(proxyHandler);
+		JettyUtil.startServer(myServer);
+		myPort = JettyUtil.getPortForStartedServer(myServer);
+
+		PoolingHttpClientConnectionManager connectionManager = new PoolingHttpClientConnectionManager(5000, TimeUnit.MILLISECONDS);
+		HttpClientBuilder builder = HttpClientBuilder.create();
+		builder.setConnectionManager(connectionManager);
+		myClient = builder.build();
+
+	}
+
+	@Test
+	public void testSuccessfulInitiateBulkRequest() throws IOException {
+
+		IBulkDataExportSvc.JobInfo jobInfo = new IBulkDataExportSvc.JobInfo()
+			.setJobId(A_JOB_ID);
+		when(myBulkDataExportSvc.submitJob(any(), any(), any(), any())).thenReturn(jobInfo);
+
+		InstantType now = InstantType.now();
+
+		Parameters input = new Parameters();
+		input.addParameter(JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT, new StringType(Constants.CT_FHIR_NDJSON));
+		input.addParameter(JpaConstants.PARAM_EXPORT_TYPE, new StringType("Patient, Practitioner"));
+		input.addParameter(JpaConstants.PARAM_EXPORT_SINCE, now);
+		input.addParameter(JpaConstants.PARAM_EXPORT_TYPE_FILTER, new StringType("Patient?identifier=foo"));
+
+		HttpPost post = new HttpPost("http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT);
+		post.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC);
+		post.setEntity(new ResourceEntity(myCtx, input));
+		try (CloseableHttpResponse response = myClient.execute(post)) {
+			ourLog.info("Response: {}", response.toString());
+
+			assertEquals(202, response.getStatusLine().getStatusCode());
+			assertEquals("Accepted", response.getStatusLine().getReasonPhrase());
+			assertEquals("http://localhost:" + myPort + "/$export-poll-status?_jobId=" + A_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue());
+		}
+
+		verify(myBulkDataExportSvc, times(1)).submitJob(myOutputFormatCaptor.capture(), myResourceTypesCaptor.capture(), mySinceCaptor.capture(), myFiltersCaptor.capture());
+		assertEquals(Constants.CT_FHIR_NDJSON, myOutputFormatCaptor.getValue());
+		assertThat(myResourceTypesCaptor.getValue(), containsInAnyOrder("Patient", "Practitioner"));
+		assertThat(mySinceCaptor.getValue(), notNullValue());
+		assertThat(myFiltersCaptor.getValue(), containsInAnyOrder("Patient?identifier=foo"));
+
+	}
+
+	@Test
+	public void testPollForStatus_BUILDING() throws IOException {
+
+		IBulkDataExportSvc.JobInfo jobInfo = new IBulkDataExportSvc.JobInfo()
+			.setJobId(A_JOB_ID)
+			.setStatus(BulkJobStatusEnum.BUILDING)
+			.setStatusTime(InstantType.now().getValue());
+		when(myBulkDataExportSvc.getJobStatusOrThrowResourceNotFound(eq(A_JOB_ID))).thenReturn(jobInfo);
+
+		String url = "http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS + "?" +
+			JpaConstants.PARAM_EXPORT_POLL_STATUS_JOB_ID + "=" + A_JOB_ID;
+		HttpGet get = new HttpGet(url);
+		get.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC);
+		try (CloseableHttpResponse response = myClient.execute(get)) {
+			ourLog.info("Response: {}", response.toString());
+
+			assertEquals(202, response.getStatusLine().getStatusCode());
+			assertEquals("Accepted", response.getStatusLine().getReasonPhrase());
+			assertEquals("120", response.getFirstHeader(Constants.HEADER_RETRY_AFTER).getValue());
+			assertThat(response.getFirstHeader(Constants.HEADER_X_PROGRESS).getValue(), containsString("Build in progress - Status set to BUILDING at 20"));
+		}
+
+	}
+
+	@Test
+	public void testPollForStatus_ERROR() throws IOException {
+
+		IBulkDataExportSvc.JobInfo jobInfo = new IBulkDataExportSvc.JobInfo()
+			.setJobId(A_JOB_ID)
+			.setStatus(BulkJobStatusEnum.ERROR)
+			.setStatusTime(InstantType.now().getValue())
+			.setStatusMessage("Some Error Message");
+		when(myBulkDataExportSvc.getJobStatusOrThrowResourceNotFound(eq(A_JOB_ID))).thenReturn(jobInfo);
+
+		String url = "http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS + "?" +
+			JpaConstants.PARAM_EXPORT_POLL_STATUS_JOB_ID + "=" + A_JOB_ID;
+		HttpGet get = new HttpGet(url);
+		get.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC);
+		try (CloseableHttpResponse response = myClient.execute(get)) {
+			ourLog.info("Response: {}", response.toString());
+
+			assertEquals(500, response.getStatusLine().getStatusCode());
+			assertEquals("Server Error", response.getStatusLine().getReasonPhrase());
+
+			String responseContent = IOUtils.toString(response.getEntity().getContent(), Charsets.UTF_8);
+			ourLog.info("Response content: {}", responseContent);
+			assertThat(responseContent, containsString("\"diagnostics\": \"Some Error Message\""));
+		}
+
+	}
+
+	@Test
+	public void testPollForStatus_COMPLETED() throws IOException {
+
+		IBulkDataExportSvc.JobInfo jobInfo = new IBulkDataExportSvc.JobInfo()
+			.setJobId(A_JOB_ID)
+			.setStatus(BulkJobStatusEnum.COMPLETE)
+			.setStatusTime(InstantType.now().getValue());
+		jobInfo.addFile().setResourceType("Patient").setResourceId(new IdType("Binary/111"));
+		jobInfo.addFile().setResourceType("Patient").setResourceId(new IdType("Binary/222"));
+		jobInfo.addFile().setResourceType("Patient").setResourceId(new IdType("Binary/333"));
+		when(myBulkDataExportSvc.getJobStatusOrThrowResourceNotFound(eq(A_JOB_ID))).thenReturn(jobInfo);
+
+		String url = "http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS + "?" +
+			JpaConstants.PARAM_EXPORT_POLL_STATUS_JOB_ID + "=" + A_JOB_ID;
+		HttpGet get = new HttpGet(url);
+		get.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC);
+		try (CloseableHttpResponse response = myClient.execute(get)) {
+			ourLog.info("Response: {}", response.toString());
+
+			assertEquals(200, response.getStatusLine().getStatusCode());
+			assertEquals("OK", response.getStatusLine().getReasonPhrase());
+			assertEquals(Constants.CT_JSON, response.getEntity().getContentType().getValue());
+
+			String responseContent = IOUtils.toString(response.getEntity().getContent(), Charsets.UTF_8);
+			ourLog.info("Response content: {}", responseContent);
+			BulkExportResponseJson responseJson = JsonUtil.deserialize(responseContent, BulkExportResponseJson.class);
+			assertEquals(3, responseJson.getOutput().size());
+			assertEquals("Patient", responseJson.getOutput().get(0).getType());
+			assertEquals("http://localhost:" + myPort + "/Binary/111", responseJson.getOutput().get(0).getUrl());
+			assertEquals("Patient", responseJson.getOutput().get(1).getType());
+			assertEquals("http://localhost:" + myPort + "/Binary/222", responseJson.getOutput().get(1).getUrl());
+			assertEquals("Patient", responseJson.getOutput().get(2).getType());
+			assertEquals("http://localhost:" + myPort + "/Binary/333", responseJson.getOutput().get(2).getUrl());
+		}
+
+	}
+
+	@Test
+	public void testPollForStatus_Gone() throws IOException {
+
+		when(myBulkDataExportSvc.getJobStatusOrThrowResourceNotFound(eq(A_JOB_ID))).thenThrow(new ResourceNotFoundException("Unknown job: AAA"));
+
+		String url = "http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS + "?" +
+			JpaConstants.PARAM_EXPORT_POLL_STATUS_JOB_ID + "=" + A_JOB_ID;
+		HttpGet get = new HttpGet(url);
+		get.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC);
+		try (CloseableHttpResponse response = myClient.execute(get)) {
+			ourLog.info("Response: {}", response.toString());
+			String responseContent = IOUtils.toString(response.getEntity().getContent(), Charsets.UTF_8);
+			ourLog.info("Response content: {}", responseContent);
+
+			assertEquals(404, response.getStatusLine().getStatusCode());
+			assertEquals(Constants.CT_FHIR_JSON_NEW, response.getEntity().getContentType().getValue().replaceAll(";.*", "").trim());
+			assertThat(responseContent, containsString("\"diagnostics\":\"Unknown job: AAA\""));
+		}
+
+	}
+
+
+}
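The provider test above exercises the async kickoff protocol end to end: POST [base]/$export with Prefer: respond-async yields 202 Accepted plus a Content-Location pointing at $export-poll-status; polls return 202 with X-Progress and Retry-After while the job builds, and 200 with a JSON manifest once complete. Roughly, a client loop might look like the following sketch (backoff tuning and error handling omitted; `client` is an initialized CloseableHttpClient, the base URL is hypothetical, and the enclosing method is assumed to declare throws IOException, InterruptedException):

   HttpPost kickOff = new HttpPost("http://localhost:8080/fhir/$export");
   kickOff.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC);
   String pollUrl;
   try (CloseableHttpResponse response = client.execute(kickOff)) {
      // expect 202 Accepted; the poll location comes back in Content-Location
      pollUrl = response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue();
   }

   while (true) {
      try (CloseableHttpResponse response = client.execute(new HttpGet(pollUrl))) {
         if (response.getStatusLine().getStatusCode() == 200) {
            String manifest = IOUtils.toString(response.getEntity().getContent(), Charsets.UTF_8);
            BulkExportResponseJson output = JsonUtil.deserialize(manifest, BulkExportResponseJson.class);
            break; // output.getOutput() lists one Binary URL per exported file
         }
      }
      Thread.sleep(120_000); // honour Retry-After (120s in the test above)
   }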
@@ -0,0 +1,254 @@
+package ca.uhn.fhir.jpa.bulk;
+
+import ca.uhn.fhir.jpa.dao.data.IBulkExportCollectionDao;
+import ca.uhn.fhir.jpa.dao.data.IBulkExportCollectionFileDao;
+import ca.uhn.fhir.jpa.dao.data.IBulkExportJobDao;
+import ca.uhn.fhir.jpa.dao.r4.BaseJpaR4Test;
+import ca.uhn.fhir.jpa.entity.BulkExportCollectionEntity;
+import ca.uhn.fhir.jpa.entity.BulkExportCollectionFileEntity;
+import ca.uhn.fhir.jpa.entity.BulkExportJobEntity;
+import ca.uhn.fhir.rest.api.Constants;
+import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
+import ca.uhn.fhir.test.utilities.UnregisterScheduledProcessor;
+import com.google.common.collect.Sets;
+import org.apache.commons.lang3.time.DateUtils;
+import org.hl7.fhir.instance.model.api.IIdType;
+import org.hl7.fhir.r4.model.Binary;
+import org.hl7.fhir.r4.model.InstantType;
+import org.hl7.fhir.r4.model.Observation;
+import org.hl7.fhir.r4.model.Patient;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.test.context.TestPropertySource;
+
+import java.util.Date;
+import java.util.UUID;
+
+import static org.hamcrest.CoreMatchers.containsString;
+import static org.junit.Assert.*;
+
+@TestPropertySource(properties = {
+	UnregisterScheduledProcessor.SCHEDULING_DISABLED_EQUALS_TRUE
+})
+public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
+
+	private static final Logger ourLog = LoggerFactory.getLogger(BulkDataExportSvcImplR4Test.class);
+	@Autowired
+	private IBulkExportJobDao myBulkExportJobDao;
+	@Autowired
+	private IBulkExportCollectionDao myBulkExportCollectionDao;
+	@Autowired
+	private IBulkExportCollectionFileDao myBulkExportCollectionFileDao;
+	@Autowired
+	private IBulkDataExportSvc myBulkDataExportSvc;
+
+
+	@Test
+	public void testPurgeExpiredJobs() {
+
+		// Create an expired job
+		runInTransaction(() -> {
+
+			Binary b = new Binary();
+			b.setContent(new byte[]{0, 1, 2, 3});
+			String binaryId = myBinaryDao.create(b).getId().toUnqualifiedVersionless().getValue();
+
+			BulkExportJobEntity job = new BulkExportJobEntity();
+			job.setStatus(BulkJobStatusEnum.COMPLETE);
+			job.setExpiry(DateUtils.addHours(new Date(), -1));
+			job.setJobId(UUID.randomUUID().toString());
+			job.setRequest("$export");
+			myBulkExportJobDao.save(job);
+
+			BulkExportCollectionEntity collection = new BulkExportCollectionEntity();
+			job.getCollections().add(collection);
+			collection.setResourceType("Patient");
+			collection.setJob(job);
+			myBulkExportCollectionDao.save(collection);
+
+			BulkExportCollectionFileEntity file = new BulkExportCollectionFileEntity();
+			collection.getFiles().add(file);
+			file.setCollection(collection);
+			file.setResource(binaryId);
+			myBulkExportCollectionFileDao.save(file);
+
+		});
+
+		// Check that things were created
+		runInTransaction(() -> {
+			assertEquals(1, myResourceTableDao.count());
+			assertEquals(1, myBulkExportJobDao.count());
+			assertEquals(1, myBulkExportCollectionDao.count());
+			assertEquals(1, myBulkExportCollectionFileDao.count());
+		});
+
+		// Run a purge pass
+		myBulkDataExportSvc.purgeExpiredFiles();
+
+		// Check that things were deleted
+		runInTransaction(() -> {
+			assertEquals(0, myResourceTableDao.count());
+			assertEquals(0, myBulkExportJobDao.count());
+			assertEquals(0, myBulkExportCollectionDao.count());
+			assertEquals(0, myBulkExportCollectionFileDao.count());
+		});
+
+	}
+
+	@Test
+	public void testCreateBulkLoad_InvalidOutputFormat() {
+		try {
+			myBulkDataExportSvc.submitJob(Constants.CT_FHIR_JSON_NEW, Sets.newHashSet("Patient", "Observation"), null, null);
+			fail();
+		} catch (InvalidRequestException e) {
+			assertEquals("Invalid output format: application/fhir+json", e.getMessage());
+		}
+	}
+
+	@Test
+	public void testCreateBulkLoad_NoResourceTypes() {
+		try {
+			myBulkDataExportSvc.submitJob(Constants.CT_FHIR_NDJSON, Sets.newHashSet(), null, null);
+			fail();
+		} catch (InvalidRequestException e) {
+			assertEquals("No resource types specified", e.getMessage());
+		}
+	}
+
+	@Test
+	public void testCreateBulkLoad_InvalidResourceTypes() {
+		try {
+			myBulkDataExportSvc.submitJob(Constants.CT_FHIR_NDJSON, Sets.newHashSet("Patient", "FOO"), null, null);
+			fail();
+		} catch (InvalidRequestException e) {
+			assertEquals("Unknown or unsupported resource type: FOO", e.getMessage());
+		}
+	}
+
+	@Test
+	public void testCreateBulkLoad() {
+
+		// Create some resources to load
+		createResources();
+
+		// Create a bulk job
+		IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(null, Sets.newHashSet("Patient", "Observation"), null, null);
+		assertNotNull(jobDetails.getJobId());
+
+		// Check the status
+		IBulkDataExportSvc.JobInfo status = myBulkDataExportSvc.getJobStatusOrThrowResourceNotFound(jobDetails.getJobId());
+		assertEquals(BulkJobStatusEnum.SUBMITTED, status.getStatus());
+		assertEquals("/$export?_outputFormat=application%2Ffhir%2Bndjson&_type=Observation,Patient", status.getRequest());
+
+		// Run a scheduled pass to build the export
+		myBulkDataExportSvc.buildExportFiles();
+
+		// Fetch the job again
+		status = myBulkDataExportSvc.getJobStatusOrThrowResourceNotFound(jobDetails.getJobId());
+		assertEquals(BulkJobStatusEnum.COMPLETE, status.getStatus());
+		assertEquals(2, status.getFiles().size());
+
+		// Iterate over the files
+		for (IBulkDataExportSvc.FileEntry next : status.getFiles()) {
+			Binary nextBinary = myBinaryDao.read(next.getResourceId());
+			assertEquals(Constants.CT_FHIR_NDJSON, nextBinary.getContentType());
+			String nextContents = new String(nextBinary.getContent(), Constants.CHARSET_UTF8);
+			ourLog.info("Next contents for type {}:\n{}", next.getResourceType(), nextContents);
+
+			if ("Patient".equals(next.getResourceType())) {
+				assertThat(nextContents, containsString("\"value\":\"PAT0\"}]}\n"));
+				assertEquals(10, nextContents.split("\n").length);
+			} else if ("Observation".equals(next.getResourceType())) {
+				assertThat(nextContents, containsString("\"subject\":{\"reference\":\"Patient/PAT0\"}}\n"));
+				assertEquals(10, nextContents.split("\n").length);
+			} else {
+				fail(next.getResourceType());
+			}
+
+		}
+	}
+
+	@Test
+	public void testSubmitReusesExisting() {
+
+		// Submit
+		IBulkDataExportSvc.JobInfo jobDetails1 = myBulkDataExportSvc.submitJob(null, Sets.newHashSet("Patient", "Observation"), null, null);
+		assertNotNull(jobDetails1.getJobId());
+
+		// Submit again
+		IBulkDataExportSvc.JobInfo jobDetails2 = myBulkDataExportSvc.submitJob(null, Sets.newHashSet("Patient", "Observation"), null, null);
+		assertNotNull(jobDetails2.getJobId());
+
+		assertEquals(jobDetails1.getJobId(), jobDetails2.getJobId());
+	}
+
+
+	@Test
+	public void testCreateBulkLoad_WithSince() throws InterruptedException {
+
+		// Create some resources to load
+		createResources();
+
+		sleepUntilTimeChanges();
+		InstantType cutoff = InstantType.now();
+		sleepUntilTimeChanges();
+
+		for (int i = 10; i < 12; i++) {
+			Patient patient = new Patient();
+			patient.setId("PAT" + i);
+			patient.addIdentifier().setSystem("http://mrns").setValue("PAT" + i);
+			myPatientDao.update(patient).getId().toUnqualifiedVersionless();
+		}
+
+		// Create a bulk job
+		IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(null, Sets.newHashSet("Patient", "Observation"), cutoff.getValue(), null);
+		assertNotNull(jobDetails.getJobId());
+
+		// Check the status
+		IBulkDataExportSvc.JobInfo status = myBulkDataExportSvc.getJobStatusOrThrowResourceNotFound(jobDetails.getJobId());
+		assertEquals(BulkJobStatusEnum.SUBMITTED, status.getStatus());
+		assertEquals("/$export?_outputFormat=application%2Ffhir%2Bndjson&_type=Observation,Patient&_since=" + cutoff.setTimeZoneZulu(true).getValueAsString(), status.getRequest());
+
+		// Run a scheduled pass to build the export
+		myBulkDataExportSvc.buildExportFiles();
+
+		// Fetch the job again
+		status = myBulkDataExportSvc.getJobStatusOrThrowResourceNotFound(jobDetails.getJobId());
+		assertEquals(BulkJobStatusEnum.COMPLETE, status.getStatus());
+		assertEquals(1, status.getFiles().size());
+
+		// Iterate over the files
+		for (IBulkDataExportSvc.FileEntry next : status.getFiles()) {
+			Binary nextBinary = myBinaryDao.read(next.getResourceId());
+			assertEquals(Constants.CT_FHIR_NDJSON, nextBinary.getContentType());
+			String nextContents = new String(nextBinary.getContent(), Constants.CHARSET_UTF8);
+			ourLog.info("Next contents for type {}:\n{}", next.getResourceType(), nextContents);
+
+			if ("Patient".equals(next.getResourceType())) {
+				assertThat(nextContents, containsString("\"id\":\"PAT10\""));
+				assertThat(nextContents, containsString("\"id\":\"PAT11\""));
+				assertEquals(2, nextContents.split("\n").length);
+			} else {
+				fail(next.getResourceType());
+			}
+
+		}
+	}
+
+	private void createResources() {
+		for (int i = 0; i < 10; i++) {
+			Patient patient = new Patient();
+			patient.setId("PAT" + i);
+			patient.addIdentifier().setSystem("http://mrns").setValue("PAT" + i);
+			IIdType patId = myPatientDao.update(patient).getId().toUnqualifiedVersionless();
+
+			Observation obs = new Observation();
+			obs.setId("OBS" + i);
+			obs.setStatus(Observation.ObservationStatus.FINAL);
+			obs.getSubject().setReference(patId.getValue());
+			myObservationDao.update(obs);
+		}
+	}
+}
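As the assertions above show, each export file is stored as a Binary whose content type is Constants.CT_FHIR_NDJSON (application/fhir+ndjson) and whose payload holds one complete JSON-encoded resource per line. Consuming a file is therefore a line-by-line parse. A sketch mirroring the test's own DAO and context fields (names such as fileEntry and myFhirCtx are taken from or modelled on the surrounding test code and may differ in other contexts):

   Binary exportFile = myBinaryDao.read(fileEntry.getResourceId()); // fileEntry: one of status.getFiles()
   String ndjson = new String(exportFile.getContent(), Constants.CHARSET_UTF8);
   IParser parser = myFhirCtx.newJsonParser();
   for (String line : ndjson.split("\n")) {
      IBaseResource resource = parser.parseResource(line); // each line is a standalone resource
   }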
@ -4,6 +4,7 @@ import ca.uhn.fhir.context.FhirContext;
|
||||||
import ca.uhn.fhir.interceptor.api.IInterceptorService;
|
import ca.uhn.fhir.interceptor.api.IInterceptorService;
|
||||||
import ca.uhn.fhir.interceptor.api.Pointcut;
|
import ca.uhn.fhir.interceptor.api.Pointcut;
|
||||||
import ca.uhn.fhir.interceptor.executor.InterceptorService;
|
import ca.uhn.fhir.interceptor.executor.InterceptorService;
|
||||||
|
import ca.uhn.fhir.jpa.bulk.IBulkDataExportSvc;
|
||||||
import ca.uhn.fhir.jpa.entity.TermConcept;
|
import ca.uhn.fhir.jpa.entity.TermConcept;
|
||||||
import ca.uhn.fhir.jpa.model.util.JpaConstants;
|
import ca.uhn.fhir.jpa.model.util.JpaConstants;
|
||||||
import ca.uhn.fhir.jpa.provider.SystemProviderDstu2Test;
|
import ca.uhn.fhir.jpa.provider.SystemProviderDstu2Test;
|
||||||
|
@ -389,9 +390,10 @@ public abstract class BaseJpaTest {
|
||||||
return IOUtils.toString(bundleRes, Constants.CHARSET_UTF8);
|
return IOUtils.toString(bundleRes, Constants.CHARSET_UTF8);
|
||||||
}
|
}
|
||||||
|
|
||||||
protected static void purgeDatabase(DaoConfig theDaoConfig, IFhirSystemDao<?, ?> theSystemDao, IResourceReindexingSvc theResourceReindexingSvc, ISearchCoordinatorSvc theSearchCoordinatorSvc, ISearchParamRegistry theSearchParamRegistry) {
|
protected static void purgeDatabase(DaoConfig theDaoConfig, IFhirSystemDao<?, ?> theSystemDao, IResourceReindexingSvc theResourceReindexingSvc, ISearchCoordinatorSvc theSearchCoordinatorSvc, ISearchParamRegistry theSearchParamRegistry, IBulkDataExportSvc theBulkDataExportSvc) {
|
||||||
theSearchCoordinatorSvc.cancelAllActiveSearches();
|
theSearchCoordinatorSvc.cancelAllActiveSearches();
|
||||||
theResourceReindexingSvc.cancelAndPurgeAllJobs();
|
theResourceReindexingSvc.cancelAndPurgeAllJobs();
|
||||||
|
theBulkDataExportSvc.cancelAndPurgeAllJobs();
|
||||||
|
|
||||||
boolean expungeEnabled = theDaoConfig.isExpungeEnabled();
|
boolean expungeEnabled = theDaoConfig.isExpungeEnabled();
|
||||||
theDaoConfig.setExpungeEnabled(true);
|
theDaoConfig.setExpungeEnabled(true);
|
||||||
|
|
|
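Worth noting on the hunk above: purgeDatabase() now takes the bulk export service so that in-flight export jobs are cancelled before the expunge runs, mirroring what was already done for active searches and reindexing jobs. Abridged from the change itself (not the complete method body):

```java
// Cleanup order in BaseJpaTest.purgeDatabase(): stop everything that might
// still be writing before the expunge pass. Abridged from the hunk above.
theSearchCoordinatorSvc.cancelAllActiveSearches();
theResourceReindexingSvc.cancelAndPurgeAllJobs();
theBulkDataExportSvc.cancelAndPurgeAllJobs(); // new: bulk export jobs too

boolean expungeEnabled = theDaoConfig.isExpungeEnabled();
theDaoConfig.setExpungeEnabled(true);
// ... expunge everything, then restore the original flag ...
```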
@@ -1,6 +1,7 @@
 package ca.uhn.fhir.jpa.dao.dstu2;

 import ca.uhn.fhir.context.FhirContext;
+import ca.uhn.fhir.jpa.bulk.IBulkDataExportSvc;
 import ca.uhn.fhir.jpa.config.TestDstu2Config;
 import ca.uhn.fhir.jpa.dao.*;
 import ca.uhn.fhir.jpa.dao.data.IResourceIndexedSearchParamStringDao;
@@ -185,6 +186,8 @@ public abstract class BaseJpaDstu2Test extends BaseJpaTest {
 	protected IFhirResourceDaoValueSet<ValueSet, CodingDt, CodeableConceptDt> myValueSetDao;
 	@Autowired
 	protected SubscriptionLoader mySubscriptionLoader;
+	@Autowired
+	private IBulkDataExportSvc myBulkDataExportSvc;

 	@Before
 	public void beforeFlushFT() {
@@ -202,7 +205,7 @@ public abstract class BaseJpaDstu2Test extends BaseJpaTest {
 	@Before
 	@Transactional()
 	public void beforePurgeDatabase() {
-		purgeDatabase(myDaoConfig, mySystemDao, myResourceReindexingSvc, mySearchCoordinatorSvc, mySearchParamRegistry);
+		purgeDatabase(myDaoConfig, mySystemDao, myResourceReindexingSvc, mySearchCoordinatorSvc, mySearchParamRegistry, myBulkDataExportSvc);
 	}

 	@Before
@@ -1,6 +1,7 @@
 package ca.uhn.fhir.jpa.dao.dstu3;

 import ca.uhn.fhir.context.FhirContext;
+import ca.uhn.fhir.jpa.bulk.IBulkDataExportSvc;
 import ca.uhn.fhir.jpa.config.TestDstu3Config;
 import ca.uhn.fhir.jpa.dao.*;
 import ca.uhn.fhir.jpa.dao.data.*;
@@ -256,6 +257,8 @@ public abstract class BaseJpaDstu3Test extends BaseJpaTest {
 	protected ITermConceptMapGroupElementTargetDao myTermConceptMapGroupElementTargetDao;
 	@Autowired
 	private JpaValidationSupportChainDstu3 myJpaValidationSupportChainDstu3;
+	@Autowired
+	private IBulkDataExportSvc myBulkDataExportSvc;

 	@After()
 	public void afterCleanupDao() {
@@ -302,7 +305,7 @@ public abstract class BaseJpaDstu3Test extends BaseJpaTest {
 	@Before
 	@Transactional()
 	public void beforePurgeDatabase() {
-		purgeDatabase(myDaoConfig, mySystemDao, myResourceReindexingSvc, mySearchCoordinatorSvc, mySearchParamRegistry);
+		purgeDatabase(myDaoConfig, mySystemDao, myResourceReindexingSvc, mySearchCoordinatorSvc, mySearchParamRegistry, myBulkDataExportSvc);
 	}

 	@Before
@@ -1,8 +1,9 @@
 package ca.uhn.fhir.jpa.dao.dstu3;

-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.fail;
+import ca.uhn.fhir.jpa.dao.DaoConfig;
+import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
+import ca.uhn.fhir.rest.server.exceptions.ResourceVersionConflictException;
+import ca.uhn.fhir.util.TestUtil;
 import org.hl7.fhir.dstu3.model.Organization;
 import org.hl7.fhir.dstu3.model.Patient;
 import org.hl7.fhir.dstu3.model.Reference;
@@ -11,18 +12,11 @@ import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.Test;

-import ca.uhn.fhir.jpa.dao.DaoConfig;
-import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
-import ca.uhn.fhir.rest.server.exceptions.ResourceVersionConflictException;
-import ca.uhn.fhir.util.TestUtil;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;

 public class FhirResourceDaoDstu3ReferentialIntegrityTest extends BaseJpaDstu3Test {

-	@AfterClass
-	public static void afterClassClearContext() {
-		TestUtil.clearAllStaticFieldsForUnitTest();
-	}
-
 	@After
 	public void afterResetConfig() {
 		myDaoConfig.setEnforceReferentialIntegrityOnWrite(new DaoConfig().isEnforceReferentialIntegrityOnWrite());
@@ -46,11 +40,11 @@ public class FhirResourceDaoDstu3ReferentialIntegrityTest extends BaseJpaDstu3Test {
 	@Test
 	public void testCreateUnknownReferenceAllow() throws Exception {
 		myDaoConfig.setEnforceReferentialIntegrityOnWrite(false);

 		Patient p = new Patient();
 		p.setManagingOrganization(new Reference("Organization/AAA"));
 		IIdType id = myPatientDao.create(p).getId().toUnqualifiedVersionless();

 		p = myPatientDao.read(id);
 		assertEquals("Organization/AAA", p.getManagingOrganization().getReference());

@@ -61,11 +55,11 @@ public class FhirResourceDaoDstu3ReferentialIntegrityTest extends BaseJpaDstu3Test {
 		Organization o = new Organization();
 		o.setName("FOO");
 		IIdType oid = myOrganizationDao.create(o).getId().toUnqualifiedVersionless();

 		Patient p = new Patient();
 		p.setManagingOrganization(new Reference(oid));
 		IIdType pid = myPatientDao.create(p).getId().toUnqualifiedVersionless();

 		try {
 			myOrganizationDao.delete(oid);
 			fail();
@@ -81,19 +75,24 @@ public class FhirResourceDaoDstu3ReferentialIntegrityTest extends BaseJpaDstu3Test {
 	@Test
 	public void testDeleteAllow() throws Exception {
 		myDaoConfig.setEnforceReferentialIntegrityOnDelete(false);

 		Organization o = new Organization();
 		o.setName("FOO");
 		IIdType oid = myOrganizationDao.create(o).getId().toUnqualifiedVersionless();

 		Patient p = new Patient();
 		p.setManagingOrganization(new Reference(oid));
 		IIdType pid = myPatientDao.create(p).getId().toUnqualifiedVersionless();

 		myOrganizationDao.delete(oid);
 		myPatientDao.delete(pid);

 	}

+	@AfterClass
+	public static void afterClassClearContext() {
+		TestUtil.clearAllStaticFieldsForUnitTest();
+	}
+

 }
@@ -1,6 +1,7 @@
 package ca.uhn.fhir.jpa.dao.dstu3;

 import ca.uhn.fhir.context.FhirContext;
+import ca.uhn.fhir.jpa.bulk.IBulkDataExportSvc;
 import ca.uhn.fhir.jpa.config.TestDstu3WithoutLuceneConfig;
 import ca.uhn.fhir.jpa.dao.*;
 import ca.uhn.fhir.jpa.provider.dstu3.JpaSystemProviderDstu3;
@@ -149,10 +150,12 @@ public class FhirResourceDaoDstu3SearchWithLuceneDisabledTest extends BaseJpaTest {
 	private IValidationSupport myValidationSupport;
 	@Autowired
 	private IResourceReindexingSvc myResourceReindexingSvc;
+	@Autowired
+	private IBulkDataExportSvc myBulkDataExportSvc;

 	@Before
 	public void beforePurgeDatabase() {
-		purgeDatabase(myDaoConfig, mySystemDao, myResourceReindexingSvc, mySearchCoordinatorSvc, mySearchParamRegistry);
+		purgeDatabase(myDaoConfig, mySystemDao, myResourceReindexingSvc, mySearchCoordinatorSvc, mySearchParamRegistry, myBulkDataExportSvc);
 	}

 	@Before
@@ -73,7 +73,7 @@ public class PartitionRunnerTest {
 		myLatch.setExpectedCount(1);
 		myPartitionRunner.runInPartitionedThreads(resourceIds, partitionConsumer);
 		PartitionCall partitionCall = (PartitionCall) PointcutLatch.getLatchInvocationParameter(myLatch.awaitExpected());
-		assertEquals(EXPUNGE_THREADNAME_1, partitionCall.threadName);
+		assertEquals("main", partitionCall.threadName);
 		assertEquals(1, partitionCall.size);
 	}

@@ -86,7 +86,7 @@ public class PartitionRunnerTest {
 		myLatch.setExpectedCount(1);
 		myPartitionRunner.runInPartitionedThreads(resourceIds, partitionConsumer);
 		PartitionCall partitionCall = (PartitionCall) PointcutLatch.getLatchInvocationParameter(myLatch.awaitExpected());
-		assertEquals(EXPUNGE_THREADNAME_1, partitionCall.threadName);
+		assertEquals("main", partitionCall.threadName);
 		assertEquals(2, partitionCall.size);
 	}

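The two assertions above change from a named expunge worker thread to "main", which suggests that small workloads now execute on the calling thread instead of being handed to the pool. The PartitionRunner internals are not part of this diff, so purely as an illustration of one common way that behavior arises, here is a hypothetical demo of a caller-runs rejection policy on a saturated executor:

```java
import java.util.concurrent.SynchronousQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

// Hypothetical demo, not PartitionRunner source: with CallerRunsPolicy, work
// rejected by a busy pool runs on the submitting thread, so a test asserting
// the executing thread's name would observe "main".
public class CallerRunsDemo {
	public static void main(String[] args) {
		ThreadPoolExecutor pool = new ThreadPoolExecutor(1, 1, 0L, TimeUnit.MILLISECONDS,
			new SynchronousQueue<>(), new ThreadPoolExecutor.CallerRunsPolicy());
		pool.execute(CallerRunsDemo::spin);   // occupies the only worker thread
		pool.execute(() -> System.out.println(Thread.currentThread().getName())); // prints "main"
		pool.shutdown();
	}

	private static void spin() {
		try {
			Thread.sleep(500);
		} catch (InterruptedException e) {
			Thread.currentThread().interrupt();
		}
	}
}
```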
@@ -4,6 +4,7 @@ import ca.uhn.fhir.context.FhirContext;
 import ca.uhn.fhir.interceptor.api.IInterceptorService;
 import ca.uhn.fhir.jpa.binstore.BinaryAccessProvider;
 import ca.uhn.fhir.jpa.binstore.BinaryStorageInterceptor;
+import ca.uhn.fhir.jpa.bulk.IBulkDataExportSvc;
 import ca.uhn.fhir.jpa.config.TestR4Config;
 import ca.uhn.fhir.jpa.dao.*;
 import ca.uhn.fhir.jpa.dao.data.*;
@@ -315,6 +316,8 @@ public abstract class BaseJpaR4Test extends BaseJpaTest {
 	private List<Object> mySystemInterceptors;
 	@Autowired
 	private DaoRegistry myDaoRegistry;
+	@Autowired
+	private IBulkDataExportSvc myBulkDataExportSvc;

 	@After()
 	public void afterCleanupDao() {
@@ -376,7 +379,7 @@ public abstract class BaseJpaR4Test extends BaseJpaTest {
 	@Before
 	@Transactional()
 	public void beforePurgeDatabase() {
-		purgeDatabase(myDaoConfig, mySystemDao, myResourceReindexingSvc, mySearchCoordinatorSvc, mySearchParamRegistry);
+		purgeDatabase(myDaoConfig, mySystemDao, myResourceReindexingSvc, mySearchCoordinatorSvc, mySearchParamRegistry, myBulkDataExportSvc);
 	}

 	@Before
@@ -13,7 +13,6 @@ import ca.uhn.fhir.rest.api.server.IBundleProvider;
 import ca.uhn.fhir.rest.api.server.IPreResourceAccessDetails;
 import ca.uhn.fhir.rest.server.RestfulServer;
 import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
-import com.google.common.collect.Lists;
 import org.apache.commons.collections4.ListUtils;
 import org.hl7.fhir.instance.model.api.IBaseResource;
 import org.hl7.fhir.instance.model.api.IIdType;
@@ -32,7 +31,6 @@ import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
 import javax.servlet.ServletException;
 import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.Collections;
 import java.util.List;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.stream.Collectors;
@@ -54,9 +52,9 @@ public class ConsentEventsDaoR4Test extends BaseJpaR4SystemTest {
 	private InterceptorService myInterceptorService;
 	private List<String> myObservationIdsOddOnly;
 	private List<String> myObservationIdsEvenOnly;
-	private List<String> myObservationIdsEvenOnlyBackwards;
-	private List<String> myObservationIdsBackwards;
+	private List<String> myObservationIdsWithVersions;
 	private List<String> myPatientIdsEvenOnly;
+	private List<String> myObservationIdsEvenOnlyWithVersions;

 	@After
 	public void after() {
@@ -313,9 +311,9 @@ public class ConsentEventsDaoR4Test extends BaseJpaR4SystemTest {
 		 * Note: Each observation in the observation list will appear twice in the actual
 		 * returned results because we create it then update it in create50Observations()
 		 */
-		assertEquals(sort(myObservationIdsEvenOnlyBackwards.subList(0, 3), myObservationIdsEvenOnlyBackwards.subList(0, 3)), sort(returnedIdValues));
 		assertEquals(1, hitCount.get());
-		assertEquals(sort(myObservationIdsBackwards.subList(0, 5), myObservationIdsBackwards.subList(0, 5)), sort(interceptedResourceIds));
+		assertEquals(myObservationIdsWithVersions.subList(90, myObservationIdsWithVersions.size()), sort(interceptedResourceIds));
+		assertEquals(myObservationIdsEvenOnlyWithVersions.subList(44, 50), sort(returnedIdValues));

 	}

@@ -349,6 +347,7 @@ public class ConsentEventsDaoR4Test extends BaseJpaR4SystemTest {
 	private void create50Observations() {
 		myPatientIds = new ArrayList<>();
 		myObservationIds = new ArrayList<>();
+		myObservationIdsWithVersions = new ArrayList<>();

 		Patient p = new Patient();
 		p.setActive(true);
@@ -370,6 +369,7 @@ public class ConsentEventsDaoR4Test extends BaseJpaR4SystemTest {
 			obs1.addIdentifier().setSystem("urn:system").setValue("I" + leftPad("" + i, 5, '0'));
 			IIdType obs1id = myObservationDao.create(obs1).getId().toUnqualifiedVersionless();
 			myObservationIds.add(obs1id.toUnqualifiedVersionless().getValue());
+			myObservationIdsWithVersions.add(obs1id.toUnqualifiedVersionless().getValue());

 			obs1.setId(obs1id);
 			if (obs1id.getIdPartAsLong() % 2 == 0) {
@@ -378,6 +378,8 @@ public class ConsentEventsDaoR4Test extends BaseJpaR4SystemTest {
 				obs1.getSubject().setReference(oddPid);
 			}
 			myObservationDao.update(obs1);
+			myObservationIdsWithVersions.add(obs1id.toUnqualifiedVersionless().getValue());
+
 		}

 		myPatientIdsEvenOnly =
@@ -391,10 +393,13 @@ public class ConsentEventsDaoR4Test extends BaseJpaR4SystemTest {
 				.stream()
 				.filter(t -> Long.parseLong(t.substring(t.indexOf('/') + 1)) % 2 == 0)
 				.collect(Collectors.toList());
+		myObservationIdsEvenOnlyWithVersions =
+			myObservationIdsWithVersions
+				.stream()
+				.filter(t -> Long.parseLong(t.substring(t.indexOf('/') + 1)) % 2 == 0)
+				.collect(Collectors.toList());

 		myObservationIdsOddOnly = ListUtils.removeAll(myObservationIds, myObservationIdsEvenOnly);
-		myObservationIdsBackwards = Lists.reverse(myObservationIds);
-		myObservationIdsEvenOnlyBackwards = Lists.reverse(myObservationIdsEvenOnly);
 	}

 	static class PreAccessInterceptorCounting implements IAnonymousInterceptor {
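A note on the magic numbers introduced above: create50Observations() now records every observation id twice in myObservationIdsWithVersions (once at create, once at update), giving 100 entries, so subList(90, size()) is the final ten entries, i.e. the last five observations. Assuming the serially assigned ids split evenly between even and odd, the even-only view holds 50 entries and subList(44, 50) is its last six. Sketched:

```java
// Assumes ids are assigned serially, so even/odd ids split 25/25.
int entriesPerObservation = 2;              // one entry at create, one at update
int total = 50 * entriesPerObservation;     // 100 entries in myObservationIdsWithVersions
int evenOnly = 25 * entriesPerObservation;  // 50 entries in the even-only view
List<String> lastTenIntercepted = myObservationIdsWithVersions.subList(90, total);        // last 10 entries
List<String> lastSixReturned = myObservationIdsEvenOnlyWithVersions.subList(44, evenOnly); // last 6 entries
```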
@@ -78,7 +78,7 @@ public class FhirResourceDaoR4CacheWarmingTest extends BaseJpaR4Test {
 				.setUrl("Patient?name=smith")
 		);
 		CacheWarmingSvcImpl cacheWarmingSvc = (CacheWarmingSvcImpl) myCacheWarmingSvc;
-		cacheWarmingSvc.initCacheMap();
+		ourLog.info("Have {} tasks", cacheWarmingSvc.initCacheMap().size());

 		Patient p1 = new Patient();
 		p1.setId("p1");
@@ -2,6 +2,7 @@ package ca.uhn.fhir.jpa.dao.r4;

 import ca.uhn.fhir.context.FhirContext;
 import ca.uhn.fhir.jpa.config.TestR4Config;
+import ca.uhn.fhir.jpa.bulk.IBulkDataExportSvc;
 import ca.uhn.fhir.jpa.config.TestR4WithoutLuceneConfig;
 import ca.uhn.fhir.jpa.dao.*;
 import ca.uhn.fhir.jpa.search.ISearchCoordinatorSvc;
@@ -37,7 +38,6 @@ import java.util.List;

 import static org.junit.Assert.*;
 import static org.mockito.ArgumentMatchers.any;
-import static org.mockito.Mockito.when;

 @RunWith(SpringJUnit4ClassRunner.class)
 @ContextConfiguration(classes = {TestR4WithoutLuceneConfig.class})
@@ -110,11 +110,13 @@ public class FhirResourceDaoR4SearchWithLuceneDisabledTest extends BaseJpaTest {
 	private IFhirSystemDao<Bundle, Meta> mySystemDao;
 	@Autowired
 	private IResourceReindexingSvc myResourceReindexingSvc;
+	@Autowired
+	private IBulkDataExportSvc myBulkDataExportSvc;

 	@Before
 	@Transactional()
 	public void beforePurgeDatabase() {
-		purgeDatabase(myDaoConfig, mySystemDao, myResourceReindexingSvc, mySearchCoordinatorSvc, mySearchParamRegistry);
+		purgeDatabase(myDaoConfig, mySystemDao, myResourceReindexingSvc, mySearchCoordinatorSvc, mySearchParamRegistry, myBulkDataExportSvc);
 	}

 	@Before
@@ -696,7 +696,8 @@ public class FhirResourceDaoR4UniqueSearchParamTest extends BaseJpaR4Test {

 		myResourceReindexingSvc.markAllResourcesForReindexing("Observation");
-		assertEquals(1, myResourceReindexingSvc.forceReindexingPass());
+		myResourceReindexingSvc.forceReindexingPass();
+		myResourceReindexingSvc.forceReindexingPass();
 		assertEquals(0, myResourceReindexingSvc.forceReindexingPass());

 		uniques = myResourceIndexedCompositeStringUniqueDao.findAll();
@@ -48,6 +48,11 @@ public class SearchParamExtractorR4Test {
 			return getActiveSearchParams(theResourceName).get(theParamName);
 		}

+		@Override
+		public void refreshCacheIfNecessary() {
+			// nothing
+		}
+
 		@Override
 		public Map<String, Map<String, RuntimeSearchParam>> getActiveSearchParams() {
 			throw new UnsupportedOperationException();
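The no-op refreshCacheIfNecessary() override added above is the usual pattern for keeping a hand-rolled test fake compiling when its interface grows: lifecycle hooks become no-ops, while anything the test must never exercise keeps throwing. A generic illustration with a made-up interface (not HAPI's):

```java
import java.util.Map;

// Made-up interface for illustration only; the shape mirrors the fake
// search-param registry in the test above.
interface Registry {
	void refreshCacheIfNecessary();
	Map<String, String> getActiveEntries();
}

class StubRegistry implements Registry {
	@Override
	public void refreshCacheIfNecessary() {
		// nothing - the test sets up state explicitly, so the hook is a no-op
	}

	@Override
	public Map<String, String> getActiveEntries() {
		// fail loudly if a code path the test does not expect gets exercised
		throw new UnsupportedOperationException();
	}
}
```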
@@ -4,6 +4,7 @@ import ca.uhn.fhir.context.FhirContext;
 import ca.uhn.fhir.interceptor.api.IInterceptorService;
 import ca.uhn.fhir.jpa.binstore.BinaryAccessProvider;
 import ca.uhn.fhir.jpa.binstore.BinaryStorageInterceptor;
+import ca.uhn.fhir.jpa.bulk.IBulkDataExportSvc;
 import ca.uhn.fhir.jpa.config.TestR5Config;
 import ca.uhn.fhir.jpa.dao.*;
 import ca.uhn.fhir.jpa.dao.data.*;
@@ -315,6 +316,8 @@ public abstract class BaseJpaR5Test extends BaseJpaTest {
 	private List<Object> mySystemInterceptors;
 	@Autowired
 	private DaoRegistry myDaoRegistry;
+	@Autowired
+	private IBulkDataExportSvc myBulkDataExportSvc;

 	@After()
 	public void afterCleanupDao() {
@@ -376,7 +379,7 @@ public abstract class BaseJpaR5Test extends BaseJpaTest {
 	@Before
 	@Transactional()
 	public void beforePurgeDatabase() {
-		purgeDatabase(myDaoConfig, mySystemDao, myResourceReindexingSvc, mySearchCoordinatorSvc, mySearchParamRegistry);
+		purgeDatabase(myDaoConfig, mySystemDao, myResourceReindexingSvc, mySearchCoordinatorSvc, mySearchParamRegistry, myBulkDataExportSvc);
 	}

 	@Before
@@ -4,7 +4,7 @@ import ca.uhn.fhir.jpa.config.BaseConfig;
 import ca.uhn.fhir.jpa.config.TestR4Config;
 import ca.uhn.fhir.jpa.dao.DaoConfig;
 import ca.uhn.fhir.rest.api.Constants;
-import ca.uhn.fhir.rest.api.PreferReturnEnum;
+import ca.uhn.fhir.rest.api.PreferHeader;
 import ca.uhn.fhir.rest.api.server.RequestDetails;
 import ca.uhn.fhir.rest.client.interceptor.CapturingInterceptor;
 import ca.uhn.fhir.rest.server.exceptions.BaseServerResponseException;
@@ -349,7 +349,7 @@ public class ConsentInterceptorResourceProviderR4Test extends BaseResourceProviderR4Test {

 		Patient patient = new Patient();
 		patient.setActive(true);
-		IIdType id = ourClient.create().resource(patient).prefer(PreferReturnEnum.REPRESENTATION).execute().getId().toUnqualifiedVersionless();
+		IIdType id = ourClient.create().resource(patient).prefer(PreferHeader.PreferReturnEnum.REPRESENTATION).execute().getId().toUnqualifiedVersionless();

 		DelegatingConsentService consentService = new DelegatingConsentService();
 		myConsentInterceptor = new ConsentInterceptor(consentService, IConsentContextServices.NULL_IMPL);
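PreferReturnEnum has moved inside PreferHeader in this commit, so call sites now qualify it with the enclosing class; usage is otherwise unchanged. A sketch of the fluent call as it reads after the change, where client and patient stand in for whatever the caller has in scope:

```java
// Assumes an IGenericClient named client and a Patient named patient in scope.
MethodOutcome outcome = client
	.create()
	.resource(patient)
	.prefer(PreferHeader.PreferReturnEnum.REPRESENTATION) // sends "Prefer: return=representation"
	.execute();
// With return=representation the server echoes the stored resource back:
IBaseResource created = outcome.getResource();
```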
@@ -2,7 +2,6 @@ package ca.uhn.fhir.jpa.provider.r4;

 import ca.uhn.fhir.jpa.dao.DaoRegistry;
 import ca.uhn.fhir.jpa.util.TestUtil;
-import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
 import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
 import org.hl7.fhir.r4.model.Patient;
 import org.hl7.fhir.r4.model.Practitioner;
@@ -10,16 +9,18 @@ import org.junit.AfterClass;
 import org.junit.Test;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.context.annotation.Bean;
+import org.springframework.test.annotation.DirtiesContext;
 import org.springframework.test.context.ContextConfiguration;

 import javax.annotation.PostConstruct;
+import javax.annotation.PreDestroy;

 import static org.hamcrest.CoreMatchers.containsString;
-import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertThat;

 @SuppressWarnings("Duplicates")
 @ContextConfiguration(classes = {ResourceProviderOnlySomeResourcesProvidedR4Test.OnlySomeResourcesProvidedCtxConfig.class})
+@DirtiesContext(classMode = DirtiesContext.ClassMode.AFTER_CLASS)
 public class ResourceProviderOnlySomeResourcesProvidedR4Test extends BaseResourceProviderR4Test {

 	@Test
@@ -62,6 +63,13 @@ public class ResourceProviderOnlySomeResourcesProvidedR4Test extends BaseResourceProviderR4Test {
 		}

 	}

+		@PreDestroy
+		public void stop() {
+			myDaoRegistry.setSupportedResourceTypes();
+		}
+
 	}

 	@AfterClass
@@ -31,6 +31,7 @@ import com.google.common.collect.Lists;
 import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.lang3.Validate;
+import org.apache.commons.lang3.time.DateUtils;
 import org.apache.http.NameValuePair;
 import org.apache.http.client.entity.UrlEncodedFormEntity;
 import org.apache.http.client.methods.*;
@@ -491,13 +492,13 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
 	private void checkParamMissing(String paramName) throws IOException {
 		HttpGet get = new HttpGet(ourServerBase + "/Observation?" + paramName + ":missing=false");
 		CloseableHttpResponse resp = ourHttpClient.execute(get);
-		IOUtils.closeQuietly(resp.getEntity().getContent());
+		resp.getEntity().getContent().close();
 		assertEquals(200, resp.getStatusLine().getStatusCode());
 	}

 	private ArrayList<IBaseResource> genResourcesOfType(Bundle theRes, Class<? extends IBaseResource> theClass) {
-		ArrayList<IBaseResource> retVal = new ArrayList<IBaseResource>();
+		ArrayList<IBaseResource> retVal = new ArrayList<>();
 		for (BundleEntryComponent next : theRes.getEntry()) {
 			if (next.getResource() != null) {
 				if (theClass.isAssignableFrom(next.getResource().getClass())) {
@@ -531,14 +532,11 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {

 		ourLog.info("About to perform search for: {}", theUri);

-		CloseableHttpResponse response = ourHttpClient.execute(get);
-		try {
+		try (CloseableHttpResponse response = ourHttpClient.execute(get)) {
 			String resp = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
 			ourLog.info(resp);
 			Bundle bundle = myFhirCtx.newXmlParser().parseResource(Bundle.class, resp);
 			ids = toUnqualifiedIdValues(bundle);
-		} finally {
-			IOUtils.closeQuietly(response);
 		}
 		return ids;
 	}
@@ -547,14 +545,11 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
 		List<String> ids;
 		HttpGet get = new HttpGet(uri);

-		CloseableHttpResponse response = ourHttpClient.execute(get);
-		try {
+		try (CloseableHttpResponse response = ourHttpClient.execute(get)) {
 			String resp = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
 			ourLog.info(resp);
 			Bundle bundle = myFhirCtx.newXmlParser().parseResource(Bundle.class, resp);
 			ids = toUnqualifiedVersionlessIdValues(bundle);
-		} finally {
-			IOUtils.closeQuietly(response);
 		}
 		return ids;
 	}
@@ -589,7 +584,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
 		String resBody = IOUtils.toString(ResourceProviderR4Test.class.getResource("/r4/document-father.json"), StandardCharsets.UTF_8);
 		resBody = resBody.replace("\"type\": \"document\"", "\"type\": \"transaction\"");
 		try {
-			client.create().resource(resBody).execute().getId();
+			client.create().resource(resBody).execute();
 			fail();
 		} catch (UnprocessableEntityException e) {
 			assertThat(e.getMessage(), containsString("Unable to store a Bundle resource on this server with a Bundle.type value of: transaction"));
@@ -688,7 +683,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
 			.create()
 			.resource(p)
 			.conditionalByUrl("Patient?identifier=foo|bar")
-			.prefer(PreferReturnEnum.REPRESENTATION)
+			.prefer(PreferHeader.PreferReturnEnum.REPRESENTATION)
 			.execute();

 		assertEquals(id.getIdPart(), outcome.getId().getIdPart());
@@ -721,7 +716,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
 			assertEquals(200, resp.getStatusLine().getStatusCode());
 			assertEquals(resource.withVersion("2").getValue(), resp.getFirstHeader("Content-Location").getValue());
 		} finally {
-			IOUtils.closeQuietly(resp);
+			resp.close();
 		}

 		fromDB = ourClient.read().resource(Binary.class).withId(resource.toVersionless()).execute();
@@ -737,7 +732,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
 			assertEquals(200, resp.getStatusLine().getStatusCode());
 			assertEquals(resource.withVersion("3").getValue(), resp.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue());
 		} finally {
-			IOUtils.closeQuietly(resp);
+			resp.close();
 		}

 		fromDB = ourClient.read().resource(Binary.class).withId(resource.toVersionless()).execute();
@@ -754,7 +749,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
 		try {
 			assertEquals(400, resp.getStatusLine().getStatusCode());
 		} finally {
-			IOUtils.closeQuietly(resp);
+			resp.close();
 		}

 		fromDB = ourClient.read().resource(Binary.class).withId(resource.toVersionless()).execute();
@@ -769,7 +764,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
 	public void testCreateBundle() throws IOException {
 		String input = IOUtils.toString(getClass().getResourceAsStream("/bryn-bundle.json"), StandardCharsets.UTF_8);
 		Validate.notNull(input);
-		ourClient.create().resource(input).execute().getResource();
+		ourClient.create().resource(input).execute();
 	}

 	@Test
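The HTTP-client cleanups running through this file all follow one shape, shown in isolation below: a manual try/finally with IOUtils.closeQuietly() becomes try-with-resources, which works because CloseableHttpResponse is AutoCloseable. The response is closed even when an assertion throws, and the silent-swallow semantics of closeQuietly() go away.

```java
// Before: close by hand, swallowing any close() failure.
CloseableHttpResponse response = ourHttpClient.execute(get);
try {
	// ... read and assert on the response ...
} finally {
	IOUtils.closeQuietly(response);
}

// After: the compiler generates the close, and close() failures surface.
try (CloseableHttpResponse response = ourHttpClient.execute(get)) {
	// ... read and assert on the response ...
}
```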
@@ -1659,7 +1654,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
 		b = parser.parseResource(Bundle.class, new InputStreamReader(ResourceProviderR4Test.class.getResourceAsStream("/r4/bug147-bundle.json")));

 		Bundle resp = ourClient.transaction().withBundle(b).execute();
-		List<IdType> ids = new ArrayList<IdType>();
+		List<IdType> ids = new ArrayList<>();
 		for (BundleEntryComponent next : resp.getEntry()) {
 			IdType toAdd = new IdType(next.getResponse().getLocation()).toUnqualifiedVersionless();
 			ids.add(toAdd);
@@ -1673,7 +1668,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
 		Parameters output = ourClient.operation().onInstance(patientId).named("everything").withNoParameters(Parameters.class).execute();
 		b = (Bundle) output.getParameter().get(0).getResource();

-		ids = new ArrayList<IdType>();
+		ids = new ArrayList<>();
 		boolean dupes = false;
 		for (BundleEntryComponent next : b.getEntry()) {
 			IdType toAdd = next.getResource().getIdElement().toUnqualifiedVersionless();
@@ -1694,7 +1689,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
 		Parameters output = ourClient.operation().onInstance(patientId).named("everything").withParameters(input).execute();
 		b = (Bundle) output.getParameter().get(0).getResource();

-		ids = new ArrayList<IdType>();
+		ids = new ArrayList<>();
 		boolean dupes = false;
 		for (BundleEntryComponent next : b.getEntry()) {
 			IdType toAdd = next.getResource().getIdElement().toUnqualifiedVersionless();
@@ -1760,7 +1755,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
 		Parameters output = ourClient.operation().onInstance(patientId).named("everything").withNoParameters(Parameters.class).execute();
 		b = (Bundle) output.getParameter().get(0).getResource();

-		List<IdType> ids = new ArrayList<IdType>();
+		List<IdType> ids = new ArrayList<>();
 		for (BundleEntryComponent next : b.getEntry()) {
 			IdType toAdd = next.getResource().getIdElement().toUnqualifiedVersionless();
 			ids.add(toAdd);
@@ -1892,7 +1887,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
 		try {
 			assertEquals(200, response.getStatusLine().getStatusCode());
 			String output = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
-			IOUtils.closeQuietly(response.getEntity().getContent());
+			response.getEntity().getContent().close();
 			ourLog.info(output);
 			List<IIdType> ids = toUnqualifiedVersionlessIds(myFhirCtx.newXmlParser().parseResource(Bundle.class, output));
 			ourLog.info(ids.toString());
@@ -1907,7 +1902,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
 		try {
 			assertEquals(200, response.getStatusLine().getStatusCode());
 			String output = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
-			IOUtils.closeQuietly(response.getEntity().getContent());
+			response.getEntity().getContent().close();
 			ourLog.info(output);
 			List<IIdType> ids = toUnqualifiedVersionlessIds(myFhirCtx.newXmlParser().parseResource(Bundle.class, output));
 			ourLog.info(ids.toString());
@@ -1926,7 +1921,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
 		// try {
 		// assertEquals(200, response.getStatusLine().getStatusCode());
 		// String output = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
-		// IOUtils.closeQuietly(response.getEntity().getContent());
+		// response.getEntity().getContent().close();
 		// ourLog.info(output);
 		// List<IIdType> ids = toUnqualifiedVersionlessIds(myFhirCtx.newXmlParser().parseResource(Bundle.class, output));
 		// ourLog.info(ids.toString());
@@ -1940,7 +1935,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
 		// try {
 		// assertEquals(200, response.getStatusLine().getStatusCode());
 		// String output = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
-		// IOUtils.closeQuietly(response.getEntity().getContent());
+		// response.getEntity().getContent().close();
 		// ourLog.info(output);
 		// List<IIdType> ids = toUnqualifiedVersionlessIds(myFhirCtx.newXmlParser().parseResource(Bundle.class, output));
 		// ourLog.info(ids.toString());
@@ -1964,7 +1959,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {

 		assertEquals(53, inputBundle.getEntry().size());

-		Set<String> allIds = new TreeSet<String>();
+		Set<String> allIds = new TreeSet<>();
 		for (BundleEntryComponent nextEntry : inputBundle.getEntry()) {
 			nextEntry.getRequest().setMethod(HTTPVerb.PUT);
 			nextEntry.getRequest().setUrl(nextEntry.getResource().getId());
@@ -1986,7 +1981,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {

 		ourLog.info(myFhirCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(responseBundle));

-		List<String> ids = new ArrayList<String>();
+		List<String> ids = new ArrayList<>();
 		for (BundleEntryComponent nextEntry : responseBundle.getEntry()) {
 			ids.add(nextEntry.getResource().getIdElement().toUnqualifiedVersionless().getValue());
 		}
@@ -1995,7 +1990,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {

 		assertThat(responseBundle.getEntry().size(), lessThanOrEqualTo(25));

-		TreeSet<String> idsSet = new TreeSet<String>();
+		TreeSet<String> idsSet = new TreeSet<>();
 		for (int i = 0; i < responseBundle.getEntry().size(); i++) {
 			for (BundleEntryComponent nextEntry : responseBundle.getEntry()) {
 				idsSet.add(nextEntry.getResource().getIdElement().toUnqualifiedVersionless().getValue());
@@ -2118,13 +2113,10 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
 		HttpPost post = new HttpPost(ourServerBase + "/Patient/" + JpaConstants.OPERATION_VALIDATE);
 		post.setEntity(new StringEntity(input, ContentType.APPLICATION_JSON));

-		CloseableHttpResponse resp = ourHttpClient.execute(post);
-		try {
+		try (CloseableHttpResponse resp = ourHttpClient.execute(post)) {
 			String respString = IOUtils.toString(resp.getEntity().getContent(), Charsets.UTF_8);
 			ourLog.info(respString);
 			assertEquals(200, resp.getStatusLine().getStatusCode());
-		} finally {
-			IOUtils.closeQuietly(resp);
 		}
 	}

@@ -2144,14 +2136,11 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
 			HttpPost post = new HttpPost(ourServerBase + "/Patient/$validate");
 			post.setEntity(new StringEntity(input, ContentType.APPLICATION_JSON));

-			CloseableHttpResponse resp = ourHttpClient.execute(post);
-			try {
+			try (CloseableHttpResponse resp = ourHttpClient.execute(post)) {
 				String respString = IOUtils.toString(resp.getEntity().getContent(), Charsets.UTF_8);
 				ourLog.info(respString);
 				assertThat(respString, containsString("Unknown extension http://hl7.org/fhir/ValueSet/v3-ActInvoiceGroupCode"));
 				assertEquals(200, resp.getStatusLine().getStatusCode());
-			} finally {
-				IOUtils.closeQuietly(resp);
 			}
 		} finally {
 			ourRestServer.unregisterInterceptor(interceptor);
@@ -2247,15 +2236,12 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
 		myResourceCountsCache.update();

 		HttpGet get = new HttpGet(ourServerBase + "/$get-resource-counts");
-		CloseableHttpResponse response = ourHttpClient.execute(get);
-		try {
+		try (CloseableHttpResponse response = ourHttpClient.execute(get)) {
 			assertEquals(200, response.getStatusLine().getStatusCode());
 			String output = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
-			IOUtils.closeQuietly(response.getEntity().getContent());
+			response.getEntity().getContent().close();
 			ourLog.info(output);
 			assertThat(output, containsString("<parameter><name value=\"Patient\"/><valueInteger value=\""));
-		} finally {
-			response.close();
 		}
 	}

@@ -2300,13 +2286,10 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
 	public void testHasParameterNoResults() throws Exception {

 		HttpGet get = new HttpGet(ourServerBase + "/AllergyIntolerance?_has=Provenance:target:userID=12345");
-		CloseableHttpResponse response = ourHttpClient.execute(get);
-		try {
+		try (CloseableHttpResponse response = ourHttpClient.execute(get)) {
 			String resp = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
 			ourLog.info(resp);
 			assertThat(resp, containsString("Invalid _has parameter syntax: _has"));
-		} finally {
-			IOUtils.closeQuietly(response);
 		}

 	}

@@ -2644,7 +2627,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
 			assertEquals(200, response.getStatusLine().getStatusCode());
 			assertThat(resp, stringContainsInOrder("THIS IS THE DESC"));
 		} finally {
-			IOUtils.closeQuietly(response.getEntity().getContent());
+			response.getEntity().getContent().close();
 			response.close();
 		}
 	}

@@ -2798,7 +2781,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
 			assertEquals(200, response.getStatusLine().getStatusCode());
 			assertThat(resp, containsString("Underweight"));
 		} finally {
-			IOUtils.closeQuietly(response.getEntity().getContent());
+			response.getEntity().getContent().close();
 			response.close();
 		}

@@ -2820,14 +2803,11 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
 		patch.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RETURN + '=' + Constants.HEADER_PREFER_RETURN_OPERATION_OUTCOME);
 		patch.setEntity(new StringEntity("[ { \"op\":\"replace\", \"path\":\"/active\", \"value\":false } ]", ContentType.parse(Constants.CT_JSON_PATCH + Constants.CHARSET_UTF8_CTSUFFIX)));

-		CloseableHttpResponse response = ourHttpClient.execute(patch);
-		try {
+		try (CloseableHttpResponse response = ourHttpClient.execute(patch)) {
 			assertEquals(200, response.getStatusLine().getStatusCode());
 			String responseString = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
 			assertThat(responseString, containsString("<OperationOutcome"));
 			assertThat(responseString, containsString("INFORMATION"));
-		} finally {
-			response.close();
 		}

 		Patient newPt = ourClient.read().resource(Patient.class).withId(pid1.getIdPart()).execute();
@@ -2851,14 +2831,11 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
 		patch.setEntity(new StringEntity("[ { \"op\":\"replace\", \"path\":\"/active\", \"value\":false } ]", ContentType.parse(Constants.CT_JSON_PATCH + Constants.CHARSET_UTF8_CTSUFFIX)));
 		patch.addHeader("If-Match", "W/\"9\"");

-		CloseableHttpResponse response = ourHttpClient.execute(patch);
-		try {
+		try (CloseableHttpResponse response = ourHttpClient.execute(patch)) {
 			assertEquals(409, response.getStatusLine().getStatusCode());
 			String responseString = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
 			assertThat(responseString, containsString("<OperationOutcome"));
 			assertThat(responseString, containsString("<diagnostics value=\"Version 9 is not the most recent version of this resource, unable to apply patch\"/>"));
-		} finally {
-			response.close();
 		}

 		Patient newPt = ourClient.read().resource(Patient.class).withId(pid1.getIdPart()).execute();
@@ -2883,14 +2860,11 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
 		patch.addHeader("If-Match", "W/\"1\"");
 		patch.setEntity(new StringEntity("[ { \"op\":\"replace\", \"path\":\"/active\", \"value\":false } ]", ContentType.parse(Constants.CT_JSON_PATCH + Constants.CHARSET_UTF8_CTSUFFIX)));
patch.setEntity(new StringEntity("[ { \"op\":\"replace\", \"path\":\"/active\", \"value\":false } ]", ContentType.parse(Constants.CT_JSON_PATCH + Constants.CHARSET_UTF8_CTSUFFIX)));
|
||||||
|
|
||||||
CloseableHttpResponse response = ourHttpClient.execute(patch);
|
try (CloseableHttpResponse response = ourHttpClient.execute(patch)) {
|
||||||
try {
|
|
||||||
assertEquals(200, response.getStatusLine().getStatusCode());
|
assertEquals(200, response.getStatusLine().getStatusCode());
|
||||||
String responseString = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
|
String responseString = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
|
||||||
assertThat(responseString, containsString("<OperationOutcome"));
|
assertThat(responseString, containsString("<OperationOutcome"));
|
||||||
assertThat(responseString, containsString("INFORMATION"));
|
assertThat(responseString, containsString("INFORMATION"));
|
||||||
} finally {
|
|
||||||
response.close();
|
|
||||||
}
|
}
|
||||||
|
|
||||||
Patient newPt = ourClient.read().resource(Patient.class).withId(pid1.getIdPart()).execute();
|
Patient newPt = ourClient.read().resource(Patient.class).withId(pid1.getIdPart()).execute();
|
||||||
|
@@ -2915,14 +2889,11 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
      patch.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RETURN + '=' + Constants.HEADER_PREFER_RETURN_OPERATION_OUTCOME);
      patch.setEntity(new StringEntity(patchString, ContentType.parse(Constants.CT_XML_PATCH + Constants.CHARSET_UTF8_CTSUFFIX)));

-     CloseableHttpResponse response = ourHttpClient.execute(patch);
-     try {
+     try (CloseableHttpResponse response = ourHttpClient.execute(patch)) {
         assertEquals(200, response.getStatusLine().getStatusCode());
         String responseString = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
         assertThat(responseString, containsString("<OperationOutcome"));
         assertThat(responseString, containsString("INFORMATION"));
-     } finally {
-        response.close();
      }

      Patient newPt = ourClient.read().resource(Patient.class).withId(pid1.getIdPart()).execute();
@@ -2987,11 +2958,11 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {

      pat = new Patient();
      pat.addIdentifier().setSystem("urn:system").setValue("testReadAllInstancesOfType_01");
-     ourClient.create().resource(pat).prettyPrint().encodedXml().execute().getId();
+     ourClient.create().resource(pat).prettyPrint().encodedXml().execute();

      pat = new Patient();
      pat.addIdentifier().setSystem("urn:system").setValue("testReadAllInstancesOfType_02");
-     ourClient.create().resource(pat).prettyPrint().encodedXml().execute().getId();
+     ourClient.create().resource(pat).prettyPrint().encodedXml().execute();

      {
         Bundle returned = ourClient.search().forResource(Patient.class).encodedXml().returnBundle(Bundle.class).execute();
@@ -3115,7 +3086,6 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
      Patient actual = ourClient.read(Patient.class, new UriDt(newId.getValue()));
      assertEquals(1, actual.getContained().size());

-     //@formatter:off
      Bundle b = ourClient
         .search()
         .forResource("Patient")
@@ -3123,7 +3093,6 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
         .prettyPrint()
         .returnBundle(Bundle.class)
         .execute();
-     //@formatter:on

      assertEquals(1, b.getEntry().size());

   }
@@ -3873,6 +3842,70 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
      assertEquals(uuid1, uuid2);
   }

+  @Test
+  public void testSearchReusesBeforeExpiry() {
+     List<IBaseResource> resources = new ArrayList<IBaseResource>();
+     for (int i = 0; i < 50; i++) {
+        Organization org = new Organization();
+        org.setName("HELLO");
+        resources.add(org);
+     }
+     ourClient.transaction().withResources(resources).prettyPrint().encodedXml().execute();
+
+     /*
+      * First, make sure that we don't reuse a search if
+      * it's not marked with an expiry
+      */
+     {
+        myDaoConfig.setReuseCachedSearchResultsForMillis(10L);
+        Bundle result1 = ourClient
+           .search()
+           .forResource("Organization")
+           .returnBundle(Bundle.class)
+           .execute();
+        final String uuid1 = toSearchUuidFromLinkNext(result1);
+        sleepOneClick();
+        Bundle result2 = ourClient
+           .search()
+           .forResource("Organization")
+           .returnBundle(Bundle.class)
+           .execute();
+        final String uuid2 = toSearchUuidFromLinkNext(result2);
+        assertNotEquals(uuid1, uuid2);
+     }
+
+     /*
+      * Now try one but mark it with an expiry time
+      * in the future
+      */
+     {
+        myDaoConfig.setReuseCachedSearchResultsForMillis(1000L);
+        Bundle result1 = ourClient
+           .search()
+           .forResource("Organization")
+           .returnBundle(Bundle.class)
+           .execute();
+        final String uuid1 = toSearchUuidFromLinkNext(result1);
+        runInTransaction(() -> {
+           Search search = mySearchEntityDao.findByUuidAndFetchIncludes(uuid1).orElseThrow(() -> new IllegalStateException());
+           search.setExpiryOrNull(DateUtils.addSeconds(new Date(), -2));
+           mySearchEntityDao.save(search);
+        });
+        sleepOneClick();
+        Bundle result2 = ourClient
+           .search()
+           .forResource("Organization")
+           .returnBundle(Bundle.class)
+           .execute();
+
+        // Expiry doesn't affect reusability
+        final String uuid2 = toSearchUuidFromLinkNext(result2);
+        assertEquals(uuid1, uuid2);
+     }
+  }
+
+
   @Test
   public void testSearchReusesResultsDisabled() {
      List<IBaseResource> resources = new ArrayList<>();
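A minimal sketch of the reuse rule this test exercises, under assumed names (this is not the actual HAPI FHIR search cache source): reuse is governed by the creation-time window configured via setReuseCachedSearchResultsForMillis, while the expiry field only drives deletion by the stale-search sweeper.

    import java.util.Date;

    // Illustrative sketch only; the class and method names are assumptions
    // derived from the behavior asserted above.
    class SearchReusePolicy {
       // A cached search is reusable while it is still inside the reuse window.
       // Its expiry timestamp does not block reuse; it only schedules deletion.
       static boolean canReuse(Date theCreated, long theReuseMillis, long theNow) {
          return theReuseMillis > 0 && theCreated.getTime() + theReuseMillis >= theNow;
       }
    }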
@@ -4995,13 +5028,13 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
      String encoded = myFhirCtx.newJsonParser().encodeResourceToString(p);

      HttpPut put = new HttpPut(ourServerBase + "/Patient/A");
-     put.setEntity(new StringEntity(encoded, "application/fhir+json", "UTF-8"));
+     put.setEntity(new StringEntity(encoded, ContentType.create("application/fhir+json", "UTF-8")));

      CloseableHttpResponse response = ourHttpClient.execute(put);
      try {
         assertEquals(201, response.getStatusLine().getStatusCode());
      } finally {
-        IOUtils.closeQuietly(response);
+        response.close();
      }

      p = new Patient();
@@ -5010,13 +5043,13 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
      encoded = myFhirCtx.newJsonParser().encodeResourceToString(p);

      put = new HttpPut(ourServerBase + "/Patient/A");
-     put.setEntity(new StringEntity(encoded, "application/fhir+json", "UTF-8"));
+     put.setEntity(new StringEntity(encoded, ContentType.create("application/fhir+json", "UTF-8")));

      response = ourHttpClient.execute(put);
      try {
         assertEquals(200, response.getStatusLine().getStatusCode());
      } finally {
-        IOUtils.closeQuietly(response);
+        response.close();
      }

   }
@@ -5100,8 +5133,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {

      HttpPut post = new HttpPut(ourServerBase + "/Patient/A2");
      post.setEntity(new StringEntity(resource, ContentType.create(Constants.CT_FHIR_XML, "UTF-8")));
-     CloseableHttpResponse response = ourHttpClient.execute(post);
-     try {
+     try (CloseableHttpResponse response = ourHttpClient.execute(post)) {
         String responseString = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
         ourLog.info(responseString);
         assertEquals(400, response.getStatusLine().getStatusCode());
@@ -5109,8 +5141,6 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
         assertEquals(
            "Can not update resource, resource body must contain an ID element which matches the request URL for update (PUT) operation - Resource body ID of \"333\" does not match URL ID of \"A2\"",
            oo.getIssue().get(0).getDiagnostics());
-     } finally {
-        response.close();
      }
   }

@@ -5119,7 +5149,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
      String input = IOUtils.toString(getClass().getResourceAsStream("/dstu3-person.json"), StandardCharsets.UTF_8);

      try {
-        MethodOutcome resp = ourClient.update().resource(input).withId("Patient/PERSON1").execute();
+        ourClient.update().resource(input).withId("Patient/PERSON1").execute();
      } catch (InvalidRequestException e) {
         assertEquals("", e.getMessage());
      }
@@ -5139,7 +5169,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
         assertThat(resp, containsString("No resource supplied for $validate operation (resource is required unless mode is \"delete\")"));
         assertEquals(400, response.getStatusLine().getStatusCode());
      } finally {
-        IOUtils.closeQuietly(response.getEntity().getContent());
+        response.getEntity().getContent().close();
         response.close();
      }
   }
@@ -5163,7 +5193,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
         assertThat(resp, containsString("No resource supplied for $validate operation (resource is required unless mode is \"delete\")"));
         assertEquals(400, response.getStatusLine().getStatusCode());
      } finally {
-        IOUtils.closeQuietly(response.getEntity().getContent());
+        response.getEntity().getContent().close();
         response.close();
      }
   }
@@ -5189,7 +5219,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
         assertThat(resp,
            stringContainsInOrder(">ERROR<", "[Patient.contact[0]]", "<pre>SHALL at least contain a contact's details or a reference to an organization", "<issue><severity value=\"error\"/>"));
      } finally {
-        IOUtils.closeQuietly(response.getEntity().getContent());
+        response.getEntity().getContent().close();
         response.close();
      }
   }
@@ -5217,7 +5247,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
         ourLog.info(resp);
         assertEquals(200, response.getStatusLine().getStatusCode());
      } finally {
-        IOUtils.closeQuietly(response.getEntity().getContent());
+        response.getEntity().getContent().close();
         response.close();
      }
   }
@@ -5241,7 +5271,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
         assertEquals(412, response.getStatusLine().getStatusCode());
         assertThat(resp, containsString("SHALL at least contain a contact's details or a reference to an organization"));
      } finally {
-        IOUtils.closeQuietly(response.getEntity().getContent());
+        response.getEntity().getContent().close();
         response.close();
      }
   }
@@ -5270,7 +5300,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
         ourLog.info(resp);
         assertEquals(200, response.getStatusLine().getStatusCode());
      } finally {
-        IOUtils.closeQuietly(response.getEntity().getContent());
+        response.getEntity().getContent().close();
         response.close();
      }
   }
@@ -5298,7 +5328,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
         assertThat(resp, not(containsString("warn")));
         assertThat(resp, not(containsString("error")));
      } finally {
-        IOUtils.closeQuietly(response.getEntity().getContent());
+        response.getEntity().getContent().close();
         response.close();
      }
   }
@@ -5325,7 +5355,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
         stringContainsInOrder("<issue>", "<severity value=\"information\"/>", "<code value=\"informational\"/>", "<diagnostics value=\"No issues detected during validation\"/>",
            "</issue>"));
      } finally {
-        IOUtils.closeQuietly(response.getEntity().getContent());
+        response.getEntity().getContent().close();
         response.close();
      }
   }
@@ -5358,7 +5388,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
         assertThat(resp, containsString("</contains>"));
         assertThat(resp, containsString("</expansion>"));
      } finally {
-        IOUtils.closeQuietly(response.getEntity().getContent());
+        response.getEntity().getContent().close();
         response.close();
      }

@@ -5378,7 +5408,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
            "<display value=\"Systolic blood pressure at First encounter\"/>"));
         //@formatter:on
      } finally {
-        IOUtils.closeQuietly(response.getEntity().getContent());
+        response.getEntity().getContent().close();
         response.close();
      }

@@ -9,6 +9,7 @@ import ca.uhn.fhir.jpa.entity.SearchResult;
 import ca.uhn.fhir.jpa.entity.SearchTypeEnum;
 import ca.uhn.fhir.jpa.model.entity.ResourceTable;
 import ca.uhn.fhir.jpa.model.search.SearchStatusEnum;
+import ca.uhn.fhir.jpa.search.StaleSearchDeletingSvcImpl;
 import ca.uhn.fhir.jpa.search.cache.DatabaseSearchCacheSvcImpl;
 import ca.uhn.fhir.rest.gclient.IClientExecutable;
 import ca.uhn.fhir.rest.gclient.IQuery;
@@ -28,6 +29,7 @@ import org.springframework.test.util.AopTestUtils;
 import java.util.Date;
 import java.util.UUID;

+import static ca.uhn.fhir.jpa.util.TestUtil.sleepAtLeast;
 import static org.hamcrest.Matchers.*;
 import static org.junit.Assert.*;

@@ -68,13 +70,11 @@ public class StaleSearchDeletingSvcR4Test extends BaseResourceProviderR4Test {
      myPatientDao.create(pt1, mySrd).getId().toUnqualifiedVersionless();
   }

-  //@formatter:off
   IClientExecutable<IQuery<Bundle>, Bundle> search = ourClient
      .search()
      .forResource(Patient.class)
      .where(Patient.NAME.matches().value("Everything"))
      .returnBundle(Bundle.class);
-  //@formatter:on

   Bundle resp1 = search.execute();

@@ -172,6 +172,40 @@ public class StaleSearchDeletingSvcR4Test extends BaseResourceProviderR4Test {

   }

+  @Test
+  public void testDontDeleteSearchBeforeExpiry() {
+     DatabaseSearchCacheSvcImpl.setMaximumResultsToDeleteForUnitTest(10);
+
+     runInTransaction(() -> {
+        Search search = new Search();
+
+        // Expires in one second, so it should not be deleted right away,
+        // but it should be deleted if we try again after one second...
+        search.setExpiryOrNull(DateUtils.addMilliseconds(new Date(), 1000));
+
+        search.setStatus(SearchStatusEnum.FINISHED);
+        search.setUuid(UUID.randomUUID().toString());
+        search.setCreated(DateUtils.addDays(new Date(), -10000));
+        search.setSearchType(SearchTypeEnum.SEARCH);
+        search.setResourceType("Patient");
+        search.setSearchLastReturned(DateUtils.addDays(new Date(), -10000));
+        search = mySearchEntityDao.save(search);
+     });
+
+     // Should not delete right now
+     assertEquals(1, mySearchEntityDao.count());
+     myStaleSearchDeletingSvc.pollForStaleSearchesAndDeleteThem();
+     assertEquals(1, mySearchEntityDao.count());
+
+     sleepAtLeast(1100);
+
+     // Now it's time to delete
+     myStaleSearchDeletingSvc.pollForStaleSearchesAndDeleteThem();
+     assertEquals(0, mySearchEntityDao.count());
+
+  }
+
   @AfterClass
   public static void afterClassClearContext() {
      TestUtil.clearAllStaticFieldsForUnitTest();
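The new test pins down the deletion rule: a stale search survives the sweep until its explicit expiry has passed. A hedged sketch of that predicate with assumed names (the real logic lives in the search cache service, not in this helper):

    import java.util.Date;

    // Illustrative predicate matching the behavior the test asserts; the
    // names here are assumptions, not HAPI FHIR API.
    class StaleSearchSweepRule {
       // Deletable once the search is stale (not returned since the cutoff)
       // AND, when an explicit expiry is set, that expiry has already passed.
       static boolean isDeletable(Date theLastReturned, Date theExpiryOrNull, Date theStaleCutoff, Date theNow) {
          boolean stale = theLastReturned.before(theStaleCutoff);
          boolean expired = theExpiryOrNull == null || theExpiryOrNull.before(theNow);
          return stale && expired;
       }
    }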
@@ -0,0 +1,207 @@
package ca.uhn.fhir.jpa.sched;

import ca.uhn.fhir.jpa.model.sched.FireAtIntervalJob;
import ca.uhn.fhir.jpa.model.sched.ISchedulerService;
import ca.uhn.fhir.jpa.model.sched.ScheduledJobDefinition;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.quartz.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.BeansException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.util.ProxyUtils;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import org.springframework.test.util.AopTestUtils;

import static ca.uhn.fhir.jpa.util.TestUtil.sleepAtLeast;
import static org.hamcrest.Matchers.*;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;

@ContextConfiguration(classes = SchedulerServiceImplTest.TestConfiguration.class)
@RunWith(SpringJUnit4ClassRunner.class)
public class SchedulerServiceImplTest {

   private static final Logger ourLog = LoggerFactory.getLogger(SchedulerServiceImplTest.class);

   @Autowired
   private ISchedulerService mySvc;
   private static long ourTaskDelay;

   @Before
   public void before() {
      ourTaskDelay = 0;
   }

   @Test
   public void testScheduleTask() {

      ScheduledJobDefinition def = new ScheduledJobDefinition()
         .setId(CountingJob.class.getName())
         .setJobClass(CountingJob.class);

      mySvc.scheduleFixedDelay(100, false, def);

      sleepAtLeast(1000);

      ourLog.info("Fired {} times", CountingJob.ourCount);

      assertThat(CountingJob.ourCount, greaterThan(3));
      assertThat(CountingJob.ourCount, lessThan(20));
   }

   @Test
   public void testStopAndStartService() throws SchedulerException {

      ScheduledJobDefinition def = new ScheduledJobDefinition()
         .setId(CountingJob.class.getName())
         .setJobClass(CountingJob.class);

      SchedulerServiceImpl svc = AopTestUtils.getTargetObject(mySvc);
      svc.stop();
      svc.start();
      svc.contextStarted(null);

      mySvc.scheduleFixedDelay(100, false, def);

      sleepAtLeast(1000);

      ourLog.info("Fired {} times", CountingJob.ourCount);

      assertThat(CountingJob.ourCount, greaterThan(3));
      assertThat(CountingJob.ourCount, lessThan(20));
   }

   @Test
   public void testScheduleTaskLongRunningDoesntRunConcurrently() {

      ScheduledJobDefinition def = new ScheduledJobDefinition()
         .setId(CountingJob.class.getName())
         .setJobClass(CountingJob.class);
      ourTaskDelay = 500;

      mySvc.scheduleFixedDelay(100, false, def);

      sleepAtLeast(1000);

      ourLog.info("Fired {} times", CountingJob.ourCount);

      assertThat(CountingJob.ourCount, greaterThanOrEqualTo(1));
      assertThat(CountingJob.ourCount, lessThan(5));
   }

   @Test
   public void testIntervalJob() {

      ScheduledJobDefinition def = new ScheduledJobDefinition()
         .setId(CountingIntervalJob.class.getName())
         .setJobClass(CountingIntervalJob.class);
      ourTaskDelay = 500;

      mySvc.scheduleFixedDelay(100, false, def);

      sleepAtLeast(2000);

      ourLog.info("Fired {} times", CountingIntervalJob.ourCount);

      assertThat(CountingIntervalJob.ourCount, greaterThanOrEqualTo(2));
      assertThat(CountingIntervalJob.ourCount, lessThan(6));
   }

   @After
   public void after() throws SchedulerException {
      CountingJob.ourCount = 0;
      CountingIntervalJob.ourCount = 0;
      mySvc.purgeAllScheduledJobsForUnitTest();
   }

   @DisallowConcurrentExecution
   public static class CountingJob implements Job, ApplicationContextAware {

      private static int ourCount;

      @Autowired
      @Qualifier("stringBean")
      private String myStringBean;
      private ApplicationContext myAppCtx;

      @Override
      public void execute(JobExecutionContext theContext) {
         if (!"String beans are good.".equals(myStringBean)) {
            fail("Did not autowire stringBean correctly, found: " + myStringBean);
         }
         if (myAppCtx == null) {
            fail("Did not populate appctx");
         }
         if (ourTaskDelay > 0) {
            ourLog.info("Job has fired, going to sleep for {}ms", ourTaskDelay);
            sleepAtLeast(ourTaskDelay);
            ourLog.info("Done sleeping");
         } else {
            ourLog.info("Job has fired...");
         }
         ourCount++;
      }

      @Override
      public void setApplicationContext(ApplicationContext theAppCtx) throws BeansException {
         myAppCtx = theAppCtx;
      }
   }


   @DisallowConcurrentExecution
   @PersistJobDataAfterExecution
   public static class CountingIntervalJob extends FireAtIntervalJob {

      private static int ourCount;

      @Autowired
      @Qualifier("stringBean")
      private String myStringBean;
      private ApplicationContext myAppCtx;

      public CountingIntervalJob() {
         super(500);
      }

      @Override
      public void doExecute(JobExecutionContext theContext) {
         ourLog.info("Job has fired, going to sleep for {}ms", ourTaskDelay);
         sleepAtLeast(ourTaskDelay);
         ourCount++;
      }

   }


   @Configuration
   public static class TestConfiguration {

      @Bean
      public ISchedulerService schedulerService() {
         return new SchedulerServiceImpl();
      }

      @Bean
      public String stringBean() {
         return "String beans are good.";
      }

      @Bean
      public AutowiringSpringBeanJobFactory springBeanJobFactory() {
         return new AutowiringSpringBeanJobFactory();
      }

   }
}
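For reference, scheduling a job through the new API mirrors what the test does: build a ScheduledJobDefinition and hand it to ISchedulerService. A minimal consumer sketch (NightlySweepScheduler and SweepJob are hypothetical; the API calls are the ones exercised above):

    import ca.uhn.fhir.jpa.model.sched.ISchedulerService;
    import ca.uhn.fhir.jpa.model.sched.ScheduledJobDefinition;
    import org.quartz.Job;
    import org.quartz.JobExecutionContext;
    import org.springframework.beans.factory.annotation.Autowired;

    // Hypothetical consumer of the scheduler API exercised by the test above.
    public class NightlySweepScheduler {

       public static class SweepJob implements Job {
          @Override
          public void execute(JobExecutionContext theContext) {
             // periodic work goes here
          }
       }

       @Autowired
       private ISchedulerService mySchedulerService;

       public void start() {
          ScheduledJobDefinition def = new ScheduledJobDefinition()
             .setId(SweepJob.class.getName())
             .setJobClass(SweepJob.class);
          // false = local (non-clustered) task, firing every 60 seconds
          mySchedulerService.scheduleFixedDelay(60000, false, def);
       }
    }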
@@ -80,7 +80,4 @@ public class SubscriptionTestUtil {
      subscriber.setEmailSender(myEmailSender);
   }

-  public IEmailSender getEmailSender() {
-     return myEmailSender;
-  }
 }
@@ -39,6 +39,7 @@ public class EmailSubscriptionDstu2Test extends BaseResourceProviderDstu2Test {
   @Autowired
   private SubscriptionTestUtil mySubscriptionTestUtil;

+  @Override
   @After
   public void after() throws Exception {
      ourLog.info("** AFTER **");
@@ -408,6 +408,19 @@ public class InMemorySubscriptionMatcherR4Test {
      }
   }

+  @Test
+  public void testReferenceAlias() {
+     Observation obs = new Observation();
+     obs.setId("Observation/123");
+     obs.getSubject().setReference("Patient/123");
+
+     SearchParameterMap params;
+
+     params = new SearchParameterMap();
+     params.add(Observation.SP_PATIENT, new ReferenceParam("Patient/123"));
+     assertMatched(obs, params);
+  }
+
   @Test
   public void testSearchResourceReferenceOnlyCorrectPath() {
      Organization org = new Organization();
@@ -2,6 +2,7 @@ package ca.uhn.fhir.jpa.subscription.resthook;

 import ca.uhn.fhir.context.FhirContext;
 import ca.uhn.fhir.jpa.dao.DaoConfig;
+import ca.uhn.fhir.jpa.model.sched.ISchedulerService;
 import ca.uhn.fhir.jpa.provider.SubscriptionTriggeringProvider;
 import ca.uhn.fhir.jpa.provider.dstu3.BaseResourceProviderDstu3Test;
 import ca.uhn.fhir.jpa.subscription.SubscriptionTestUtil;
@@ -55,6 +56,10 @@ public class SubscriptionTriggeringDstu3Test extends BaseResourceProviderDstu3Te

   @Autowired
   private SubscriptionTestUtil mySubscriptionTestUtil;
+  @Autowired
+  private SubscriptionTriggeringSvcImpl mySubscriptionTriggeringSvc;
+  @Autowired
+  private ISchedulerService mySchedulerService;

   @After
   public void afterUnregisterRestHookListener() {
@@ -80,9 +85,6 @@ public class SubscriptionTriggeringDstu3Test extends BaseResourceProviderDstu3Te
      myDaoConfig.setSearchPreFetchThresholds(new DaoConfig().getSearchPreFetchThresholds());
   }

-  @Autowired
-  private SubscriptionTriggeringSvcImpl mySubscriptionTriggeringSvc;
-
   @Before
   public void beforeRegisterRestHookListener() {
      mySubscriptionTestUtil.registerRestHookInterceptor();
@@ -98,6 +100,8 @@ public class SubscriptionTriggeringDstu3Test extends BaseResourceProviderDstu3Te
      ourCreatedPatients.clear();
      ourUpdatedPatients.clear();
      ourContentTypes.clear();
+
+     mySchedulerService.logStatus();
   }

   private Subscription createSubscription(String theCriteria, String thePayload, String theEndpoint) throws InterruptedException {
@@ -1,6 +1,5 @@
 package ca.uhn.fhir.jpa.util;

-import org.hl7.fhir.dstu3.model.CapabilityStatement;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
@@ -8,29 +7,31 @@ import org.junit.runner.RunWith;
 import org.mockito.Mock;
 import org.mockito.junit.MockitoJUnitRunner;

+import java.util.HashMap;
+import java.util.Map;
 import java.util.concurrent.Callable;
-import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.atomic.AtomicLong;

 import static org.junit.Assert.assertEquals;
 import static org.mockito.Mockito.when;

 @RunWith(MockitoJUnitRunner.class)
-public class SingleItemLoadingCacheTest {
+public class ResourceCountCacheTest {

   @Mock
-  private Callable<CapabilityStatement> myFetcher;
+  private Callable<Map<String, Long>> myFetcher;

   @After
   public void after() {
-     SingleItemLoadingCache.setNowForUnitTest(null);
+     ResourceCountCache.setNowForUnitTest(null);
   }

   @Before
   public void before() throws Exception {
-     AtomicInteger id = new AtomicInteger();
+     AtomicLong id = new AtomicLong();
      when(myFetcher.call()).thenAnswer(t->{
-        CapabilityStatement retVal = new CapabilityStatement();
-        retVal.setId("" + id.incrementAndGet());
+        Map<String, Long> retVal = new HashMap<>();
+        retVal.put("A", id.incrementAndGet());
         return retVal;
      });
   }
@@ -38,36 +39,36 @@ public class SingleItemLoadingCacheTest {
   @Test
   public void testCache() {
      long start = System.currentTimeMillis();
-     SingleItemLoadingCache.setNowForUnitTest(start);
+     ResourceCountCache.setNowForUnitTest(start);

      // Cache is initialized on startup
-     SingleItemLoadingCache<CapabilityStatement> cache = new SingleItemLoadingCache<>(myFetcher);
+     ResourceCountCache cache = new ResourceCountCache(myFetcher);
      cache.setCacheMillis(500);
      assertEquals(null, cache.get());

      // Not time to update yet
      cache.update();
-     assertEquals("1", cache.get().getId());
+     assertEquals(Long.valueOf(1), cache.get().get("A"));

      // Wait a bit, still not time to update
-     SingleItemLoadingCache.setNowForUnitTest(start + 400);
+     ResourceCountCache.setNowForUnitTest(start + 400);
      cache.update();
-     assertEquals("1", cache.get().getId());
+     assertEquals(Long.valueOf(1), cache.get().get("A"));

      // Wait a bit more and the cache is expired
-     SingleItemLoadingCache.setNowForUnitTest(start + 800);
+     ResourceCountCache.setNowForUnitTest(start + 800);
      cache.update();
-     assertEquals("2", cache.get().getId());
+     assertEquals(Long.valueOf(2), cache.get().get("A"));

   }

   @Test
   public void testCacheWithLoadingDisabled() {
      long start = System.currentTimeMillis();
-     SingleItemLoadingCache.setNowForUnitTest(start);
+     ResourceCountCache.setNowForUnitTest(start);

      // Cache of 0 means "never load"
-     SingleItemLoadingCache<CapabilityStatement> cache = new SingleItemLoadingCache<>(myFetcher);
+     ResourceCountCache cache = new ResourceCountCache(myFetcher);
      cache.setCacheMillis(0);

      /*
@@ -79,11 +80,11 @@ public class SingleItemLoadingCacheTest {
      cache.update();
      assertEquals(null, cache.get());

-     SingleItemLoadingCache.setNowForUnitTest(start + 400);
+     ResourceCountCache.setNowForUnitTest(start + 400);
      cache.update();
      assertEquals(null, cache.get());

-     SingleItemLoadingCache.setNowForUnitTest(start + 80000);
+     ResourceCountCache.setNowForUnitTest(start + 80000);
      cache.update();
      assertEquals(null, cache.get());

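The refresh rule these tests pin down, written out as a sketch with assumed names (not the actual ResourceCountCache source): update() reloads through the fetcher on the first call, then only after cacheMillis has elapsed, and a cacheMillis of 0 disables loading entirely.

    // Illustrative sketch of the refresh rule asserted by the tests above;
    // the class and parameter names are assumptions.
    class CacheRefreshRule {
       static boolean shouldRefresh(long theCacheMillis, Long theLastLoadedOrNull, long theNow) {
          if (theCacheMillis <= 0) {
             return false; // 0 means "never load"
          }
          if (theLastLoadedOrNull == null) {
             return true; // first load
          }
          return theNow - theLastLoadedOrNull >= theCacheMillis;
       }
    }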
@@ -109,6 +109,16 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
         .addIndex("IDX_VS_CONCEPT_ORDER")
         .unique(true)
         .withColumns("VALUESET_PID", "VALUESET_ORDER");

+
+     // Account for RESTYPE_LEN column increasing from 30 to 35
+     version.onTable("HFJ_RESOURCE").modifyColumn("RES_TYPE").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 35);
+     version.onTable("HFJ_HISTORY_TAG").modifyColumn("RES_TYPE").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 35);
+     version.onTable("HFJ_RES_LINK").modifyColumn("SOURCE_RESOURCE_TYPE").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 35);
+     version.onTable("HFJ_RES_LINK").modifyColumn("TARGET_RESOURCE_TYPE").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 35);
+     version.onTable("HFJ_RES_TAG").modifyColumn("RES_TYPE").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 35);
+
   }

   protected void init400() {
@@ -112,6 +112,11 @@
      <artifactId>commons-collections4</artifactId>
   </dependency>

+  <dependency>
+     <groupId>org.quartz-scheduler</groupId>
+     <artifactId>quartz</artifactId>
+  </dependency>
+
   <!-- Java -->
   <dependency>
      <groupId>javax.annotation</groupId>
@@ -47,7 +47,7 @@ import static org.apache.commons.lang3.StringUtils.defaultString;
   @Index(name = "IDX_INDEXSTATUS", columnList = "SP_INDEX_STATUS")
 })
 public class ResourceTable extends BaseHasResource implements Serializable {
-  static final int RESTYPE_LEN = 30;
+  public static final int RESTYPE_LEN = 35;
   private static final int MAX_LANGUAGE_LENGTH = 20;
   private static final int MAX_PROFILE_LENGTH = 200;
   private static final long serialVersionUID = 1L;
@@ -199,7 +199,7 @@ public class ResourceTable extends BaseHasResource implements Serializable {
   @OneToMany(mappedBy = "myTargetResource", cascade = {}, fetch = FetchType.LAZY, orphanRemoval = false)
   @OptimisticLock(excluded = true)
   private Collection<ResourceLink> myResourceLinksAsTarget;
-  @Column(name = "RES_TYPE", length = RESTYPE_LEN)
+  @Column(name = "RES_TYPE", length = RESTYPE_LEN, nullable = false)
   @Field
   @OptimisticLock(excluded = true)
   private String myResourceType;
@@ -0,0 +1,45 @@
package ca.uhn.fhir.jpa.model.sched;

import org.quartz.DisallowConcurrentExecution;
import org.quartz.Job;
import org.quartz.JobExecutionContext;
import org.quartz.PersistJobDataAfterExecution;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@DisallowConcurrentExecution
@PersistJobDataAfterExecution
public abstract class FireAtIntervalJob implements Job {

   public static final String NEXT_EXECUTION_TIME = "NEXT_EXECUTION_TIME";
   private static final Logger ourLog = LoggerFactory.getLogger(FireAtIntervalJob.class);
   private final long myMillisBetweenExecutions;

   public FireAtIntervalJob(long theMillisBetweenExecutions) {
      myMillisBetweenExecutions = theMillisBetweenExecutions;
   }

   @Override
   public final void execute(JobExecutionContext theContext) {
      Long nextExecution = (Long) theContext.getJobDetail().getJobDataMap().get(NEXT_EXECUTION_TIME);

      if (nextExecution != null) {
         long cutoff = System.currentTimeMillis();
         if (nextExecution >= cutoff) {
            return;
         }
      }

      try {
         doExecute(theContext);
      } catch (Throwable t) {
         ourLog.error("Job threw uncaught exception", t);
      } finally {
         long newNextExecution = System.currentTimeMillis() + myMillisBetweenExecutions;
         theContext.getJobDetail().getJobDataMap().put(NEXT_EXECUTION_TIME, newNextExecution);
      }
   }

   protected abstract void doExecute(JobExecutionContext theContext);

}
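Subclasses only implement doExecute(); the final execute() persists NEXT_EXECUTION_TIME in the JobDataMap (hence @PersistJobDataAfterExecution) and silently skips any trigger that fires before that timestamp. A minimal subclass sketch (ExampleIntervalJob is hypothetical):

    import ca.uhn.fhir.jpa.model.sched.FireAtIntervalJob;
    import org.quartz.DisallowConcurrentExecution;
    import org.quartz.JobExecutionContext;
    import org.quartz.PersistJobDataAfterExecution;

    // Hypothetical subclass: even if the underlying trigger fires every 100ms,
    // the base class guarantees at most one doExecute() per 10 seconds.
    @DisallowConcurrentExecution
    @PersistJobDataAfterExecution
    public class ExampleIntervalJob extends FireAtIntervalJob {

       public ExampleIntervalJob() {
          super(10000); // minimum 10s between executions
       }

       @Override
       protected void doExecute(JobExecutionContext theContext) {
          // periodic work goes here
       }
    }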
@@ -0,0 +1,21 @@
package ca.uhn.fhir.jpa.model.sched;

import com.google.common.annotations.VisibleForTesting;
import org.quartz.SchedulerException;

public interface ISchedulerService {

   @VisibleForTesting
   void purgeAllScheduledJobsForUnitTest() throws SchedulerException;

   void logStatus();

   /**
    * @param theIntervalMillis How many milliseconds between passes should this job run
    * @param theClusteredTask  If <code>true</code>, only one instance of this task will fire across the whole cluster (when running in a clustered environment). If <code>false</code>, or if not running in a clustered environment, this task will execute locally (and should execute on all nodes of the cluster)
    * @param theJobDefinition  The Job to fire
    */
   void scheduleFixedDelay(long theIntervalMillis, boolean theClusteredTask, ScheduledJobDefinition theJobDefinition);

   boolean isStopping();
}
@@ -0,0 +1,51 @@
package ca.uhn.fhir.jpa.model.sched;

import org.apache.commons.lang3.Validate;
import org.quartz.Job;

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

public class ScheduledJobDefinition {

   private Class<? extends Job> myJobClass;
   private String myId;
   private Map<String, String> myJobData;

   public Map<String, String> getJobData() {
      Map<String, String> retVal = myJobData;
      if (retVal == null) {
         retVal = Collections.emptyMap();
      }
      return Collections.unmodifiableMap(retVal);
   }

   public Class<? extends Job> getJobClass() {
      return myJobClass;
   }

   public ScheduledJobDefinition setJobClass(Class<? extends Job> theJobClass) {
      myJobClass = theJobClass;
      return this;
   }

   public String getId() {
      return myId;
   }

   public ScheduledJobDefinition setId(String theId) {
      myId = theId;
      return this;
   }

   public void addJobData(String thePropertyName, String thePropertyValue) {
      Validate.notBlank(thePropertyName);
      if (myJobData == null) {
         myJobData = new HashMap<>();
      }
      Validate.isTrue(myJobData.containsKey(thePropertyName) == false);
      myJobData.put(thePropertyName, thePropertyValue);
   }
}
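Definitions are identified by id, carry the Job class, and can hold string job data; addJobData() rejects blank keys and duplicate keys, and getJobData() returns an unmodifiable view. A short usage sketch (the key and value are illustrative, and ExampleIntervalJob is the hypothetical job from the sketch above):

    // Usage sketch; "exportJobId" and its value are illustrative only.
    ScheduledJobDefinition def = new ScheduledJobDefinition()
       .setId("example-export-job")
       .setJobClass(ExampleIntervalJob.class); // any org.quartz.Job works here
    def.addJobData("exportJobId", "job-0001");
    def.getJobData(); // unmodifiable; repeating addJobData("exportJobId", ...) would fail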
@@ -174,6 +174,16 @@ public class JpaConstants {
    */
   public static final String OPERATION_UPLOAD_EXTERNAL_CODE_SYSTEM = "$upload-external-code-system";

+  /**
+   * Operation name for the "$export" operation
+   */
+  public static final String OPERATION_EXPORT = "$export";
+
+  /**
+   * Operation name for the "$export-poll-status" operation
+   */
+  public static final String OPERATION_EXPORT_POLL_STATUS = "$export-poll-status";
+
   /**
    * <p>
    * This extension should be of type <code>string</code> and should be
@@ -238,5 +248,28 @@ public class JpaConstants {
    */
   public static final String EXT_META_SOURCE = "http://hapifhir.io/fhir/StructureDefinition/resource-meta-source";

+  /**
+   * Parameter for the $export operation
+   */
+  public static final String PARAM_EXPORT_POLL_STATUS_JOB_ID = "_jobId";
+
+  /**
+   * Parameter for the $export operation
+   */
+  public static final String PARAM_EXPORT_OUTPUT_FORMAT = "_outputFormat";
+
+  /**
+   * Parameter for the $export operation
+   */
+  public static final String PARAM_EXPORT_TYPE = "_type";
+
+  /**
+   * Parameter for the $export operation
+   */
+  public static final String PARAM_EXPORT_SINCE = "_since";
+
+  /**
+   * Parameter for the $export operation
+   */
+  public static final String PARAM_EXPORT_TYPE_FILTER = "_typeFilter";
 }
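Taken together, these constants describe the Bulk Data kick-off interface. A hedged client-side sketch of a kick-off request (the base URL is a placeholder; the Prefer header and the ndjson output format come from the FHIR Bulk Data specification, and a real client should URL-encode the parameter values):

    import org.apache.http.client.methods.HttpGet;

    // Sketch of a system-level $export kick-off using the constants above.
    String url = "http://example.com/fhir/" + JpaConstants.OPERATION_EXPORT
       + "?" + JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT + "=application/fhir+ndjson"
       + "&" + JpaConstants.PARAM_EXPORT_TYPE + "=Patient,Observation";
    HttpGet get = new HttpGet(url);
    get.addHeader("Prefer", "respond-async");
    // On success the server is expected to answer 202 Accepted with a
    // Content-Location header pointing at the $export-poll-status endpoint.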