Upgrade dependencies and address Semmle issues (#1554)

* Version bumps

* Fix build issues

* Address a few Semmle issues

* Work on tests

* Some test fixes

* Test fix

* Fix deletions

* Test fix

* Fix intermittent test failure

* One more dependency bump

* Add some test logging

* Remove bad import

* Add some tests

* Test updates
This commit is contained in:
James Agnew 2019-10-22 17:10:58 -04:00 committed by GitHub
parent a375f761cb
commit 464c6c5b45
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
53 changed files with 636 additions and 296 deletions

View File

@ -247,7 +247,14 @@
<groupId>org.fusesource.jansi</groupId>
<artifactId>jansi</artifactId>
</dependency>
<!-- Test Deps -->
<dependency>
<groupId>org.awaitility</groupId>
<artifactId>awaitility</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
<build>

View File

@ -55,6 +55,7 @@ import org.hl7.fhir.instance.model.api.IBaseBundle;
import org.hl7.fhir.instance.model.api.IBaseResource;
import java.io.*;
import java.nio.charset.StandardCharsets;
import java.util.*;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
@ -79,7 +80,7 @@ public class ExampleDataUploader extends BaseCommand {
}
}
private Bundle getBundleFromFileDstu2(Integer limit, File inputFile, FhirContext ctx) throws IOException, UnsupportedEncodingException {
private Bundle getBundleFromFileDstu2(Integer limit, File inputFile, FhirContext ctx) throws IOException {
Bundle bundle = new Bundle();
@ -98,13 +99,13 @@ public class ExampleDataUploader extends BaseCommand {
break;
}
int len = 0;
int len;
ByteArrayOutputStream bos = new ByteArrayOutputStream();
while ((len = zis.read(buffer)) > 0) {
bos.write(buffer, 0, len);
}
byte[] exampleBytes = bos.toByteArray();
String exampleString = new String(exampleBytes, "UTF-8");
String exampleString = new String(exampleBytes, StandardCharsets.UTF_8);
if (ourLog.isTraceEnabled()) {
ourLog.trace("Next example: " + exampleString);
@ -145,7 +146,7 @@ public class ExampleDataUploader extends BaseCommand {
}
@SuppressWarnings("unchecked")
private org.hl7.fhir.dstu3.model.Bundle getBundleFromFileDstu3(Integer limit, File inputFile, FhirContext ctx) throws IOException, UnsupportedEncodingException {
private org.hl7.fhir.dstu3.model.Bundle getBundleFromFileDstu3(Integer limit, File inputFile, FhirContext ctx) throws IOException {
org.hl7.fhir.dstu3.model.Bundle bundle = new org.hl7.fhir.dstu3.model.Bundle();
bundle.setType(BundleType.TRANSACTION);
@ -168,13 +169,13 @@ public class ExampleDataUploader extends BaseCommand {
break;
}
int len = 0;
int len;
ByteArrayOutputStream bos = new ByteArrayOutputStream();
while ((len = zis.read(buffer)) > 0) {
bos.write(buffer, 0, len);
}
byte[] exampleBytes = bos.toByteArray();
String exampleString = new String(exampleBytes, "UTF-8");
String exampleString = new String(exampleBytes, StandardCharsets.UTF_8);
if (ourLog.isTraceEnabled()) {
ourLog.trace("Next example: " + exampleString);
@ -229,7 +230,7 @@ public class ExampleDataUploader extends BaseCommand {
}
@SuppressWarnings("unchecked")
private org.hl7.fhir.r4.model.Bundle getBundleFromFileR4(Integer limit, File inputFile, FhirContext ctx) throws IOException, UnsupportedEncodingException {
private org.hl7.fhir.r4.model.Bundle getBundleFromFileR4(Integer limit, File inputFile, FhirContext ctx) throws IOException {
org.hl7.fhir.r4.model.Bundle bundle = new org.hl7.fhir.r4.model.Bundle();
bundle.setType(org.hl7.fhir.r4.model.Bundle.BundleType.TRANSACTION);
@ -252,13 +253,13 @@ public class ExampleDataUploader extends BaseCommand {
break;
}
int len = 0;
int len;
ByteArrayOutputStream bos = new ByteArrayOutputStream();
while ((len = zis.read(buffer)) > 0) {
bos.write(buffer, 0, len);
}
byte[] exampleBytes = bos.toByteArray();
String exampleString = new String(exampleBytes, "UTF-8");
String exampleString = new String(exampleBytes, StandardCharsets.UTF_8);
if (ourLog.isTraceEnabled()) {
ourLog.trace("Next example: " + exampleString);
@ -369,8 +370,7 @@ public class ExampleDataUploader extends BaseCommand {
private void processBundleDstu2(FhirContext ctx, Bundle bundle) {
Map<String, Integer> ids = new HashMap<String, Integer>();
Set<String> fullIds = new HashSet<String>();
Set<String> fullIds = new HashSet<>();
for (Iterator<Entry> iterator = bundle.getEntry().iterator(); iterator.hasNext(); ) {
Entry next = iterator.next();
@ -397,13 +397,14 @@ public class ExampleDataUploader extends BaseCommand {
}
}
}
Set<String> qualIds = new TreeSet<String>();
for (Iterator<Entry> iterator = bundle.getEntry().iterator(); iterator.hasNext(); ) {
Entry next = iterator.next();
Set<String> qualIds = new TreeSet<>();
for (Entry next : bundle.getEntry()) {
if (next.getResource().getId().getIdPart() != null) {
String nextId = next.getResource().getId().getValue();
next.getRequest().setMethod(HTTPVerbEnum.PUT);
next.getRequest().setUrl(nextId);
qualIds.add(nextId);
}
}
@ -449,15 +450,14 @@ public class ExampleDataUploader extends BaseCommand {
private void processBundleDstu3(FhirContext ctx, org.hl7.fhir.dstu3.model.Bundle bundle) {
Map<String, Integer> ids = new HashMap<String, Integer>();
Set<String> fullIds = new HashSet<String>();
Set<String> fullIds = new HashSet<>();
for (Iterator<BundleEntryComponent> iterator = bundle.getEntry().iterator(); iterator.hasNext(); ) {
BundleEntryComponent next = iterator.next();
// DataElement have giant IDs that seem invalid, need to investigate this..
if ("Subscription".equals(next.getResource().getResourceType()) || "DataElement".equals(next.getResource().getResourceType())
|| "OperationOutcome".equals(next.getResource().getResourceType()) || "OperationDefinition".equals(next.getResource().getResourceType())) {
if ("Subscription".equals(next.getResource().getResourceType().name()) || "DataElement".equals(next.getResource().getResourceType().name())
|| "OperationOutcome".equals(next.getResource().getResourceType().name()) || "OperationDefinition".equals(next.getResource().getResourceType().name())) {
ourLog.info("Skipping " + next.getResource().getResourceType() + " example");
iterator.remove();
} else {
@ -477,13 +477,13 @@ public class ExampleDataUploader extends BaseCommand {
}
}
}
Set<String> qualIds = new TreeSet<String>();
for (Iterator<BundleEntryComponent> iterator = bundle.getEntry().iterator(); iterator.hasNext(); ) {
BundleEntryComponent next = iterator.next();
Set<String> qualIds = new TreeSet<>();
for (BundleEntryComponent next : bundle.getEntry()) {
if (next.getResource().getIdElement().getIdPart() != null) {
String nextId = next.getResource().getIdElement().getValue();
next.getRequest().setMethod(HTTPVerb.PUT);
next.getRequest().setUrl(nextId);
qualIds.add(nextId);
}
}
@ -529,15 +529,14 @@ public class ExampleDataUploader extends BaseCommand {
private void processBundleR4(FhirContext ctx, org.hl7.fhir.r4.model.Bundle bundle) {
Map<String, Integer> ids = new HashMap<String, Integer>();
Set<String> fullIds = new HashSet<String>();
Set<String> fullIds = new HashSet<>();
for (Iterator<org.hl7.fhir.r4.model.Bundle.BundleEntryComponent> iterator = bundle.getEntry().iterator(); iterator.hasNext(); ) {
org.hl7.fhir.r4.model.Bundle.BundleEntryComponent next = iterator.next();
// DataElement have giant IDs that seem invalid, need to investigate this..
if ("Subscription".equals(next.getResource().getResourceType()) || "DataElement".equals(next.getResource().getResourceType())
|| "OperationOutcome".equals(next.getResource().getResourceType()) || "OperationDefinition".equals(next.getResource().getResourceType())) {
if ("Subscription".equals(next.getResource().getResourceType().name()) || "DataElement".equals(next.getResource().getResourceType().name())
|| "OperationOutcome".equals(next.getResource().getResourceType().name()) || "OperationDefinition".equals(next.getResource().getResourceType().name())) {
ourLog.info("Skipping " + next.getResource().getResourceType() + " example");
iterator.remove();
} else {
@ -557,13 +556,13 @@ public class ExampleDataUploader extends BaseCommand {
}
}
}
Set<String> qualIds = new TreeSet<String>();
for (Iterator<org.hl7.fhir.r4.model.Bundle.BundleEntryComponent> iterator = bundle.getEntry().iterator(); iterator.hasNext(); ) {
org.hl7.fhir.r4.model.Bundle.BundleEntryComponent next = iterator.next();
Set<String> qualIds = new TreeSet<>();
for (org.hl7.fhir.r4.model.Bundle.BundleEntryComponent next : bundle.getEntry()) {
if (next.getResource().getIdElement().getIdPart() != null) {
String nextId = next.getResource().getIdElement().getValue();
next.getRequest().setMethod(org.hl7.fhir.r4.model.Bundle.HTTPVerb.PUT);
next.getRequest().setUrl(nextId);
qualIds.add(nextId);
}
}
@ -635,7 +634,7 @@ public class ExampleDataUploader extends BaseCommand {
boolean cacheFile = theCommandLine.hasOption('c');
Collection<File> inputFiles = null;
Collection<File> inputFiles;
try {
inputFiles = loadFile(specUrl, filepath, cacheFile);
for (File inputFile : inputFiles) {
@ -694,13 +693,11 @@ public class ExampleDataUploader extends BaseCommand {
continue;
}
boolean found = false;
for (int j = 0; j < resources.size(); j++) {
String candidateTarget = resources.get(j).getIdElement().getValue();
if (isNotBlank(nextTarget) && nextTarget.equals(candidateTarget)) {
ourLog.info("Reflexively adding resource {} to bundle as it is a reference target", nextTarget);
subResourceList.add(resources.remove(j));
found = true;
break;
}
}

View File

@ -123,35 +123,34 @@ public class ExportConceptMapToCsvCommand extends AbstractImportExportCsvConcept
private void convertConceptMapToCsv(ConceptMap theConceptMap) {
Path path = Paths.get(file);
ourLog.info("Exporting ConceptMap to CSV: {}", path);
try (
Writer writer = Files.newBufferedWriter(path);
CSVPrinter csvPrinter = new CSVPrinter(
writer,
CSVFormat
.DEFAULT
.withRecordSeparator("\n")
.withHeader(Header.class)
.withQuoteMode(QuoteMode.ALL));
) {
for (ConceptMapGroupComponent group : theConceptMap.getGroup()) {
for (SourceElementComponent element : group.getElement()) {
for (ConceptMap.TargetElementComponent target : element.getTarget()) {
try (Writer writer = Files.newBufferedWriter(path)) {
List<String> columns = new ArrayList<>();
columns.add(defaultString(group.getSource()));
columns.add(defaultString(group.getSourceVersion()));
columns.add(defaultString(group.getTarget()));
columns.add(defaultString(group.getTargetVersion()));
columns.add(defaultString(element.getCode()));
columns.add(defaultString(element.getDisplay()));
columns.add(defaultString(target.getCode()));
columns.add(defaultString(target.getDisplay()));
columns.add(defaultString(target.getEquivalence().toCode()));
columns.add(defaultString(target.getComment()));
CSVFormat format = CSVFormat.DEFAULT
.withRecordSeparator("\n")
.withHeader(Header.class)
.withQuoteMode(QuoteMode.ALL);
try (CSVPrinter csvPrinter = new CSVPrinter(writer, format)) {
for (ConceptMapGroupComponent group : theConceptMap.getGroup()) {
for (SourceElementComponent element : group.getElement()) {
for (ConceptMap.TargetElementComponent target : element.getTarget()) {
csvPrinter.printRecord(columns);
List<String> columns = new ArrayList<>();
columns.add(defaultString(group.getSource()));
columns.add(defaultString(group.getSourceVersion()));
columns.add(defaultString(group.getTarget()));
columns.add(defaultString(group.getTargetVersion()));
columns.add(defaultString(element.getCode()));
columns.add(defaultString(element.getDisplay()));
columns.add(defaultString(target.getCode()));
columns.add(defaultString(target.getDisplay()));
columns.add(defaultString(target.getEquivalence().toCode()));
columns.add(defaultString(target.getComment()));
csvPrinter.printRecord(columns);
}
}
}
csvPrinter.flush();
}
} catch (IOException ioe) {
throw new InternalErrorException(ioe);

View File

@ -4,6 +4,7 @@ import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import ca.uhn.fhir.rest.server.RestfulServer;
import ca.uhn.fhir.rest.server.interceptor.VerboseLoggingInterceptor;
import ca.uhn.fhir.test.utilities.JettyUtil;
import ca.uhn.fhir.test.utilities.LoggingRule;
import ca.uhn.fhir.util.TestUtil;
import com.google.common.base.Charsets;
@ -24,10 +25,9 @@ import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import static org.awaitility.Awaitility.await;
import static org.junit.Assert.assertEquals;
import ca.uhn.fhir.test.utilities.JettyUtil;
public class ExportConceptMapToCsvCommandDstu3Test {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(ExportConceptMapToCsvCommandDstu3Test.class);
private static final String CM_URL = "http://example.com/conceptmap";
@ -36,7 +36,7 @@ public class ExportConceptMapToCsvCommandDstu3Test {
private static final String CS_URL_1 = "http://example.com/codesystem/1";
private static final String CS_URL_2 = "http://example.com/codesystem/2";
private static final String CS_URL_3 = "http://example.com/codesystem/3";
private static final String FILE = "./target/output.csv";
private static final String FILE = "./target/output_dstu3.csv";
private static String ourBase;
private static IGenericClient ourClient;
@ -44,13 +44,47 @@ public class ExportConceptMapToCsvCommandDstu3Test {
private static int ourPort;
private static Server ourServer;
private static String ourVersion = "dstu3";
@Rule
public LoggingRule myLoggingRule = new LoggingRule();
static {
System.setProperty("test", "true");
}
@Rule
public LoggingRule myLoggingRule = new LoggingRule();
@Test
public void testExportConceptMapToCsvCommand() throws IOException {
ourLog.debug("ConceptMap:\n" + ourCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(createConceptMap()));
App.main(new String[]{"export-conceptmap-to-csv",
"-v", ourVersion,
"-t", ourBase,
"-u", CM_URL,
"-f", FILE,
"-l"});
await().until(() -> new File(FILE).exists());
String expected = "\"SOURCE_CODE_SYSTEM\",\"SOURCE_CODE_SYSTEM_VERSION\",\"TARGET_CODE_SYSTEM\",\"TARGET_CODE_SYSTEM_VERSION\",\"SOURCE_CODE\",\"SOURCE_DISPLAY\",\"TARGET_CODE\",\"TARGET_DISPLAY\",\"EQUIVALENCE\",\"COMMENT\"\n" +
"\"http://example.com/codesystem/1\",\"Version 1s\",\"http://example.com/codesystem/2\",\"Version 2t\",\"Code 1a\",\"Display 1a\",\"Code 2a\",\"Display 2a\",\"equal\",\"2a This is a comment.\"\n" +
"\"http://example.com/codesystem/1\",\"Version 1s\",\"http://example.com/codesystem/2\",\"Version 2t\",\"Code 1b\",\"Display 1b\",\"Code 2b\",\"Display 2b\",\"equal\",\"2b This is a comment.\"\n" +
"\"http://example.com/codesystem/1\",\"Version 1s\",\"http://example.com/codesystem/2\",\"Version 2t\",\"Code 1c\",\"Display 1c\",\"Code 2c\",\"Display 2c\",\"equal\",\"2c This is a comment.\"\n" +
"\"http://example.com/codesystem/1\",\"Version 1s\",\"http://example.com/codesystem/2\",\"Version 2t\",\"Code 1d\",\"Display 1d\",\"Code 2d\",\"Display 2d\",\"equal\",\"2d This is a comment.\"\n" +
"\"http://example.com/codesystem/1\",\"Version 1s\",\"http://example.com/codesystem/3\",\"Version 3t\",\"Code 1a\",\"Display 1a\",\"Code 3a\",\"Display 3a\",\"equal\",\"3a This is a comment.\"\n" +
"\"http://example.com/codesystem/1\",\"Version 1s\",\"http://example.com/codesystem/3\",\"Version 3t\",\"Code 1b\",\"Display 1b\",\"Code 3b\",\"Display 3b\",\"equal\",\"3b This is a comment.\"\n" +
"\"http://example.com/codesystem/1\",\"Version 1s\",\"http://example.com/codesystem/3\",\"Version 3t\",\"Code 1c\",\"Display 1c\",\"Code 3c\",\"Display 3c\",\"equal\",\"3c This is a comment.\"\n" +
"\"http://example.com/codesystem/1\",\"Version 1s\",\"http://example.com/codesystem/3\",\"Version 3t\",\"Code 1d\",\"Display 1d\",\"Code 3d\",\"Display 3d\",\"equal\",\"3d This is a comment.\"\n" +
"\"http://example.com/codesystem/2\",\"Version 2s\",\"http://example.com/codesystem/3\",\"Version 3t\",\"Code 2a\",\"Display 2a\",\"Code 3a\",\"Display 3a\",\"equal\",\"3a This is a comment.\"\n" +
"\"http://example.com/codesystem/2\",\"Version 2s\",\"http://example.com/codesystem/3\",\"Version 3t\",\"Code 2b\",\"Display 2b\",\"Code 3b\",\"Display 3b\",\"equal\",\"3b This is a comment.\"\n" +
"\"http://example.com/codesystem/2\",\"Version 2s\",\"http://example.com/codesystem/3\",\"Version 3t\",\"Code 2c\",\"Display 2c\",\"Code 3c\",\"Display 3c\",\"equal\",\"3c This is a comment.\"\n" +
"\"http://example.com/codesystem/2\",\"Version 2s\",\"http://example.com/codesystem/3\",\"Version 3t\",\"Code 2d\",\"Display 2d\",\"Code 3d\",\"Display 3d\",\"equal\",\"3d This is a comment.\"\n";
ourLog.info("Going to read file: {}", FILE);
String result = IOUtils.toString(new FileInputStream(FILE), Charsets.UTF_8);
assertEquals(expected, result);
FileUtils.deleteQuietly(new File(FILE));
}
@AfterClass
public static void afterClassClearContext() throws Exception {
JettyUtil.closeServer(ourServer);
@ -72,7 +106,7 @@ public class ExportConceptMapToCsvCommandDstu3Test {
ourServer.setHandler(servletHandler);
JettyUtil.startServer(ourServer);
ourPort = JettyUtil.getPortForStartedServer(ourServer);
ourPort = JettyUtil.getPortForStartedServer(ourServer);
ourBase = "http://localhost:" + ourPort;
@ -81,38 +115,6 @@ public class ExportConceptMapToCsvCommandDstu3Test {
ourClient.create().resource(createConceptMap()).execute();
}
@Test
public void testExportConceptMapToCsvCommand() throws IOException {
ourLog.debug("ConceptMap:\n" + ourCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(createConceptMap()));
App.main(new String[] {"export-conceptmap-to-csv",
"-v", ourVersion,
"-t", ourBase,
"-u", CM_URL,
"-f", FILE,
"-l"});
String expected = "\"SOURCE_CODE_SYSTEM\",\"SOURCE_CODE_SYSTEM_VERSION\",\"TARGET_CODE_SYSTEM\",\"TARGET_CODE_SYSTEM_VERSION\",\"SOURCE_CODE\",\"SOURCE_DISPLAY\",\"TARGET_CODE\",\"TARGET_DISPLAY\",\"EQUIVALENCE\",\"COMMENT\"\n" +
"\"http://example.com/codesystem/1\",\"Version 1s\",\"http://example.com/codesystem/2\",\"Version 2t\",\"Code 1a\",\"Display 1a\",\"Code 2a\",\"Display 2a\",\"equal\",\"2a This is a comment.\"\n" +
"\"http://example.com/codesystem/1\",\"Version 1s\",\"http://example.com/codesystem/2\",\"Version 2t\",\"Code 1b\",\"Display 1b\",\"Code 2b\",\"Display 2b\",\"equal\",\"2b This is a comment.\"\n" +
"\"http://example.com/codesystem/1\",\"Version 1s\",\"http://example.com/codesystem/2\",\"Version 2t\",\"Code 1c\",\"Display 1c\",\"Code 2c\",\"Display 2c\",\"equal\",\"2c This is a comment.\"\n" +
"\"http://example.com/codesystem/1\",\"Version 1s\",\"http://example.com/codesystem/2\",\"Version 2t\",\"Code 1d\",\"Display 1d\",\"Code 2d\",\"Display 2d\",\"equal\",\"2d This is a comment.\"\n" +
"\"http://example.com/codesystem/1\",\"Version 1s\",\"http://example.com/codesystem/3\",\"Version 3t\",\"Code 1a\",\"Display 1a\",\"Code 3a\",\"Display 3a\",\"equal\",\"3a This is a comment.\"\n" +
"\"http://example.com/codesystem/1\",\"Version 1s\",\"http://example.com/codesystem/3\",\"Version 3t\",\"Code 1b\",\"Display 1b\",\"Code 3b\",\"Display 3b\",\"equal\",\"3b This is a comment.\"\n" +
"\"http://example.com/codesystem/1\",\"Version 1s\",\"http://example.com/codesystem/3\",\"Version 3t\",\"Code 1c\",\"Display 1c\",\"Code 3c\",\"Display 3c\",\"equal\",\"3c This is a comment.\"\n" +
"\"http://example.com/codesystem/1\",\"Version 1s\",\"http://example.com/codesystem/3\",\"Version 3t\",\"Code 1d\",\"Display 1d\",\"Code 3d\",\"Display 3d\",\"equal\",\"3d This is a comment.\"\n" +
"\"http://example.com/codesystem/2\",\"Version 2s\",\"http://example.com/codesystem/3\",\"Version 3t\",\"Code 2a\",\"Display 2a\",\"Code 3a\",\"Display 3a\",\"equal\",\"3a This is a comment.\"\n" +
"\"http://example.com/codesystem/2\",\"Version 2s\",\"http://example.com/codesystem/3\",\"Version 3t\",\"Code 2b\",\"Display 2b\",\"Code 3b\",\"Display 3b\",\"equal\",\"3b This is a comment.\"\n" +
"\"http://example.com/codesystem/2\",\"Version 2s\",\"http://example.com/codesystem/3\",\"Version 3t\",\"Code 2c\",\"Display 2c\",\"Code 3c\",\"Display 3c\",\"equal\",\"3c This is a comment.\"\n" +
"\"http://example.com/codesystem/2\",\"Version 2s\",\"http://example.com/codesystem/3\",\"Version 3t\",\"Code 2d\",\"Display 2d\",\"Code 3d\",\"Display 3d\",\"equal\",\"3d This is a comment.\"\n";
ourLog.info("Going to read file: {}", FILE);
String result = IOUtils.toString(new FileInputStream(FILE), Charsets.UTF_8);
assertEquals(expected, result);
FileUtils.deleteQuietly(new File(FILE));
}
static ConceptMap createConceptMap() {
ConceptMap conceptMap = new ConceptMap();
conceptMap

View File

@ -4,6 +4,7 @@ import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import ca.uhn.fhir.rest.server.RestfulServer;
import ca.uhn.fhir.rest.server.interceptor.VerboseLoggingInterceptor;
import ca.uhn.fhir.test.utilities.JettyUtil;
import ca.uhn.fhir.util.TestUtil;
import com.google.common.base.Charsets;
import org.apache.commons.io.FileUtils;
@ -22,10 +23,9 @@ import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import static org.awaitility.Awaitility.await;
import static org.junit.Assert.assertEquals;
import ca.uhn.fhir.test.utilities.JettyUtil;
public class ExportConceptMapToCsvCommandR4Test {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(ExportConceptMapToCsvCommandR4Test.class);
private static final String CM_URL = "http://example.com/conceptmap";
@ -34,7 +34,7 @@ public class ExportConceptMapToCsvCommandR4Test {
private static final String CS_URL_1 = "http://example.com/codesystem/1";
private static final String CS_URL_2 = "http://example.com/codesystem/2";
private static final String CS_URL_3 = "http://example.com/codesystem/3";
private static final String FILE = new File("./target/output.csv").getAbsolutePath();
private static final String FILE = new File("./target/output_r4.csv").getAbsolutePath();
private static String ourBase;
private static IGenericClient ourClient;
@ -47,6 +47,37 @@ public class ExportConceptMapToCsvCommandR4Test {
System.setProperty("test", "true");
}
@Test
public void testExportConceptMapToCsvCommand() throws IOException {
ourLog.info("ConceptMap:\n" + ourCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(createConceptMap()));
App.main(new String[]{"export-conceptmap-to-csv",
"-v", ourVersion,
"-t", ourBase,
"-u", CM_URL,
"-f", FILE,
"-l"});
await().until(() -> new File(FILE).exists());
String expected = "\"SOURCE_CODE_SYSTEM\",\"SOURCE_CODE_SYSTEM_VERSION\",\"TARGET_CODE_SYSTEM\",\"TARGET_CODE_SYSTEM_VERSION\",\"SOURCE_CODE\",\"SOURCE_DISPLAY\",\"TARGET_CODE\",\"TARGET_DISPLAY\",\"EQUIVALENCE\",\"COMMENT\"\n" +
"\"http://example.com/codesystem/1\",\"Version 1s\",\"http://example.com/codesystem/2\",\"Version 2t\",\"Code 1a\",\"Display 1a\",\"Code 2a\",\"Display 2a\",\"equal\",\"2a This is a comment.\"\n" +
"\"http://example.com/codesystem/1\",\"Version 1s\",\"http://example.com/codesystem/2\",\"Version 2t\",\"Code 1b\",\"Display 1b\",\"Code 2b\",\"Display 2b\",\"equal\",\"2b This is a comment.\"\n" +
"\"http://example.com/codesystem/1\",\"Version 1s\",\"http://example.com/codesystem/2\",\"Version 2t\",\"Code 1c\",\"Display 1c\",\"Code 2c\",\"Display 2c\",\"equal\",\"2c This is a comment.\"\n" +
"\"http://example.com/codesystem/1\",\"Version 1s\",\"http://example.com/codesystem/2\",\"Version 2t\",\"Code 1d\",\"Display 1d\",\"Code 2d\",\"Display 2d\",\"equal\",\"2d This is a comment.\"\n" +
"\"http://example.com/codesystem/1\",\"Version 1s\",\"http://example.com/codesystem/3\",\"Version 3t\",\"Code 1a\",\"Display 1a\",\"Code 3a\",\"Display 3a\",\"equal\",\"3a This is a comment.\"\n" +
"\"http://example.com/codesystem/1\",\"Version 1s\",\"http://example.com/codesystem/3\",\"Version 3t\",\"Code 1b\",\"Display 1b\",\"Code 3b\",\"Display 3b\",\"equal\",\"3b This is a comment.\"\n" +
"\"http://example.com/codesystem/1\",\"Version 1s\",\"http://example.com/codesystem/3\",\"Version 3t\",\"Code 1c\",\"Display 1c\",\"Code 3c\",\"Display 3c\",\"equal\",\"3c This is a comment.\"\n" +
"\"http://example.com/codesystem/1\",\"Version 1s\",\"http://example.com/codesystem/3\",\"Version 3t\",\"Code 1d\",\"Display 1d\",\"Code 3d\",\"Display 3d\",\"equal\",\"3d This is a comment.\"\n" +
"\"http://example.com/codesystem/2\",\"Version 2s\",\"http://example.com/codesystem/3\",\"Version 3t\",\"Code 2a\",\"Display 2a\",\"Code 3a\",\"Display 3a\",\"equal\",\"3a This is a comment.\"\n" +
"\"http://example.com/codesystem/2\",\"Version 2s\",\"http://example.com/codesystem/3\",\"Version 3t\",\"Code 2b\",\"Display 2b\",\"Code 3b\",\"Display 3b\",\"equal\",\"3b This is a comment.\"\n" +
"\"http://example.com/codesystem/2\",\"Version 2s\",\"http://example.com/codesystem/3\",\"Version 3t\",\"Code 2c\",\"Display 2c\",\"Code 3c\",\"Display 3c\",\"equal\",\"3c This is a comment.\"\n" +
"\"http://example.com/codesystem/2\",\"Version 2s\",\"http://example.com/codesystem/3\",\"Version 3t\",\"Code 2d\",\"Display 2d\",\"Code 3d\",\"Display 3d\",\"equal\",\"3d This is a comment.\"\n";
String result = IOUtils.toString(new FileInputStream(FILE), Charsets.UTF_8);
assertEquals(expected, result);
FileUtils.deleteQuietly(new File(FILE));
}
@AfterClass
public static void afterClassClearContext() throws Exception {
JettyUtil.closeServer(ourServer);
@ -68,7 +99,7 @@ public class ExportConceptMapToCsvCommandR4Test {
ourServer.setHandler(servletHandler);
JettyUtil.startServer(ourServer);
ourPort = JettyUtil.getPortForStartedServer(ourServer);
ourPort = JettyUtil.getPortForStartedServer(ourServer);
ourBase = "http://localhost:" + ourPort;
@ -77,36 +108,6 @@ public class ExportConceptMapToCsvCommandR4Test {
ourClient.create().resource(createConceptMap()).execute();
}
@Test
public void testExportConceptMapToCsvCommand() throws IOException {
ourLog.info("ConceptMap:\n" + ourCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(createConceptMap()));
App.main(new String[] {"export-conceptmap-to-csv",
"-v", ourVersion,
"-t", ourBase,
"-u", CM_URL,
"-f", FILE,
"-l"});
String expected = "\"SOURCE_CODE_SYSTEM\",\"SOURCE_CODE_SYSTEM_VERSION\",\"TARGET_CODE_SYSTEM\",\"TARGET_CODE_SYSTEM_VERSION\",\"SOURCE_CODE\",\"SOURCE_DISPLAY\",\"TARGET_CODE\",\"TARGET_DISPLAY\",\"EQUIVALENCE\",\"COMMENT\"\n" +
"\"http://example.com/codesystem/1\",\"Version 1s\",\"http://example.com/codesystem/2\",\"Version 2t\",\"Code 1a\",\"Display 1a\",\"Code 2a\",\"Display 2a\",\"equal\",\"2a This is a comment.\"\n" +
"\"http://example.com/codesystem/1\",\"Version 1s\",\"http://example.com/codesystem/2\",\"Version 2t\",\"Code 1b\",\"Display 1b\",\"Code 2b\",\"Display 2b\",\"equal\",\"2b This is a comment.\"\n" +
"\"http://example.com/codesystem/1\",\"Version 1s\",\"http://example.com/codesystem/2\",\"Version 2t\",\"Code 1c\",\"Display 1c\",\"Code 2c\",\"Display 2c\",\"equal\",\"2c This is a comment.\"\n" +
"\"http://example.com/codesystem/1\",\"Version 1s\",\"http://example.com/codesystem/2\",\"Version 2t\",\"Code 1d\",\"Display 1d\",\"Code 2d\",\"Display 2d\",\"equal\",\"2d This is a comment.\"\n" +
"\"http://example.com/codesystem/1\",\"Version 1s\",\"http://example.com/codesystem/3\",\"Version 3t\",\"Code 1a\",\"Display 1a\",\"Code 3a\",\"Display 3a\",\"equal\",\"3a This is a comment.\"\n" +
"\"http://example.com/codesystem/1\",\"Version 1s\",\"http://example.com/codesystem/3\",\"Version 3t\",\"Code 1b\",\"Display 1b\",\"Code 3b\",\"Display 3b\",\"equal\",\"3b This is a comment.\"\n" +
"\"http://example.com/codesystem/1\",\"Version 1s\",\"http://example.com/codesystem/3\",\"Version 3t\",\"Code 1c\",\"Display 1c\",\"Code 3c\",\"Display 3c\",\"equal\",\"3c This is a comment.\"\n" +
"\"http://example.com/codesystem/1\",\"Version 1s\",\"http://example.com/codesystem/3\",\"Version 3t\",\"Code 1d\",\"Display 1d\",\"Code 3d\",\"Display 3d\",\"equal\",\"3d This is a comment.\"\n" +
"\"http://example.com/codesystem/2\",\"Version 2s\",\"http://example.com/codesystem/3\",\"Version 3t\",\"Code 2a\",\"Display 2a\",\"Code 3a\",\"Display 3a\",\"equal\",\"3a This is a comment.\"\n" +
"\"http://example.com/codesystem/2\",\"Version 2s\",\"http://example.com/codesystem/3\",\"Version 3t\",\"Code 2b\",\"Display 2b\",\"Code 3b\",\"Display 3b\",\"equal\",\"3b This is a comment.\"\n" +
"\"http://example.com/codesystem/2\",\"Version 2s\",\"http://example.com/codesystem/3\",\"Version 3t\",\"Code 2c\",\"Display 2c\",\"Code 3c\",\"Display 3c\",\"equal\",\"3c This is a comment.\"\n" +
"\"http://example.com/codesystem/2\",\"Version 2s\",\"http://example.com/codesystem/3\",\"Version 3t\",\"Code 2d\",\"Display 2d\",\"Code 3d\",\"Display 3d\",\"equal\",\"3d This is a comment.\"\n";
String result = IOUtils.toString(new FileInputStream(FILE), Charsets.UTF_8);
assertEquals(expected, result);
FileUtils.deleteQuietly(new File(FILE));
}
static ConceptMap createConceptMap() {
ConceptMap conceptMap = new ConceptMap();
conceptMap

View File

@ -71,18 +71,18 @@ public class ApacheRestfulClientFactory extends RestfulClientFactory {
}
@Override
protected ApacheHttpClient getHttpClient(String theServerBase) {
protected synchronized ApacheHttpClient getHttpClient(String theServerBase) {
return new ApacheHttpClient(getNativeHttpClient(), new StringBuilder(theServerBase), null, null, null, null);
}
@Override
public IHttpClient getHttpClient(StringBuilder theUrl, Map<String, List<String>> theIfNoneExistParams,
public synchronized IHttpClient getHttpClient(StringBuilder theUrl, Map<String, List<String>> theIfNoneExistParams,
String theIfNoneExistString, RequestTypeEnum theRequestType, List<Header> theHeaders) {
return new ApacheHttpClient(getNativeHttpClient(), theUrl, theIfNoneExistParams, theIfNoneExistString, theRequestType,
theHeaders);
}
public synchronized HttpClient getNativeHttpClient() {
public HttpClient getNativeHttpClient() {
if (myHttpClient == null) {
//FIXME potential resource leak

View File

@ -71,52 +71,52 @@ public abstract class RestfulClientFactory implements IRestfulClientFactory {
}
@Override
public int getConnectionRequestTimeout() {
public synchronized int getConnectionRequestTimeout() {
return myConnectionRequestTimeout;
}
@Override
public int getConnectTimeout() {
public synchronized int getConnectTimeout() {
return myConnectTimeout;
}
/**
* Return the proxy username to authenticate with the HTTP proxy
*/
protected String getProxyUsername() {
protected synchronized String getProxyUsername() {
return myProxyUsername;
}
/**
* Return the proxy password to authenticate with the HTTP proxy
*/
protected String getProxyPassword() {
protected synchronized String getProxyPassword() {
return myProxyPassword;
}
@Override
public void setProxyCredentials(String theUsername, String thePassword) {
public synchronized void setProxyCredentials(String theUsername, String thePassword) {
myProxyUsername = theUsername;
myProxyPassword = thePassword;
}
@Override
public ServerValidationModeEnum getServerValidationMode() {
public synchronized ServerValidationModeEnum getServerValidationMode() {
return myServerValidationMode;
}
@Override
public int getSocketTimeout() {
public synchronized int getSocketTimeout() {
return mySocketTimeout;
}
@Override
public int getPoolMaxTotal() {
public synchronized int getPoolMaxTotal() {
return myPoolMaxTotal;
}
@Override
public int getPoolMaxPerRoute() {
public synchronized int getPoolMaxPerRoute() {
return myPoolMaxPerRoute;
}
@ -217,7 +217,7 @@ public abstract class RestfulClientFactory implements IRestfulClientFactory {
}
@Override
public void setServerValidationMode(ServerValidationModeEnum theServerValidationMode) {
public synchronized void setServerValidationMode(ServerValidationModeEnum theServerValidationMode) {
Validate.notNull(theServerValidationMode, "theServerValidationMode may not be null");
myServerValidationMode = theServerValidationMode;
}
@ -242,13 +242,13 @@ public abstract class RestfulClientFactory implements IRestfulClientFactory {
@Deprecated // override deprecated method
@Override
public ServerValidationModeEnum getServerValidationModeEnum() {
public synchronized ServerValidationModeEnum getServerValidationModeEnum() {
return getServerValidationMode();
}
@Deprecated // override deprecated method
@Override
public void setServerValidationModeEnum(ServerValidationModeEnum theServerValidationMode) {
public synchronized void setServerValidationModeEnum(ServerValidationModeEnum theServerValidationMode) {
setServerValidationMode(theServerValidationMode);
}

View File

@ -1,39 +0,0 @@
package ca.uhn.fhir.jpa;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2019 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import com.google.common.base.Charsets;
import org.apache.commons.io.IOUtils;
import java.io.IOException;
import java.io.InputStream;
public class BaseTest {
protected String loadResource(String theClasspath) throws IOException {
InputStream stream = BaseTest.class.getResourceAsStream(theClasspath);
if (stream==null) {
throw new IllegalArgumentException("Unable to find resource: " + theClasspath);
}
return IOUtils.toString(stream, Charsets.UTF_8);
}
}

View File

@ -135,7 +135,7 @@ public class DatabaseBlobBinaryStorageSvcImpl extends BaseBinaryStorageSvcImpl {
@Override
public void expungeBlob(IIdType theResourceId, String theBlobId) {
Optional<BinaryStorageEntity> entityOpt = myBinaryStorageEntityDao.findByIdAndResourceId(theBlobId, theResourceId.toUnqualifiedVersionless().getValue());
entityOpt.ifPresent(theBinaryStorageEntity -> myBinaryStorageEntityDao.delete(theBinaryStorageEntity));
entityOpt.ifPresent(theBinaryStorageEntity -> myBinaryStorageEntityDao.deleteByPid(theBinaryStorageEntity.getBlobId()));
}
@Override

View File

@ -165,7 +165,7 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc {
if (jobToDelete.isPresent()) {
ourLog.info("Deleting bulk export job: {}", jobToDelete.get().getJobId());
ourLog.info("Deleting bulk export job: {}", jobToDelete.get());
myTxTemplate.execute(t -> {
@ -176,17 +176,20 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc {
ourLog.info("Purging bulk data file: {}", nextFile.getResourceId());
getBinaryDao().delete(toId(nextFile.getResourceId()));
getBinaryDao().forceExpungeInExistingTransaction(toId(nextFile.getResourceId()), new ExpungeOptions().setExpungeDeletedResources(true).setExpungeOldVersions(true), null);
myBulkExportCollectionFileDao.delete(nextFile);
myBulkExportCollectionFileDao.deleteByPid(nextFile.getId());
}
myBulkExportCollectionDao.delete(nextCollection);
myBulkExportCollectionDao.deleteByPid(nextCollection.getId());
}
myBulkExportJobDao.delete(job);
ourLog.info("*** ABOUT TO DELETE");
myBulkExportJobDao.deleteByPid(job.getId());
return null;
});
ourLog.info("Finished deleting bulk export job: {}", jobToDelete.get());
}
}
@ -452,11 +455,17 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc {
}
@Override
@Transactional
@Transactional(Transactional.TxType.NEVER)
public synchronized void cancelAndPurgeAllJobs() {
myBulkExportCollectionFileDao.deleteAll();
myBulkExportCollectionDao.deleteAll();
myBulkExportJobDao.deleteAll();
myTxTemplate.execute(t -> {
ourLog.info("Deleting all files");
myBulkExportCollectionFileDao.deleteAllFiles();
ourLog.info("Deleting all collections");
myBulkExportCollectionDao.deleteAllFiles();
ourLog.info("Deleting all jobs");
myBulkExportJobDao.deleteAllFiles();
return null;
});
}
@DisallowConcurrentExecution

View File

@ -30,14 +30,15 @@ import ca.uhn.fhir.jpa.delete.DeleteConflictList;
import ca.uhn.fhir.jpa.delete.DeleteConflictService;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.model.search.StorageProcessingMessage;
import ca.uhn.fhir.rest.server.servlet.ServletSubRequestDetails;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import ca.uhn.fhir.jpa.util.DeleteConflict;
import ca.uhn.fhir.jpa.util.JpaInterceptorBroadcaster;
import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum;
import ca.uhn.fhir.parser.DataFormatException;
import ca.uhn.fhir.parser.IParser;
import ca.uhn.fhir.rest.api.*;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.PatchTypeEnum;
import ca.uhn.fhir.rest.api.PreferReturnEnum;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.param.ParameterUtil;
import ca.uhn.fhir.rest.server.RestfulServerUtils;
@ -49,6 +50,7 @@ import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor;
import ca.uhn.fhir.rest.server.method.BaseMethodBinding;
import ca.uhn.fhir.rest.server.method.BaseResourceReturningMethodBinding;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.rest.server.servlet.ServletSubRequestDetails;
import ca.uhn.fhir.rest.server.util.ServletRequestUtil;
import ca.uhn.fhir.util.*;
import com.google.common.base.Charsets;
@ -89,8 +91,6 @@ public class TransactionProcessor<BUNDLE extends IBaseBundle, BUNDLEENTRY> {
@Autowired
private ITransactionProcessorVersionAdapter<BUNDLE, BUNDLEENTRY> myVersionAdapter;
@Autowired
private MatchUrlService myMatchUrlService;
@Autowired
private DaoRegistry myDaoRegistry;
@Autowired(required = false)
private HapiFhirHibernateJpaDialect myHapiFhirHibernateJpaDialect;
@ -127,7 +127,6 @@ public class TransactionProcessor<BUNDLE extends IBaseBundle, BUNDLEENTRY> {
}
ourLog.info("Beginning storing collection with {} resources", myVersionAdapter.getEntries(theRequest).size());
long start = System.currentTimeMillis();
TransactionTemplate txTemplate = new TransactionTemplate(myTxManager);
txTemplate.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);
@ -173,7 +172,7 @@ public class TransactionProcessor<BUNDLE extends IBaseBundle, BUNDLEENTRY> {
}
}
idToPersistedOutcome.put(newId, outcome);
if (outcome.getCreated().booleanValue()) {
if (outcome.getCreated()) {
myVersionAdapter.setResponseStatus(newEntry, toStatusString(Constants.STATUS_HTTP_201_CREATED));
} else {
myVersionAdapter.setResponseStatus(newEntry, toStatusString(Constants.STATUS_HTTP_200_OK));
@ -517,7 +516,6 @@ public class TransactionProcessor<BUNDLE extends IBaseBundle, BUNDLEENTRY> {
* Look for duplicate conditional creates and consolidate them
*/
final HashMap<String, String> keyToUuid = new HashMap<>();
final IdentityHashMap<IBaseResource, String> identityToUuid = new IdentityHashMap<>();
for (int index = 0, originalIndex = 0; index < theEntries.size(); index++, originalIndex++) {
BUNDLEENTRY nextReqEntry = theEntries.get(index);
IBaseResource resource = myVersionAdapter.getResource(nextReqEntry);
@ -551,7 +549,6 @@ public class TransactionProcessor<BUNDLE extends IBaseBundle, BUNDLEENTRY> {
if (consolidateEntry) {
if (!keyToUuid.containsKey(key)) {
keyToUuid.put(key, entryUrl);
identityToUuid.put(resource, entryUrl);
} else {
ourLog.info("Discarding transaction bundle entry {} as it contained a duplicate conditional {}", originalIndex, verb);
theEntries.remove(index);

View File

@ -40,4 +40,8 @@ public interface IBinaryStorageEntityDao extends JpaRepository<BinaryStorageEnti
@Query("SELECT e FROM BinaryStorageEntity e WHERE e.myBlobId = :blob_id AND e.myResourceId = :resource_id")
Optional<BinaryStorageEntity> findByIdAndResourceId(@Param("blob_id") String theBlobId, @Param("resource_id") String theResourceId);
@Modifying
@Query("DELETE FROM BinaryStorageEntity t WHERE t.myBlobId = :pid")
void deleteByPid(@Param("pid") String theId);
}

View File

@ -1,14 +1,11 @@
package ca.uhn.fhir.jpa.dao.data;
import ca.uhn.fhir.jpa.entity.BulkExportCollectionEntity;
import ca.uhn.fhir.jpa.entity.BulkExportJobEntity;
import org.springframework.data.domain.Slice;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import java.util.Optional;
/*
* #%L
* HAPI FHIR JPA Server
@ -30,5 +27,13 @@ import java.util.Optional;
*/
public interface IBulkExportCollectionDao extends JpaRepository<BulkExportCollectionEntity, Long> {
// nothing currently
@Modifying
@Query("DELETE FROM BulkExportCollectionEntity t")
void deleteAllFiles();
@Modifying
@Query("DELETE FROM BulkExportCollectionEntity t WHERE t.myId = :pid")
void deleteByPid(@Param("pid") Long theId);
}

View File

@ -2,6 +2,9 @@ package ca.uhn.fhir.jpa.dao.data;
import ca.uhn.fhir.jpa.entity.BulkExportCollectionFileEntity;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
/*
* #%L
@ -24,5 +27,13 @@ import org.springframework.data.jpa.repository.JpaRepository;
*/
public interface IBulkExportCollectionFileDao extends JpaRepository<BulkExportCollectionFileEntity, Long> {
// nothing currently
@Modifying
@Query("DELETE FROM BulkExportCollectionFileEntity t")
void deleteAllFiles();
@Modifying
@Query("DELETE FROM BulkExportCollectionFileEntity t WHERE t.myId = :pid")
void deleteByPid(@Param("pid") Long theId);
}

View File

@ -5,6 +5,7 @@ import ca.uhn.fhir.jpa.entity.BulkExportJobEntity;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Slice;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
@ -44,4 +45,12 @@ public interface IBulkExportJobDao extends JpaRepository<BulkExportJobEntity, Lo
@Query("SELECT j FROM BulkExportJobEntity j WHERE j.myRequest = :request AND j.myCreated > :createdAfter AND j.myStatus <> :status")
Slice<BulkExportJobEntity> findExistingJob(Pageable thePage, @Param("request") String theRequest, @Param("createdAfter") Date theCreatedAfter, @Param("status") BulkJobStatusEnum theNotStatus);
@Modifying
@Query("DELETE FROM BulkExportJobEntity t")
void deleteAllFiles();
@Modifying
@Query("DELETE FROM BulkExportJobEntity t WHERE t.myId = :pid")
void deleteByPid(@Param("pid") Long theId);
}

View File

@ -24,6 +24,7 @@ import java.util.List;
*/
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
@ -41,4 +42,7 @@ public interface IForcedIdDao extends JpaRepository<ForcedId, Long> {
@Query("SELECT f FROM ForcedId f WHERE f.myResourcePid = :resource_pid")
ForcedId findByResourcePid(@Param("resource_pid") Long theResourcePid);
@Modifying
@Query("DELETE FROM ForcedId t WHERE t.myId = :pid")
void deleteByPid(@Param("pid") Long theId);
}

View File

@ -91,14 +91,12 @@ public interface IResourceHistoryTableDao extends JpaRepository<ResourceHistoryT
"LEFT OUTER JOIN ResourceTable t ON (v.myResourceId = t.myId) " +
"WHERE v.myResourceVersion != t.myVersion")
Slice<Long> findIdsOfPreviousVersionsOfResources(Pageable thePage);
@Query("" +
"SELECT h FROM ResourceHistoryTable h " +
"INNER JOIN ResourceTable r ON (r.myId = h.myResourceId and r.myVersion = h.myResourceVersion) " +
"WHERE r.myId in (:pids)")
Collection<ResourceHistoryTable> findByResourceIds(@Param("pids") Collection<Long> pids);
@Modifying
@Query("UPDATE ResourceHistoryTable r SET r.myResourceVersion = :newVersion WHERE r.myResourceId = :id AND r.myResourceVersion = :oldVersion")
void updateVersion(@Param("id") long theId, @Param("oldVersion") long theOldVersion, @Param("newVersion") long theNewVersion);
@Modifying
@Query("DELETE FROM ResourceHistoryTable t WHERE t.myId = :pid")
void deleteByPid(@Param("pid") Long theId);
}

View File

@ -2,6 +2,11 @@ package ca.uhn.fhir.jpa.dao.data;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTag;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import java.util.List;
/*
* #%L
@ -24,5 +29,9 @@ import org.springframework.data.jpa.repository.JpaRepository;
*/
public interface IResourceHistoryTagDao extends JpaRepository<ResourceHistoryTag, Long> {
// nothing
@Modifying
@Query("DELETE FROM ResourceHistoryTag t WHERE t.myId IN :pids")
void deleteByPid(@Param("pids") List<Long> thePids);
}

View File

@ -35,4 +35,8 @@ import java.util.Map;
public interface IResourceProvenanceDao extends JpaRepository<ResourceHistoryProvenanceEntity, Long> {
@Modifying
@Query("DELETE FROM ResourceHistoryProvenanceEntity t WHERE t.myId = :pid")
void deleteByPid(@Param("pid") Long theId);
}

View File

@ -58,4 +58,9 @@ public interface IResourceTableDao extends JpaRepository<ResourceTable, Long> {
@Modifying
@Query("UPDATE ResourceTable t SET t.myIndexStatus = :status WHERE t.myId = :id")
void updateIndexStatus(@Param("id") Long theId, @Param("status") Long theIndexStatus);
@Modifying
@Query("DELETE FROM ResourceTable t WHERE t.myId = :pid")
void deleteByPid(@Param("pid") Long theId);
}

View File

@ -24,6 +24,7 @@ import java.util.Date;
*/
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
@ -35,4 +36,8 @@ public interface ISearchParamPresentDao extends JpaRepository<SearchParamPresent
@Query("SELECT s FROM SearchParamPresent s WHERE s.myResource = :res")
Collection<SearchParamPresent> findAllForResource(@Param("res") ResourceTable theResource);
@Modifying
@Query("delete from SearchParamPresent t WHERE t.myResourcePid = :resid")
void deleteByResourceId(@Param("resid") Long theResourcePid);
}

View File

@ -93,7 +93,9 @@ public class ExpungeOperation implements Callable<ExpungeOutcome> {
}
private Slice<Long> findHistoricalVersionsOfDeletedResources() {
return myExpungeDaoService.findHistoricalVersionsOfDeletedResources(myResourceName, myResourceId, myRemainingCount.get());
Slice<Long> retVal = myExpungeDaoService.findHistoricalVersionsOfDeletedResources(myResourceName, myResourceId, myRemainingCount.get());
ourLog.debug("Found {} historical versions", retVal.getSize());
return retVal;
}
private Slice<Long> findHistoricalVersionsOfNonDeletedResources() {

View File

@ -50,6 +50,7 @@ import org.springframework.transaction.annotation.Transactional;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;
@Service
class ResourceExpungeService implements IResourceExpungeService {
@ -147,11 +148,11 @@ class ResourceExpungeService implements IResourceExpungeService {
callHooks(theRequestDetails, theRemainingCount, version, id);
if (version.getProvenance() != null) {
myResourceHistoryProvenanceTableDao.delete(version.getProvenance());
myResourceHistoryProvenanceTableDao.deleteByPid(version.getProvenance().getId());
}
myResourceHistoryTagDao.deleteAll(version.getTags());
myResourceHistoryTableDao.delete(version);
myResourceHistoryTagDao.deleteByPid(version.getTags().stream().map(t->t.getId()).collect(Collectors.toList()));
myResourceHistoryTableDao.deleteByPid(version.getId());
theRemainingCount.decrementAndGet();
}
@ -215,9 +216,13 @@ class ResourceExpungeService implements IResourceExpungeService {
myIdHelperService.delete(forcedId);
}
myResourceTableDao.delete(resource);
myResourceTableDao.deleteByPid(resource.getId());
}
@Autowired
private ISearchParamPresentDao mySearchParamPresentDao;
@Override
@Transactional
public void deleteAllSearchParams(Long theResourceId) {
@ -228,6 +233,7 @@ class ResourceExpungeService implements IResourceExpungeService {
myResourceIndexedSearchParamQuantityDao.deleteByResourceId(theResourceId);
myResourceIndexedSearchParamStringDao.deleteByResourceId(theResourceId);
myResourceIndexedSearchParamTokenDao.deleteByResourceId(theResourceId);
mySearchParamPresentDao.deleteByResourceId(theResourceId);
myResourceLinkDao.deleteByResourceId(theResourceId);
myResourceTagDao.deleteByResourceId(theResourceId);

View File

@ -54,7 +54,7 @@ public class IdHelperService {
private IInterceptorBroadcaster myInterceptorBroadcaster;
public void delete(ForcedId forcedId) {
myForcedIdDao.delete(forcedId);
myForcedIdDao.deleteByPid(forcedId.getId());
}
/**

View File

@ -23,20 +23,21 @@ package ca.uhn.fhir.jpa.entity;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import javax.persistence.*;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;
@Entity
@Table(name = "HFJ_BLK_EXPORT_COLLECTION")
public class BulkExportCollectionEntity {
public class BulkExportCollectionEntity implements Serializable {
@Id
@GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_BLKEXCOL_PID")
@SequenceGenerator(name = "SEQ_BLKEXCOL_PID", sequenceName = "SEQ_BLKEXCOL_PID")
@Column(name = "PID")
private Long myId;
@ManyToOne
@JoinColumn(name = "JOB_PID", referencedColumnName = "PID", nullable = false, foreignKey = @ForeignKey(name="FK_BLKEXCOL_JOB"))
@ManyToOne()
@JoinColumn(name = "JOB_PID", referencedColumnName = "PID", nullable = false, foreignKey = @ForeignKey(name = "FK_BLKEXCOL_JOB"))
private BulkExportJobEntity myJob;
@Column(name = "RES_TYPE", length = ResourceTable.RESTYPE_LEN, nullable = false)
private String myResourceType;
@ -82,4 +83,8 @@ public class BulkExportCollectionEntity {
}
return myFiles;
}
public Long getId() {
return myId;
}
}

View File

@ -23,10 +23,11 @@ package ca.uhn.fhir.jpa.entity;
import ca.uhn.fhir.jpa.model.entity.ForcedId;
import javax.persistence.*;
import java.io.Serializable;
@Entity
@Table(name = "HFJ_BLK_EXPORT_COLFILE")
public class BulkExportCollectionFileEntity {
public class BulkExportCollectionFileEntity implements Serializable {
@Id
@GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_BLKEXCOLFILE_PID")
@ -50,4 +51,8 @@ public class BulkExportCollectionFileEntity {
public String getResourceId() {
return myResourceId;
}
public Long getId() {
return myId;
}
}

View File

@ -22,9 +22,11 @@ package ca.uhn.fhir.jpa.entity;
import ca.uhn.fhir.jpa.bulk.BulkJobStatusEnum;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import org.hl7.fhir.r5.model.InstantType;
import javax.persistence.*;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
@ -37,7 +39,7 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank;
}, indexes = {
@Index(name = "IDX_BLKEX_EXPTIME", columnList = "EXP_TIME")
})
public class BulkExportJobEntity {
public class BulkExportJobEntity implements Serializable {
public static final int REQUEST_LENGTH = 500;
public static final int STATUS_MESSAGE_LEN = 500;
@ -64,7 +66,7 @@ public class BulkExportJobEntity {
private Date myExpiry;
@Column(name = "REQUEST", nullable = false, length = REQUEST_LENGTH)
private String myRequest;
@OneToMany(fetch = FetchType.LAZY, mappedBy = "myJob")
@OneToMany(fetch = FetchType.LAZY, mappedBy = "myJob", orphanRemoval = false)
private Collection<BulkExportCollectionEntity> myCollections;
@Version
@Column(name = "OPTLOCK", nullable = false)
@ -120,7 +122,8 @@ public class BulkExportJobEntity {
@Override
public String toString() {
ToStringBuilder b = new ToStringBuilder(this);
ToStringBuilder b = new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE);
b.append("pid", myId);
if (isNotBlank(myJobId)) {
b.append("jobId", myJobId);
}

View File

@ -51,29 +51,30 @@ public class LoadedFileDescriptors implements Closeable {
try (BufferedInputStream bufferedInputStream = new BufferedInputStream(inputStream)) {
try (ZipInputStream zis = new ZipInputStream(bufferedInputStream)) {
for (ZipEntry nextEntry; (nextEntry = zis.getNextEntry()) != null; ) {
BOMInputStream fis = new BOMInputStream(zis);
File nextTemporaryFile = File.createTempFile("hapifhir", ".tmp");
ourLog.info("Creating temporary file: {}", nextTemporaryFile.getAbsolutePath());
nextTemporaryFile.deleteOnExit();
try (FileOutputStream fos = new FileOutputStream(nextTemporaryFile, false)) {
IOUtils.copy(fis, fos);
String nextEntryFileName = nextEntry.getName();
myUncompressedFileDescriptors.add(new ITermLoaderSvc.FileDescriptor() {
@Override
public String getFilename() {
return nextEntryFileName;
}
@Override
public InputStream getInputStream() {
try {
return new FileInputStream(nextTemporaryFile);
} catch (FileNotFoundException e) {
throw new InternalErrorException(e);
try (BOMInputStream fis = new NonClosableBOMInputStream(zis)) {
File nextTemporaryFile = File.createTempFile("hapifhir", ".tmp");
ourLog.info("Creating temporary file: {}", nextTemporaryFile.getAbsolutePath());
nextTemporaryFile.deleteOnExit();
try (FileOutputStream fos = new FileOutputStream(nextTemporaryFile, false)) {
IOUtils.copy(fis, fos);
String nextEntryFileName = nextEntry.getName();
myUncompressedFileDescriptors.add(new ITermLoaderSvc.FileDescriptor() {
@Override
public String getFilename() {
return nextEntryFileName;
}
}
});
myTemporaryFiles.add(nextTemporaryFile);
@Override
public InputStream getInputStream() {
try {
return new FileInputStream(nextTemporaryFile);
} catch (FileNotFoundException e) {
throw new InternalErrorException(e);
}
}
});
myTemporaryFiles.add(nextTemporaryFile);
}
}
}
}
@ -140,4 +141,14 @@ public class LoadedFileDescriptors implements Closeable {
}
private static class NonClosableBOMInputStream extends BOMInputStream {
NonClosableBOMInputStream(InputStream theWrap) {
super(theWrap);
}
@Override
public void close() {
// nothing
}
}
}

View File

@ -281,15 +281,6 @@ public class TermLoaderSvcImpl implements ITermLoaderSvc {
throw new InternalErrorException("Failed to load imgthla.xml", e);
}
Map<String, CodeSystem.PropertyType> propertyNamesToTypes = new HashMap<>();
for (CodeSystem.PropertyComponent nextProperty : imgthlaCs.getProperty()) {
String nextPropertyCode = nextProperty.getCode();
CodeSystem.PropertyType nextPropertyType = nextProperty.getType();
if (isNotBlank(nextPropertyCode)) {
propertyNamesToTypes.put(nextPropertyCode, nextPropertyType);
}
}
boolean foundHlaNom = false;
boolean foundHlaXml = false;
for (FileDescriptor nextZipBytes : theDescriptors.getUncompressedFileDescriptors()) {
@ -424,7 +415,7 @@ public class TermLoaderSvcImpl implements ITermLoaderSvc {
iterateOverZipFile(theDescriptors, theUploadProperties.getProperty(LOINC_ANSWERLIST_FILE.getCode(), LOINC_ANSWERLIST_FILE_DEFAULT.getCode()), handler, ',', QuoteMode.NON_NUMERIC, false);
// Answer list links (connects LOINC observation codes to answer list codes)
handler = new LoincAnswerListLinkHandler(code2concept, valueSets);
handler = new LoincAnswerListLinkHandler(code2concept);
iterateOverZipFile(theDescriptors, theUploadProperties.getProperty(LOINC_ANSWERLIST_LINK_FILE.getCode(), LOINC_ANSWERLIST_LINK_FILE_DEFAULT.getCode()), handler, ',', QuoteMode.NON_NUMERIC, false);
// RSNA playbook

View File

@ -36,13 +36,9 @@ import static org.apache.commons.lang3.StringUtils.trim;
public class LoincAnswerListLinkHandler implements IRecordHandler {
private final Map<String, TermConcept> myCode2Concept;
private final Map<String, ValueSet> myIdToValueSet = new HashMap<>();
public LoincAnswerListLinkHandler(Map<String, TermConcept> theCode2concept, List<ValueSet> theValueSets) {
public LoincAnswerListLinkHandler(Map<String, TermConcept> theCode2concept) {
myCode2Concept = theCode2concept;
for (ValueSet next : theValueSets) {
myIdToValueSet.put(next.getId(), next);
}
}
@Override

View File

@ -13,6 +13,7 @@ import ca.uhn.fhir.test.utilities.UnregisterScheduledProcessor;
import com.google.common.base.Charsets;
import com.google.common.collect.Sets;
import org.apache.commons.lang3.time.DateUtils;
import org.hamcrest.Matchers;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.Binary;
import org.hl7.fhir.r4.model.InstantType;
@ -92,7 +93,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
// Check that things were deleted
runInTransaction(() -> {
assertEquals(0, myResourceTableDao.count());
assertEquals(0, myBulkExportJobDao.count());
assertThat(myBulkExportJobDao.findAll(), Matchers.empty());
assertEquals(0, myBulkExportCollectionDao.count());
assertEquals(0, myBulkExportCollectionFileDao.count());
});

View File

@ -111,7 +111,7 @@ public class TestR4Config extends BaseJavaConfigR4 {
SLF4JLogLevel level = SLF4JLogLevel.INFO;
DataSource dataSource = ProxyDataSourceBuilder
.create(retVal)
// .logQueryBySlf4j(level, "SQL")
.logQueryBySlf4j(level, "SQL")
.logSlowQueryBySlf4j(10, TimeUnit.SECONDS)
// .countQuery(new ThreadQueryCountHolder())
.beforeQuery(new BlockLargeNumbersOfParamsListener())

View File

@ -482,4 +482,23 @@ public abstract class BaseJpaTest extends BaseTest {
Thread.sleep(500);
}
public static void waitForSize(int theTarget, Callable<Number> theCallable, Callable<String> theFailureMessage) throws Exception {
waitForSize(theTarget, 10000, theCallable, theFailureMessage);
}
public static void waitForSize(int theTarget, int theTimeout, Callable<Number> theCallable, Callable<String> theFailureMessage) throws Exception {
StopWatch sw = new StopWatch();
while (theCallable.call().intValue() != theTarget && sw.getMillis() < theTimeout) {
try {
Thread.sleep(50);
} catch (InterruptedException theE) {
throw new Error(theE);
}
}
if (sw.getMillis() >= theTimeout) {
fail("Size " + theCallable.call() + " is != target " + theTarget + " - " + theFailureMessage.call());
}
Thread.sleep(500);
}
}

View File

@ -296,6 +296,35 @@ public class SearchCoordinatorSvcImplTest {
}
@Test
public void testCancelActiveSearches() {
SearchParameterMap params = new SearchParameterMap();
params.add("name", new StringParam("ANAME"));
List<Long> pids = createPidSequence(10, 800);
SlowIterator iter = new SlowIterator(pids.iterator(), 500);
when(mySearchBuilder.createQuery(same(params), any(), any())).thenReturn(iter);
doAnswer(loadPids()).when(mySearchBuilder).loadResourcesByPid(any(Collection.class), any(Collection.class), any(List.class), anyBoolean(), any());
IBundleProvider result = mySvc.registerSearch(myCallingDao, params, "Patient", new CacheControlDirective(), null);
assertNotNull(result.getUuid());
assertEquals(null, result.size());
List<IBaseResource> resources;
resources = result.getResources(0, 1);
assertEquals(1, resources.size());
mySvc.cancelAllActiveSearches();
try {
result.getResources(10, 20);
} catch (InternalErrorException e) {
assertEquals("Abort has been requested", e.getMessage());
}
}
/**
* Subsequent requests for the same search (i.e. a request for the next
* page) within the same JVM will not use the original bundle provider

View File

@ -2,20 +2,28 @@ package ca.uhn.fhir.jpa.subscription;
import org.springframework.messaging.Message;
import org.springframework.messaging.MessageChannel;
import org.springframework.messaging.support.ChannelInterceptorAdapter;
import org.springframework.messaging.support.ChannelInterceptor;
public class CountingInterceptor extends ChannelInterceptorAdapter {
import java.util.ArrayList;
import java.util.List;
private int mySentCount;
public class CountingInterceptor implements ChannelInterceptor {
private List<String> mySent = new ArrayList<>();
public int getSentCount() {
return mySentCount;
return mySent.size();
}
@Override
public void afterSendCompletion(Message<?> message, MessageChannel channel, boolean sent, Exception ex) {
if (sent) {
mySentCount++;
mySent.add(message.toString());
}
}
@Override
public String toString() {
return "[" + String.join("\n", mySent) + "]";
}
}

View File

@ -3,7 +3,6 @@ package ca.uhn.fhir.jpa.subscription.resthook;
import ca.uhn.fhir.jpa.config.StoppableSubscriptionDeliveringRestHookSubscriber;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.jpa.subscription.BaseSubscriptionsR4Test;
import ca.uhn.fhir.model.dstu2.valueset.SubscriptionStatusEnum;
import ca.uhn.fhir.rest.api.CacheControlDirective;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.MethodOutcome;
@ -308,7 +307,7 @@ public class RestHookTestR4Test extends BaseSubscriptionsR4Test {
int modCount = myCountingInterceptor.getSentCount();
ourClient.update().resource(subscription1).execute();
waitForSize(modCount + 1, () -> myCountingInterceptor.getSentCount());
waitForSize(modCount + 1, () -> myCountingInterceptor.getSentCount(), () -> myCountingInterceptor.toString());
ourLog.info("** About to send observation");
Observation observation2 = sendObservation(code, "SNOMED-CT");

View File

@ -305,7 +305,7 @@ public class RestHookTestR5Test extends BaseSubscriptionsR5Test {
int modCount = myCountingInterceptor.getSentCount();
ourClient.update().resource(subscription1).execute();
waitForSize(modCount + 1, () -> myCountingInterceptor.getSentCount());
waitForSize(modCount + 1, () -> myCountingInterceptor.getSentCount(), () -> myCountingInterceptor.toString());
ourLog.info("** About to send observation");
Observation observation2 = sendObservation(code, "SNOMED-CT");

View File

@ -93,7 +93,7 @@ public class DropIndexTask extends BaseTableTask<DropIndexTask> {
sql = "drop index " + theIndexName;
break;
case POSTGRES_9_4:
sql = "alter table " + theTableName + " drop constraint " + theIndexName + " cascade";
sql = "drop index " + theIndexName + " cascade";
break;
case ORACLE_12C:
case MSSQL_2012:

View File

@ -85,4 +85,8 @@ public class BinaryStorageEntity {
public void setBlob(Blob theBlob) {
myBlob = theBlob;
}
public String getBlobId() {
return myBlobId;
}
}

View File

@ -90,4 +90,7 @@ public class ForcedId {
myResourceType = theResourceType;
}
public Long getId() {
return myId;
}
}

View File

@ -80,4 +80,7 @@ public class ResourceHistoryProvenanceEntity {
myRequestId = theRequestId;
}
public Long getId() {
return myId;
}
}

View File

@ -87,4 +87,7 @@ public class ResourceHistoryTag extends BaseTag implements Serializable {
myResourceHistory = theResourceHistory;
}
public Long getId() {
return myId;
}
}

View File

@ -46,6 +46,8 @@ public class SearchParamPresent implements Serializable {
@ManyToOne()
@JoinColumn(name = "RES_ID", referencedColumnName = "RES_ID", nullable = false, foreignKey = @ForeignKey(name = "FK_RESPARMPRES_RESID"))
private ResourceTable myResource;
@Column(name="RES_ID", nullable = false, insertable = false, updatable = false)
private Long myResourcePid;
@Transient
private transient String myParamName;
@Column(name = "HASH_PRESENCE")

View File

@ -33,6 +33,7 @@ import org.hl7.fhir.r4.model.IdType;
import org.hl7.fhir.r4.model.Subscription;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.io.Serializable;
import java.util.*;
@ -165,6 +166,7 @@ public class CanonicalSubscription implements Serializable, Cloneable {
}
}
@Nullable
public IIdType getIdElement(FhirContext theContext) {
IIdType retVal = null;
if (isNotBlank(myIdElement)) {

View File

@ -27,6 +27,7 @@ import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.jpa.subscription.module.CanonicalSubscription;
import ca.uhn.fhir.jpa.subscription.module.cache.ActiveSubscription;
import ca.uhn.fhir.jpa.subscription.module.cache.SubscriptionRegistry;
import com.google.common.annotations.VisibleForTesting;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
@ -97,4 +98,22 @@ public abstract class BaseSubscriptionDeliverySubscriber implements MessageHandl
public abstract void handleMessage(ResourceDeliveryMessage theMessage) throws Exception;
@VisibleForTesting
void setFhirContextForUnitTest(FhirContext theCtx) {
myFhirContext = theCtx;
}
/** Replaces the injected {@link IInterceptorBroadcaster}. Package-private and intended for unit tests only. */
@VisibleForTesting
void setInterceptorBroadcasterForUnitTest(IInterceptorBroadcaster theInterceptorBroadcaster) {
myInterceptorBroadcaster = theInterceptorBroadcaster;
}
/** Replaces the injected {@link SubscriptionRegistry}. Package-private and intended for unit tests only. */
@VisibleForTesting
void setSubscriptionRegistryForUnitTest(SubscriptionRegistry theSubscriptionRegistry) {
mySubscriptionRegistry = theSubscriptionRegistry;
}
/** @return the configured {@link IInterceptorBroadcaster} (exposed so subclasses can fire delivery hooks) */
public IInterceptorBroadcaster getInterceptorBroadcaster() {
return myInterceptorBroadcaster;
}
}

View File

@ -120,7 +120,6 @@ public class ResourceDeliveryMessage extends BaseResourceMessage implements IRes
public String toString() {
return new ToStringBuilder(this)
.append("mySubscription", mySubscription)
// .append("mySubscriptionString", mySubscriptionString)
.append("myPayloadString", myPayloadString)
.append("myPayload", myPayload)
.append("myPayloadId", myPayloadId)
@ -134,7 +133,10 @@ public class ResourceDeliveryMessage extends BaseResourceMessage implements IRes
public String getSubscriptionId(FhirContext theFhirContext) {
String retVal = null;
if (getSubscription() != null) {
retVal = getSubscription().getIdElement(theFhirContext).getValue();
IIdType idElement = getSubscription().getIdElement(theFhirContext);
if (idElement != null) {
retVal = idElement.getValue();
}
}
return retVal;
}

View File

@ -52,8 +52,6 @@ public class SubscriptionDeliveringRestHookSubscriber extends BaseSubscriptionDe
@Autowired
IResourceRetriever myResourceRetriever;
private Logger ourLog = LoggerFactory.getLogger(SubscriptionDeliveringRestHookSubscriber.class);
@Autowired
private IInterceptorBroadcaster myInterceptorBroadcaster;
protected void deliverPayload(ResourceDeliveryMessage theMsg, CanonicalSubscription theSubscription, EncodingEnum thePayloadType, IGenericClient theClient) {
IBaseResource payloadResource = getAndMassagePayload(theMsg, theSubscription);
@ -144,7 +142,7 @@ public class SubscriptionDeliveringRestHookSubscriber extends BaseSubscriptionDe
HookParams params = new HookParams()
.add(CanonicalSubscription.class, subscription)
.add(ResourceDeliveryMessage.class, theMessage);
if (!myInterceptorBroadcaster.callHooks(Pointcut.SUBSCRIPTION_BEFORE_REST_HOOK_DELIVERY, params)) {
if (!getInterceptorBroadcaster().callHooks(Pointcut.SUBSCRIPTION_BEFORE_REST_HOOK_DELIVERY, params)) {
return;
}
@ -179,7 +177,7 @@ public class SubscriptionDeliveringRestHookSubscriber extends BaseSubscriptionDe
params = new HookParams()
.add(CanonicalSubscription.class, subscription)
.add(ResourceDeliveryMessage.class, theMessage);
if (!myInterceptorBroadcaster.callHooks(Pointcut.SUBSCRIPTION_AFTER_REST_HOOK_DELIVERY, params)) {
if (!getInterceptorBroadcaster().callHooks(Pointcut.SUBSCRIPTION_AFTER_REST_HOOK_DELIVERY, params)) {
//noinspection UnnecessaryReturnStatement
return;
}

View File

@ -0,0 +1,178 @@
package ca.uhn.fhir.jpa.subscription.module.subscriber;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.jpa.subscription.module.CanonicalSubscription;
import ca.uhn.fhir.jpa.subscription.module.ResourceModifiedMessage;
import ca.uhn.fhir.jpa.subscription.module.cache.SubscriptionRegistry;
import ca.uhn.fhir.rest.api.EncodingEnum;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import ca.uhn.fhir.rest.client.api.IRestfulClientFactory;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import org.hl7.fhir.r4.model.IdType;
import org.hl7.fhir.r4.model.Patient;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Answers;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.messaging.Message;
import org.springframework.messaging.MessagingException;
import org.springframework.messaging.support.GenericMessage;
import static org.junit.Assert.*;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.*;
/**
 * Unit tests for the delivery pipeline in {@code BaseSubscriptionDeliverySubscriber},
 * exercised via its REST-hook implementation {@link SubscriptionDeliveringRestHookSubscriber}.
 * The FHIR client factory and interceptor broadcaster are mocked, so no network I/O occurs.
 */
@RunWith(MockitoJUnitRunner.class)
public class BaseSubscriptionDeliverySubscriberTest {

	private SubscriptionDeliveringRestHookSubscriber mySubscriber;
	private FhirContext myCtx = FhirContext.forR4();

	@Mock
	private IInterceptorBroadcaster myInterceptorBroadcaster;
	@Mock
	protected SubscriptionRegistry mySubscriptionRegistry;
	@Mock(answer = Answers.RETURNS_DEEP_STUBS)
	private IRestfulClientFactory myRestfulClientFactory;
	@Mock(answer = Answers.RETURNS_DEEP_STUBS)
	private IGenericClient myGenericClient;

	@Before
	public void before() {
		mySubscriber = new SubscriptionDeliveringRestHookSubscriber();
		mySubscriber.setFhirContextForUnitTest(myCtx);
		mySubscriber.setInterceptorBroadcasterForUnitTest(myInterceptorBroadcaster);
		mySubscriber.setSubscriptionRegistryForUnitTest(mySubscriptionRegistry);

		// Route all generic-client creation through the mock so deliveries are observable
		myCtx.setRestfulClientFactory(myRestfulClientFactory);
		when(myRestfulClientFactory.newGenericClient(any())).thenReturn(myGenericClient);
	}

	@Test
	public void testWrongTypeIgnored() {
		Message<String> message = new GenericMessage<>("HELLO");

		// Nothing should happen
		mySubscriber.handleMessage(message);
	}

	@Test
	public void testSubscriptionWithNoId() {
		ResourceDeliveryMessage payload = new ResourceDeliveryMessage();
		payload.setSubscription(new CanonicalSubscription());

		// Nothing should happen
		mySubscriber.handleMessage(new ResourceDeliveryJsonMessage(payload));
	}

	@Test
	public void testRestHookDeliverySuccessful() {
		when(myInterceptorBroadcaster.callHooks(any(), any())).thenReturn(true);

		mySubscriber.handleMessage(new ResourceDeliveryJsonMessage(buildPatientCreatePayload()));

		verify(myGenericClient, times(1)).update();
	}

	@Test
	public void testRestHookDeliveryFails_ShouldRollBack() {
		when(myInterceptorBroadcaster.callHooks(any(), any())).thenReturn(true);

		ResourceDeliveryMessage payload = buildPatientCreatePayload();

		when(myGenericClient.update()).thenThrow(new InternalErrorException("FOO"));

		try {
			mySubscriber.handleMessage(new ResourceDeliveryJsonMessage(payload));
			fail();
		} catch (MessagingException e) {
			assertEquals("Failure handling subscription payload for subscription: Subscription/123; nested exception is ca.uhn.fhir.rest.server.exceptions.InternalErrorException: FOO", e.getMessage());
		}

		verify(myGenericClient, times(1)).update();
	}

	@Test
	public void testRestHookDeliveryFails_InterceptorDealsWithIt() {
		when(myInterceptorBroadcaster.callHooks(eq(Pointcut.SUBSCRIPTION_BEFORE_DELIVERY), any())).thenReturn(true);
		when(myInterceptorBroadcaster.callHooks(eq(Pointcut.SUBSCRIPTION_BEFORE_REST_HOOK_DELIVERY), any())).thenReturn(true);
		when(myInterceptorBroadcaster.callHooks(eq(Pointcut.SUBSCRIPTION_AFTER_DELIVERY_FAILED), any())).thenReturn(false);

		ResourceDeliveryMessage payload = buildPatientCreatePayload();

		when(myGenericClient.update()).thenThrow(new InternalErrorException("FOO"));

		// This shouldn't throw an exception
		mySubscriber.handleMessage(new ResourceDeliveryJsonMessage(payload));

		verify(myGenericClient, times(1)).update();
	}

	@Test
	public void testRestHookDeliveryAbortedByInterceptor() {
		when(myInterceptorBroadcaster.callHooks(eq(Pointcut.SUBSCRIPTION_BEFORE_DELIVERY), any())).thenReturn(true);
		when(myInterceptorBroadcaster.callHooks(eq(Pointcut.SUBSCRIPTION_BEFORE_REST_HOOK_DELIVERY), any())).thenReturn(false);

		mySubscriber.handleMessage(new ResourceDeliveryJsonMessage(buildPatientCreatePayload()));

		verify(myGenericClient, times(0)).update();
	}

	@Test
	public void testInterceptorBroadcasterAbortsDelivery() {
		// Was previously an empty (vacuously passing) test. Verifies that a veto at
		// SUBSCRIPTION_BEFORE_DELIVERY prevents any client interaction at all.
		when(myInterceptorBroadcaster.callHooks(eq(Pointcut.SUBSCRIPTION_BEFORE_DELIVERY), any())).thenReturn(false);

		mySubscriber.handleMessage(new ResourceDeliveryJsonMessage(buildPatientCreatePayload()));

		verify(myGenericClient, times(0)).update();
	}

	/**
	 * Builds a CREATE delivery message for an active Patient, bound to a canonical
	 * subscription (Subscription/123) targeting a REST hook endpoint with a JSON
	 * payload. This setup was previously duplicated across several tests.
	 */
	private ResourceDeliveryMessage buildPatientCreatePayload() {
		Patient patient = new Patient();
		patient.setActive(true);

		CanonicalSubscription subscription = new CanonicalSubscription();
		subscription.setIdElement(new IdType("Subscription/123"));
		subscription.setEndpointUrl("http://example.com/fhir");
		subscription.setPayloadString("application/fhir+json");

		ResourceDeliveryMessage payload = new ResourceDeliveryMessage();
		payload.setSubscription(subscription);
		payload.setPayload(myCtx, patient, EncodingEnum.JSON);
		payload.setOperationType(ResourceModifiedMessage.OperationTypeEnum.CREATE);
		return payload;
	}

}

View File

@ -2,6 +2,7 @@ package ca.uhn.fhir.parser;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.test.BaseTest;
import ca.uhn.fhir.util.StopWatch;
import ca.uhn.fhir.util.TestUtil;
import com.google.common.collect.Sets;
@ -26,7 +27,7 @@ import static org.hamcrest.core.IsNot.not;
import static org.junit.Assert.*;
import static org.mockito.Mockito.mock;
public class JsonParserR4Test {
public class JsonParserR4Test extends BaseTest {
private static final Logger ourLog = LoggerFactory.getLogger(JsonParserR4Test.class);
private static FhirContext ourCtx = FhirContext.forR4();
@ -43,6 +44,14 @@ public class JsonParserR4Test {
return b;
}
@Test
public void testEntitiesNotConverted() throws IOException {
Device input = loadResource(ourCtx, Device.class, "/entities-from-cerner.json");
String narrative = input.getText().getDivAsString();
ourLog.info(narrative);
}
@Test
public void testEncodeExtensionOnBinaryData() {
Binary b = new Binary();

View File

@ -0,0 +1,6 @@
{
"resourceType": "Device",
"text": {
"div": "<div xmlns=\"http://www.w3.org/1999/xhtml\">&#174;</div>"
}
}

View File

@ -25,6 +25,8 @@ import ca.uhn.fhir.rest.api.EncodingEnum;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import com.google.common.base.Charsets;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import org.hl7.fhir.instance.model.api.IBaseResource;
import java.io.IOException;
@ -34,6 +36,10 @@ import java.util.zip.GZIPInputStream;
public class BaseTest {
static {
ToStringBuilder.setDefaultStyle(ToStringStyle.SHORT_PREFIX_STYLE);
}
protected String loadResource(String theClasspath) throws IOException {
Function<InputStream, InputStream> streamTransform = t->t;
return loadResource(theClasspath, streamTransform);

24
pom.xml
View File

@ -614,7 +614,7 @@
<jetty_version>9.4.14.v20181114</jetty_version>
<jsr305_version>3.0.2</jsr305_version>
<!--<hibernate_version>5.2.10.Final</hibernate_version>-->
<hibernate_version>5.4.4.Final</hibernate_version>
<hibernate_version>5.4.6.Final</hibernate_version>
<!-- Update lucene version when you update hibernate-search version -->
<hibernate_search_version>5.11.3.Final</hibernate_search_version>
<lucene_version>5.5.5</lucene_version>
@ -631,11 +631,11 @@
<plexus_compiler_api_version>2.8.5</plexus_compiler_api_version>
<servicemix_saxon_version>9.5.1-5_1</servicemix_saxon_version>
<servicemix_xmlresolver_version>1.2_5</servicemix_xmlresolver_version>
<slf4j_version>1.7.25</slf4j_version>
<spring_version>5.1.8.RELEASE</spring_version>
<slf4j_version>1.7.28</slf4j_version>
<spring_version>5.2.0.RELEASE</spring_version>
<!-- FYI: Spring Data JPA 2.1.9 causes test failures due to unexpected cascading deletes -->
<spring_data_version>2.1.8.RELEASE</spring_data_version>
<spring_boot_version>2.1.1.RELEASE</spring_boot_version>
<spring_data_version>2.2.0.RELEASE</spring_data_version>
<spring_boot_version>2.2.0.RELEASE</spring_boot_version>
<spring_retry_version>1.2.2.RELEASE</spring_retry_version>
<stax2_api_version>3.1.4</stax2_api_version>
@ -1281,12 +1281,12 @@
<dependency>
<groupId>org.mariadb.jdbc</groupId>
<artifactId>mariadb-java-client</artifactId>
<version>2.4.2</version>
<version>2.5.1</version>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-core</artifactId>
<version>3.0.0</version>
<version>3.1.0</version>
</dependency>
<dependency>
<groupId>org.postgresql</groupId>
@ -1550,7 +1550,7 @@
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>3.8.0</version>
<version>3.8.1</version>
<configuration>
<source>1.8</source>
<target>1.8</target>
@ -1613,7 +1613,7 @@
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId>
<version>3.1.1</version>
<version>3.1.2</version>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
@ -1638,7 +1638,7 @@
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-source-plugin</artifactId>
<version>3.0.1</version>
<version>3.1.0</version>
<dependencies>
<dependency>
<groupId>org.codehaus.plexus</groupId>
@ -1661,7 +1661,7 @@
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-war-plugin</artifactId>
<version>3.2.0</version>
<version>3.2.3</version>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
@ -1721,7 +1721,7 @@
<plugin>
<groupId>org.jacoco</groupId>
<artifactId>jacoco-maven-plugin</artifactId>
<version>0.8.4</version>
<version>0.8.5</version>
<configuration>
<excludes>
<exclude>ca/uhn/fhir/model/dstu2/**/*.class</exclude>

View File

@ -12,9 +12,12 @@
latest versions (dependent HAPI modules listed in brackets):
<![CDATA[
<ul>
<li>Hibernate Core (JPA): 5.4.2.Final -&gt; 5.4.4.Final</li>
<li>SLF4j (All): 1.7.25 -&gt; 1.7.28</li>
<li>Spring (JPA): 5.1.8.Final -&gt; 5.2.0.Final</li>
<li>Hibernate Core (JPA): 5.4.2.Final -&gt; 5.4.6.Final</li>
<li>Hibernate Search (JPA): 5.11.1.Final -&gt; 5.11.3.Final</li>
<li>Jackson Databind (JPA): 2.9.9 -&gt; 2.9.10 (CVE-2019-16335, CVE-2019-14540)</li>
<li>Spring Boot (Boot): 2.1.1 -&gt; 2.2.0</li>
</ul>
]]>
</action>