commit f6db6ff068
Merge remote-tracking branch 'remotes/origin/master' into ks-subscription-delivery-queue-configurable-name

@@ -28,7 +28,12 @@ jobs:
mavenOptions: '-Xmx2048m $(MAVEN_OPTS) -Dorg.slf4j.simpleLogger.showDateTime=true -Dorg.slf4j.simpleLogger.dateTimeFormat=HH:mm:ss,SSS -Duser.timezone=America/Toronto'
- script: bash <(curl https://codecov.io/bash) -t $(CODECOV_TOKEN)
displayName: 'codecov'

- task: PublishCodeCoverageResults@1
inputs:
codeCoverageTool: 'JaCoCo'
summaryFileLocation: $(System.DefaultWorkingDirectory)/hapi-fhir-jacoco/target/site/jacoco-report/jacoco.xml
reportDirectory: $(System.DefaultWorkingDirectory)/hapi-fhir-jacoco/target/site/jacoco-report/
failIfCoverageEmpty: false

# Potential Additional Maven3 Options:
#publishJUnitResults: true

@@ -1,64 +0,0 @@
package ca.uhn.fhir.model.api;

/*
* #%L
* HAPI FHIR - Core Library
* %%
* Copyright (C) 2014 - 2019 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/

import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.model.primitive.StringDt;
import ca.uhn.fhir.util.ElementUtil;

public class BaseBundle extends BaseElement /*implements IElement*/ {

private static final long serialVersionUID = 3349586533271409727L;
private StringDt myAuthorName;
private StringDt myAuthorUri;
private IdDt myId;

public StringDt getAuthorName() {
if (myAuthorName == null) {
myAuthorName = new StringDt();
}
return myAuthorName;
}

public StringDt getAuthorUri() {
if (myAuthorUri == null) {
myAuthorUri = new StringDt();
}
return myAuthorUri;
}

public IdDt getId() {
if (myId==null) {
myId=new IdDt();
}
return myId;
}

@Override
public boolean isEmpty() {
return ElementUtil.isEmpty(myAuthorName, myAuthorUri);
}

public void setId(IdDt theId) {
myId = theId;
}

}

@@ -1,38 +0,0 @@
package ca.uhn.fhir.model.primitive;

/*
* #%L
* HAPI FHIR - Core Library
* %%
* Copyright (C) 2014 - 2019 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/

import ca.uhn.fhir.model.api.IElement;
import ca.uhn.fhir.model.api.annotation.DatatypeDef;

@DatatypeDef(name = "idref")
public class IdrefDt extends StringDt {

private IElement myTarget;

public IElement getTarget() {
return myTarget;
}

public void setTarget(IElement theTarget) {
myTarget = theTarget;
}
}

@@ -26,6 +26,7 @@ import java.util.*;

public class Constants {

public static final String CT_TEXT_CSV = "text/csv";
public static final String HEADER_REQUEST_ID = "X-Request-ID";
public static final String CACHE_CONTROL_MAX_RESULTS = "max-results";
public static final String CACHE_CONTROL_NO_CACHE = "no-cache";

@@ -738,25 +738,15 @@ public class FhirTerser {
valueType = (Class<? extends IBase>) valueType.getSuperclass();
}

if (childElementDef == null) {
StringBuilder b = new StringBuilder();
b.append("Found value of type[");
b.append(nextValue.getClass().getSimpleName());
b.append("] which is not valid for field[");
b.append(nextChild.getElementName());
b.append("] in ");
b.append(childDef.getName());
b.append(" - Valid types: ");
for (Iterator<String> iter = new TreeSet<>(nextChild.getValidChildNames()).iterator(); iter.hasNext(); ) {
BaseRuntimeElementDefinition<?> childByName = nextChild.getChildByName(iter.next());
b.append(childByName.getImplementingClass().getSimpleName());
if (iter.hasNext()) {
b.append(", ");
}
}
throw new DataFormatException(b.toString());
Class<? extends IBase> typeClass = nextValue.getClass();
while (childElementDef == null && IBase.class.isAssignableFrom(typeClass)) {
//noinspection unchecked
typeClass = (Class<? extends IBase>) typeClass.getSuperclass();
childElementDef = nextChild.getChildElementDefinitionByDatatype(typeClass);
}

Validate.notNull(childElementDef, "Found value of type[%s] which is not valid for field[%s] in %s", nextValue.getClass(), nextChild.getElementName(), childDef.getName());

visit(nextValue, nextChild, childElementDef, theCallback, theContainingElementPath, theChildDefinitionPath, theElementDefinitionPath);
}
}

@@ -0,0 +1,12 @@
package ca.uhn.fhir.rest.api;

import org.junit.Test;

public class ConstantsTest {

@Test
public void testConstants() {
new Constants();
}

}

@@ -20,30 +20,46 @@ package ca.uhn.fhir.cli;
* #L%
*/

import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
import ca.uhn.fhir.jpa.entity.TermConcept;
import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.jpa.term.IHapiTerminologyLoaderSvc;
import ca.uhn.fhir.jpa.term.TerminologyLoaderSvcImpl;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import ca.uhn.fhir.rest.client.interceptor.LoggingInterceptor;
import ca.uhn.fhir.util.ParametersUtil;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.io.IOUtils;
import org.hl7.fhir.instance.model.api.IBaseParameters;
import org.hl7.fhir.r4.model.CodeSystem;

import java.io.ByteArrayInputStream;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.stream.Collectors;

import static org.apache.commons.lang3.StringUtils.isBlank;

public class UploadTerminologyCommand extends BaseCommand {
public static final String UPLOAD_TERMINOLOGY = "upload-terminology";
// TODO: Don't use qualified names for loggers in HAPI CLI.
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(UploadTerminologyCommand.class);
private static final String UPLOAD_EXTERNAL_CODE_SYSTEM = "upload-external-code-system";

@Override
public String getCommandDescription() {
return "Uploads a terminology package (e.g. a SNOMED CT ZIP file) to a server, using the $" + UPLOAD_EXTERNAL_CODE_SYSTEM + " operation.";
return "Uploads a terminology package (e.g. a SNOMED CT ZIP file or a custom terminology bundle) to a server, using the appropriate operation.";
}

@Override
public String getCommandName() {
return "upload-terminology";
return UPLOAD_TERMINOLOGY;
}

@Override
@@ -55,6 +71,7 @@ public class UploadTerminologyCommand extends BaseCommand {
addRequiredOption(options, "u", "url", true, "The code system URL associated with this upload (e.g. " + IHapiTerminologyLoaderSvc.SCT_URI + ")");
addOptionalOption(options, "d", "data", true, "Local file to use to upload (can be a raw file or a ZIP containing the raw file)");
addOptionalOption(options, null, "custom", false, "Indicates that this upload uses the HAPI FHIR custom external terminology format");
addOptionalOption(options, "m", "mode", true, "The upload mode: SNAPSHOT (default), ADD, REMOVE");
addBasicAuthOption(options);
addVerboseLoggingOption(options);

@@ -65,6 +82,14 @@ public class UploadTerminologyCommand extends BaseCommand {
public void run(CommandLine theCommandLine) throws ParseException {
parseFhirContext(theCommandLine);

ModeEnum mode;
String modeString = theCommandLine.getOptionValue("m", "SNAPSHOT");
try {
mode = ModeEnum.valueOf(modeString);
} catch (IllegalArgumentException e) {
throw new ParseException("Invalid mode: " + modeString);
}

String termUrl = theCommandLine.getOptionValue("u");
if (isBlank(termUrl)) {
throw new ParseException("No URL provided");
@@ -77,29 +102,118 @@ public class UploadTerminologyCommand extends BaseCommand {

IGenericClient client = super.newClient(theCommandLine);
IBaseParameters inputParameters = ParametersUtil.newInstance(myFhirCtx);
ParametersUtil.addParameterToParametersUri(myFhirCtx, inputParameters, "url", termUrl);
for (String next : datafile) {
ParametersUtil.addParameterToParametersString(myFhirCtx, inputParameters, "localfile", next);
}
if (theCommandLine.hasOption("custom")) {
ParametersUtil.addParameterToParametersCode(myFhirCtx, inputParameters, "contentMode", "custom");
}

if (theCommandLine.hasOption(VERBOSE_LOGGING_PARAM)) {
client.registerInterceptor(new LoggingInterceptor(true));
}

switch (mode) {
case SNAPSHOT:
uploadSnapshot(inputParameters, termUrl, datafile, theCommandLine, client);
break;
case ADD:
uploadDelta(theCommandLine, termUrl, datafile, client, inputParameters, JpaConstants.OPERATION_APPLY_CODESYSTEM_DELTA_ADD, false);
break;
case REMOVE:
uploadDelta(theCommandLine, termUrl, datafile, client, inputParameters, JpaConstants.OPERATION_APPLY_CODESYSTEM_DELTA_REMOVE, true);
break;
}

}

private void uploadDelta(CommandLine theCommandLine, String theTermUrl, String[] theDatafile, IGenericClient theClient, IBaseParameters theInputParameters, String theOperationName, boolean theFlatten) {
ParametersUtil.addParameterToParametersUri(myFhirCtx, theInputParameters, "url", theTermUrl);

List<IHapiTerminologyLoaderSvc.FileDescriptor> fileDescriptors = new ArrayList<>();

for (String next : theDatafile) {
try (FileInputStream inputStream = new FileInputStream(next)) {
byte[] bytes = IOUtils.toByteArray(inputStream);
fileDescriptors.add(new IHapiTerminologyLoaderSvc.FileDescriptor() {
@Override
public String getFilename() {
return next;
}

@Override
public InputStream getInputStream() {
return new ByteArrayInputStream(bytes);
}
});
} catch (IOException e) {
throw new CommandFailureException("Failed to read from file \"" + next + "\": " + e.getMessage());
}
}

TermCodeSystemVersion codeSystemVersion = new TermCodeSystemVersion();
TerminologyLoaderSvcImpl.LoadedFileDescriptors descriptors = new TerminologyLoaderSvcImpl.LoadedFileDescriptors(fileDescriptors);
TerminologyLoaderSvcImpl.processCustomTerminologyFiles(descriptors, codeSystemVersion);

CodeSystem codeSystem = new CodeSystem();
codeSystem.setUrl(theTermUrl);
addCodesToCodeSystem(codeSystemVersion.getConcepts(), codeSystem.getConcept(), theFlatten);

ParametersUtil.addParameterToParameters(myFhirCtx, theInputParameters, "value", codeSystem);

if (theCommandLine.hasOption("custom")) {
ParametersUtil.addParameterToParametersCode(myFhirCtx, theInputParameters, "contentMode", "custom");
}

ourLog.info("Beginning upload - This may take a while...");

IBaseParameters response = client
IBaseParameters response = theClient
.operation()
.onType(myFhirCtx.getResourceDefinition("CodeSystem").getImplementingClass())
.named(UPLOAD_EXTERNAL_CODE_SYSTEM)
.withParameters(inputParameters)
.named(theOperationName)
.withParameters(theInputParameters)
.execute();

ourLog.info("Upload complete!");
ourLog.info("Response:\n{}", myFhirCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(response));
}

private void addCodesToCodeSystem(Collection<TermConcept> theSourceConcepts, List<CodeSystem.ConceptDefinitionComponent> theTargetConcept, boolean theFlatten) {
for (TermConcept nextSourceConcept : theSourceConcepts) {

CodeSystem.ConceptDefinitionComponent nextTarget = new CodeSystem.ConceptDefinitionComponent();
nextTarget.setCode(nextSourceConcept.getCode());
nextTarget.setDisplay(nextSourceConcept.getDisplay());
theTargetConcept.add(nextTarget);

List<TermConcept> children = nextSourceConcept.getChildren().stream().map(t -> t.getChild()).collect(Collectors.toList());
if (theFlatten) {
addCodesToCodeSystem(children, theTargetConcept, theFlatten);
} else {
addCodesToCodeSystem(children, nextTarget.getConcept(), theFlatten);
}

}
}

private void uploadSnapshot(IBaseParameters theInputparameters, String theTermUrl, String[] theDatafile, CommandLine theCommandLine, IGenericClient theClient) {
ParametersUtil.addParameterToParametersUri(myFhirCtx, theInputparameters, "url", theTermUrl);
for (String next : theDatafile) {
ParametersUtil.addParameterToParametersString(myFhirCtx, theInputparameters, "localfile", next);
}
if (theCommandLine.hasOption("custom")) {
ParametersUtil.addParameterToParametersCode(myFhirCtx, theInputparameters, "contentMode", "custom");
}

ourLog.info("Beginning upload - This may take a while...");

IBaseParameters response = theClient
.operation()
.onType(myFhirCtx.getResourceDefinition("CodeSystem").getImplementingClass())
.named(JpaConstants.OPERATION_UPLOAD_EXTERNAL_CODE_SYSTEM)
.withParameters(theInputparameters)
.execute();

ourLog.info("Upload complete!");
ourLog.info("Response:\n{}", myFhirCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(response));
}

private enum ModeEnum {
SNAPSHOT, ADD, REMOVE
}

}

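For reference, the ADD and REMOVE delta modes handled by this command can be exercised from the command line in the same way the test that follows does. A minimal sketch, assuming the standard hapi-fhir-cli launcher; the base URL, code system URL, and CSV file names below are placeholders:

hapi-fhir-cli upload-terminology -v r4 -m ADD -t http://localhost:8000 -u http://example.org/lab-codes -d concepts.csv -d hierarchy.csv

Omitting -m falls back to the SNAPSHOT behaviour, which invokes the $upload-external-code-system operation as before.
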
@@ -0,0 +1,198 @@
package ca.uhn.fhir.cli;

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.provider.TerminologyUploaderProvider;
import ca.uhn.fhir.jpa.term.IHapiTerminologyLoaderSvc;
import ca.uhn.fhir.jpa.term.IHapiTerminologySvc;
import ca.uhn.fhir.rest.server.RestfulServer;
import ca.uhn.fhir.test.utilities.JettyUtil;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.servlet.ServletHandler;
import org.eclipse.jetty.servlet.ServletHolder;
import org.hamcrest.Matchers;
import org.hl7.fhir.r4.model.CodeSystem;
import org.hl7.fhir.r4.model.IdType;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;

import java.io.*;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;

import static org.junit.Assert.*;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.*;

@RunWith(MockitoJUnitRunner.class)
public class UploadTerminologyCommandTest {

static {
System.setProperty("test", "true");
}

private Server myServer;
private FhirContext myCtx = FhirContext.forR4();
@Mock
private IHapiTerminologyLoaderSvc myTerminologyLoaderSvc;
@Mock
private IHapiTerminologySvc myTerminologySvc;
@Captor
private ArgumentCaptor<List<IHapiTerminologyLoaderSvc.FileDescriptor>> myDescriptorList;
@Captor
private ArgumentCaptor<CodeSystem> myCodeSystemCaptor;

private int myPort;
private String myConceptsFileName = "target/concepts.csv";
private String myHierarchyFileName = "target/hierarchy.csv";
private File myConceptsFile = new File(myConceptsFileName);
private File myHierarchyFile = new File(myHierarchyFileName);

@Test
public void testTerminologyUpload_AddDelta() throws IOException {

writeConceptAndHierarchyFiles();

when(myTerminologySvc.applyDeltaCodesystemsAdd(eq("http://foo"), any(), any())).thenReturn(new AtomicInteger(100));

App.main(new String[]{
UploadTerminologyCommand.UPLOAD_TERMINOLOGY,
"-v", "r4",
"-m", "ADD",
"-t", "http://localhost:" + myPort,
"-u", "http://foo",
"-d", myConceptsFileName,
"-d", myHierarchyFileName
});

verify(myTerminologySvc, times(1)).applyDeltaCodesystemsAdd(any(), isNull(), myCodeSystemCaptor.capture());

CodeSystem codeSystem = myCodeSystemCaptor.getValue();
assertEquals(1, codeSystem.getConcept().size());
assertEquals("http://foo", codeSystem.getUrl());
assertEquals("ANIMALS", codeSystem.getConcept().get(0).getCode());
assertEquals("Animals", codeSystem.getConcept().get(0).getDisplay());
assertEquals(2, codeSystem.getConcept().get(0).getConcept().size());
assertEquals("CATS", codeSystem.getConcept().get(0).getConcept().get(0).getCode());
assertEquals("Cats", codeSystem.getConcept().get(0).getConcept().get(0).getDisplay());
assertEquals("DOGS", codeSystem.getConcept().get(0).getConcept().get(1).getCode());
assertEquals("Dogs", codeSystem.getConcept().get(0).getConcept().get(1).getDisplay());
}

@Test
public void testTerminologyUpload_RemoveDelta() throws IOException {
writeConceptAndHierarchyFiles();

when(myTerminologySvc.applyDeltaCodesystemsRemove(eq("http://foo"), any())).thenReturn(new AtomicInteger(100));

App.main(new String[]{
UploadTerminologyCommand.UPLOAD_TERMINOLOGY,
"-v", "r4",
"-m", "REMOVE",
"-t", "http://localhost:" + myPort,
"-u", "http://foo",
"-d", myConceptsFileName,
"-d", myHierarchyFileName
});

verify(myTerminologySvc, times(1)).applyDeltaCodesystemsRemove(any(), myCodeSystemCaptor.capture());

CodeSystem codeSystem = myCodeSystemCaptor.getValue();
assertEquals(3, codeSystem.getConcept().size());
assertEquals("http://foo", codeSystem.getUrl());
assertEquals("ANIMALS", codeSystem.getConcept().get(0).getCode());
assertEquals("Animals", codeSystem.getConcept().get(0).getDisplay());
assertEquals("CATS", codeSystem.getConcept().get(1).getCode());
assertEquals("Cats", codeSystem.getConcept().get(1).getDisplay());
assertEquals("DOGS", codeSystem.getConcept().get(2).getCode());
assertEquals("Dogs", codeSystem.getConcept().get(2).getDisplay());
}

@Test
public void testTerminologyUpload_Snapshot() throws IOException {

writeConceptAndHierarchyFiles();

when(myTerminologyLoaderSvc.loadCustom(eq("http://foo"), any(), any())).thenReturn(new IHapiTerminologyLoaderSvc.UploadStatistics(100, new IdType("CodeSystem/123")));

App.main(new String[]{
UploadTerminologyCommand.UPLOAD_TERMINOLOGY,
"-v", "r4",
"-m", "SNAPSHOT",
"--custom",
"-t", "http://localhost:" + myPort,
"-u", "http://foo",
"-d", myConceptsFileName,
"-d", myHierarchyFileName
});

verify(myTerminologyLoaderSvc, times(1)).loadCustom(any(), myDescriptorList.capture(), any());

List<IHapiTerminologyLoaderSvc.FileDescriptor> listOfDescriptors = myDescriptorList.getValue();
assertEquals(2, listOfDescriptors.size());

assertThat(listOfDescriptors.get(0).getFilename(), Matchers.endsWith("concepts.csv"));
assertInputStreamEqualsFile(myConceptsFile, listOfDescriptors.get(0).getInputStream());
assertThat(listOfDescriptors.get(1).getFilename(), Matchers.endsWith("hierarchy.csv"));
assertInputStreamEqualsFile(myHierarchyFile, listOfDescriptors.get(1).getInputStream());
}


private void writeConceptAndHierarchyFiles() throws IOException {
try (FileWriter w = new FileWriter(myConceptsFile, false)) {
w.append("CODE,DISPLAY\n");
w.append("ANIMALS,Animals\n");
w.append("CATS,Cats\n");
w.append("DOGS,Dogs\n");
}

try (FileWriter w = new FileWriter(myHierarchyFile, false)) {
w.append("PARENT,CHILD\n");
w.append("ANIMALS,CATS\n");
w.append("ANIMALS,DOGS\n");
}
}

private void assertInputStreamEqualsFile(File theExpectedFile, InputStream theActualInputStream) throws IOException {
try (FileInputStream fis = new FileInputStream(theExpectedFile)) {
byte[] expectedBytes = IOUtils.toByteArray(fis);
byte[] actualBytes = IOUtils.toByteArray(theActualInputStream);
assertArrayEquals(expectedBytes, actualBytes);
}
}

@After
public void after() throws Exception {
JettyUtil.closeServer(myServer);

FileUtils.deleteQuietly(myConceptsFile);
FileUtils.deleteQuietly(myHierarchyFile);
}

@Before
public void start() throws Exception {
myServer = new Server(0);

TerminologyUploaderProvider provider = new TerminologyUploaderProvider(myCtx, myTerminologyLoaderSvc, myTerminologySvc);

ServletHandler proxyHandler = new ServletHandler();
RestfulServer servlet = new RestfulServer(myCtx);
servlet.registerProvider(provider);
ServletHolder servletHolder = new ServletHolder(servlet);
proxyHandler.addServletWithMapping(servletHolder, "/*");
myServer.setHandler(proxyHandler);
JettyUtil.startServer(myServer);
myPort = JettyUtil.getPortForStartedServer(myServer);

}


}

@@ -145,14 +145,6 @@ public class FhirResourceDaoValueSetR4 extends FhirResourceDaoR4<ValueSet> imple
return retVal;
}

private void validateIncludes(String name, List<ConceptSetComponent> listToValidate) {
for (ConceptSetComponent nextExclude : listToValidate) {
if (isBlank(nextExclude.getSystem()) && !ElementUtil.isEmpty(nextExclude.getConcept(), nextExclude.getFilter())) {
throw new InvalidRequestException("ValueSet contains " + name + " criteria with no system defined");
}
}
}

@Override
public ValueSet expandByIdentifier(String theUri, String theFilter) {
if (isBlank(theUri)) {

@@ -151,14 +151,6 @@ public class FhirResourceDaoValueSetR5 extends FhirResourceDaoR5<ValueSet> imple
return retVal;
}

private void validateIncludes(String name, List<ConceptSetComponent> listToValidate) {
for (ConceptSetComponent nextExclude : listToValidate) {
if (isBlank(nextExclude.getSystem()) && !ElementUtil.isEmpty(nextExclude.getConcept(), nextExclude.getFilter())) {
throw new InvalidRequestException("ValueSet contains " + name + " criteria with no system defined");
}
}
}

@Override
public ValueSet expandByIdentifier(String theUri, String theFilter) {
if (isBlank(theUri)) {

@@ -365,7 +365,10 @@ public class TermConcept implements Serializable {

@Override
public String toString() {
return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE).append("code", myCode).append("display", myDisplay).build();
return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE)
.append("code", myCode)
.append("display", myDisplay)
.build();
}

public List<IContextValidationSupport.BaseConceptProperty> toValidationProperties() {

@@ -1,39 +0,0 @@
package ca.uhn.fhir.jpa.provider;

/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2019 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/

import ca.uhn.fhir.jpa.model.util.JpaConstants;

/**
* @deprecated TerminologyUploaderProvider
*/
@Deprecated
public class BaseTerminologyUploaderProvider {

// FIXME: remove these before 4.0.0
public static final String UPLOAD_EXTERNAL_CODE_SYSTEM = JpaConstants.OPERATION_UPLOAD_EXTERNAL_CODE_SYSTEM;
public static final String CONCEPT_COUNT = "conceptCount";
public static final String TARGET = "target";
public static final String SYSTEM = "system";
public static final String PARENT_CODE = "parentCode";
public static final String VALUE = "value";

}

@@ -21,18 +21,17 @@ package ca.uhn.fhir.jpa.provider;
*/

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.subscription.ISubscriptionTriggeringSvc;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.jpa.subscription.ISubscriptionTriggeringSvc;
import ca.uhn.fhir.model.dstu2.valueset.ResourceTypeEnum;
import ca.uhn.fhir.rest.annotation.IdParam;
import ca.uhn.fhir.rest.annotation.Operation;
import ca.uhn.fhir.rest.annotation.OperationParam;
import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.param.UriParam;
import ca.uhn.fhir.rest.server.IResourceProvider;
import org.hl7.fhir.instance.model.api.IBaseParameters;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
import org.springframework.beans.factory.annotation.Autowired;

import java.util.List;
@@ -48,8 +47,8 @@ public class SubscriptionTriggeringProvider implements IResourceProvider {

@Operation(name = JpaConstants.OPERATION_TRIGGER_SUBSCRIPTION)
public IBaseParameters triggerSubscription(
@OperationParam(name = RESOURCE_ID, min = 0, max = OperationParam.MAX_UNLIMITED) List<UriParam> theResourceIds,
@OperationParam(name = SEARCH_URL, min = 0, max = OperationParam.MAX_UNLIMITED) List<StringParam> theSearchUrls
@OperationParam(name = RESOURCE_ID, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "uri") List<IPrimitiveType<String>> theResourceIds,
@OperationParam(name = SEARCH_URL, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "string") List<IPrimitiveType<String>> theSearchUrls
) {
return mySubscriptionTriggeringSvc.triggerSubscription(theResourceIds, theSearchUrls, null);
}
@@ -57,8 +56,8 @@ public class SubscriptionTriggeringProvider implements IResourceProvider {
@Operation(name = JpaConstants.OPERATION_TRIGGER_SUBSCRIPTION)
public IBaseParameters triggerSubscription(
@IdParam IIdType theSubscriptionId,
@OperationParam(name = RESOURCE_ID, min = 0, max = OperationParam.MAX_UNLIMITED) List<UriParam> theResourceIds,
@OperationParam(name = SEARCH_URL, min = 0, max = OperationParam.MAX_UNLIMITED) List<StringParam> theSearchUrls
@OperationParam(name = RESOURCE_ID, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "uri") List<IPrimitiveType<String>> theResourceIds,
@OperationParam(name = SEARCH_URL, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "string") List<IPrimitiveType<String>> theSearchUrls
) {
return mySubscriptionTriggeringSvc.triggerSubscription(theResourceIds, theSearchUrls, theSubscriptionId);
}

@@ -54,9 +54,10 @@ public class TerminologyUploaderProvider extends BaseJpaProvider {

public static final String CONCEPT_COUNT = "conceptCount";
public static final String TARGET = "target";
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(TerminologyUploaderProvider.class);
public static final String PARENT_CODE = "parentCode";
public static final String VALUE = "value";
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(TerminologyUploaderProvider.class);
private static final String PACKAGE = "package";

@Autowired
private FhirContext myCtx;
@@ -87,12 +88,12 @@ public class TerminologyUploaderProvider extends BaseJpaProvider {
* $apply-codesystem-delta-add
* </code>
*/
@Operation(typeName="CodeSystem", name = JpaConstants.OPERATION_APPLY_CODESYSTEM_DELTA_ADD, idempotent = false, returnParameters = {
@Operation(typeName = "CodeSystem", name = JpaConstants.OPERATION_APPLY_CODESYSTEM_DELTA_ADD, idempotent = false, returnParameters = {
})
public IBaseParameters applyCodeSystemDeltaAdd(
HttpServletRequest theServletRequest,
@OperationParam(name = PARENT_CODE, min = 0, max = 1) IPrimitiveType<String> theParentCode,
@OperationParam(name = VALUE, min = 1, max = 1) IBaseResource theValue,
@OperationParam(name = VALUE, min = 0, max = 1) IBaseResource theValue,
RequestDetails theRequestDetails
) {

@@ -104,6 +105,8 @@ public class TerminologyUploaderProvider extends BaseJpaProvider {
value = (CodeSystem) theValue;
} else if (theValue instanceof org.hl7.fhir.dstu3.model.CodeSystem) {
value = VersionConvertor_30_40.convertCodeSystem((org.hl7.fhir.dstu3.model.CodeSystem) theValue);
} else if (theValue instanceof org.hl7.fhir.r5.model.CodeSystem) {
value = org.hl7.fhir.convertors.conv40_50.CodeSystem.convertCodeSystem((org.hl7.fhir.r5.model.CodeSystem) theValue);
} else {
throw new InvalidRequestException("Value must be present and be a CodeSystem");
}
@@ -130,7 +133,7 @@ public class TerminologyUploaderProvider extends BaseJpaProvider {
* $apply-codesystem-delta-remove
* </code>
*/
@Operation(typeName="CodeSystem", name = JpaConstants.OPERATION_APPLY_CODESYSTEM_DELTA_REMOVE, idempotent = false, returnParameters = {
@Operation(typeName = "CodeSystem", name = JpaConstants.OPERATION_APPLY_CODESYSTEM_DELTA_REMOVE, idempotent = false, returnParameters = {
})
public IBaseParameters applyCodeSystemDeltaRemove(
HttpServletRequest theServletRequest,
@@ -146,6 +149,8 @@ public class TerminologyUploaderProvider extends BaseJpaProvider {
value = (CodeSystem) theValue;
} else if (theValue instanceof org.hl7.fhir.dstu3.model.CodeSystem) {
value = VersionConvertor_30_40.convertCodeSystem((org.hl7.fhir.dstu3.model.CodeSystem) theValue);
} else if (theValue instanceof org.hl7.fhir.r5.model.CodeSystem) {
value = org.hl7.fhir.convertors.conv40_50.CodeSystem.convertCodeSystem((org.hl7.fhir.r5.model.CodeSystem) theValue);
} else {
throw new InvalidRequestException("Value must be present and be a CodeSystem");
}
@@ -165,13 +170,12 @@ public class TerminologyUploaderProvider extends BaseJpaProvider {

}


/**
* <code>
* $upload-external-codesystem
* </code>
*/
@Operation(typeName="CodeSystem", name = JpaConstants.OPERATION_UPLOAD_EXTERNAL_CODE_SYSTEM, idempotent = false, returnParameters = {
@Operation(typeName = "CodeSystem", name = JpaConstants.OPERATION_UPLOAD_EXTERNAL_CODE_SYSTEM, idempotent = false, returnParameters = {
// @OperationParam(name = "conceptCount", type = IntegerType.class, min = 1)
})
public IBaseParameters uploadExternalCodeSystem(
@@ -179,7 +183,7 @@ public class TerminologyUploaderProvider extends BaseJpaProvider {
@OperationParam(name = "url", min = 1, typeName = "uri") IPrimitiveType<String> theCodeSystemUrl,
@OperationParam(name = "contentMode", min = 0, typeName = "code") IPrimitiveType<String> theContentMode,
@OperationParam(name = "localfile", min = 1, max = OperationParam.MAX_UNLIMITED, typeName = "string") List<IPrimitiveType<String>> theLocalFile,
@OperationParam(name = "package", min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "attachment") List<ICompositeType> thePackage,
@OperationParam(name = PACKAGE, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "attachment") List<ICompositeType> thePackage,
RequestDetails theRequestDetails
) {

@@ -1,47 +0,0 @@
package ca.uhn.fhir.jpa.provider.dstu3;

/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2019 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/

import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.jpa.provider.TerminologyUploaderProvider;
import ca.uhn.fhir.rest.annotation.Operation;
import ca.uhn.fhir.rest.annotation.OperationParam;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import org.hl7.fhir.convertors.VersionConvertor_30_40;
import org.hl7.fhir.dstu3.model.Attachment;
import org.hl7.fhir.dstu3.model.IntegerType;
import org.hl7.fhir.dstu3.model.Parameters;
import org.hl7.fhir.dstu3.model.StringType;
import org.hl7.fhir.exceptions.FHIRException;

import javax.servlet.http.HttpServletRequest;
import java.util.ArrayList;
import java.util.List;

/**
* @deprecated Use {@link TerminologyUploaderProvider} instead
*/
@Deprecated
public class TerminologyUploaderProviderDstu3 extends TerminologyUploaderProvider {
// nothing
}

@@ -1,42 +0,0 @@
package ca.uhn.fhir.jpa.provider.r4;

/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2019 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/

import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.jpa.provider.TerminologyUploaderProvider;
import ca.uhn.fhir.rest.annotation.Operation;
import ca.uhn.fhir.rest.annotation.OperationParam;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.param.StringParam;
import org.hl7.fhir.r4.model.Attachment;
import org.hl7.fhir.r4.model.IntegerType;
import org.hl7.fhir.r4.model.Parameters;
import org.hl7.fhir.r4.model.StringType;

import javax.servlet.http.HttpServletRequest;
import java.util.List;

/**
* @deprecated Use {@link TerminologyUploaderProvider} instead
*/
public class TerminologyUploaderProviderR4 extends TerminologyUploaderProvider {
// nothing
}

@@ -58,7 +58,6 @@ public class JpaConformanceProviderR5 extends org.hl7.fhir.r5.hapi.rest.server.S
@CoverageIgnore
public JpaConformanceProviderR5(){
super();
super.setCache(false);
setIncludeResourceCounts(true);
}

@@ -66,11 +65,10 @@ public class JpaConformanceProviderR5 extends org.hl7.fhir.r5.hapi.rest.server.S
* Constructor
*/
public JpaConformanceProviderR5(RestfulServer theRestfulServer, IFhirSystemDao<Bundle, Meta> theSystemDao, DaoConfig theDaoConfig) {
super(theRestfulServer);
super();
myRestfulServer = theRestfulServer;
mySystemDao = theSystemDao;
myDaoConfig = theDaoConfig;
super.setCache(false);
setIncludeResourceCounts(true);
setSearchParamRegistry(theSystemDao.getSearchParamRegistry());
}

@@ -21,15 +21,15 @@ package ca.uhn.fhir.jpa.subscription;
*/

import ca.uhn.fhir.rest.annotation.IdParam;
import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.param.UriParam;
import org.hl7.fhir.instance.model.api.IBaseParameters;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.instance.model.api.IPrimitiveType;

import java.util.List;

public interface ISubscriptionTriggeringSvc {
IBaseParameters triggerSubscription(List<UriParam> theResourceIds, List<StringParam> theSearchUrls, @IdParam IIdType theSubscriptionId);

IBaseParameters triggerSubscription(List<IPrimitiveType<String>> theResourceIds, List<IPrimitiveType<String>> theSearchUrls, @IdParam IIdType theSubscriptionId);

void runDeliveryPass();
}

@@ -100,7 +100,8 @@ public class SubscriptionTriggeringSvcImpl implements ISubscriptionTriggeringSvc
private ISchedulerService mySchedulerService;

@Override
public IBaseParameters triggerSubscription(List<UriParam> theResourceIds, List<StringParam> theSearchUrls, @IdParam IIdType theSubscriptionId) {
public IBaseParameters triggerSubscription(List<IPrimitiveType<String>> theResourceIds, List<IPrimitiveType<String>> theSearchUrls, @IdParam IIdType theSubscriptionId) {

if (myDaoConfig.getSupportedSubscriptionTypes().isEmpty()) {
throw new PreconditionFailedException("Subscription processing not active on this server");
}
@@ -115,8 +116,8 @@ public class SubscriptionTriggeringSvcImpl implements ISubscriptionTriggeringSvc
subscriptionDao.read(subscriptionId);
}

List<UriParam> resourceIds = ObjectUtils.defaultIfNull(theResourceIds, Collections.emptyList());
List<StringParam> searchUrls = ObjectUtils.defaultIfNull(theSearchUrls, Collections.emptyList());
List<IPrimitiveType<String>> resourceIds = ObjectUtils.defaultIfNull(theResourceIds, Collections.emptyList());
List<IPrimitiveType<String>> searchUrls = ObjectUtils.defaultIfNull(theSearchUrls, Collections.emptyList());

// Make sure we have at least one resource ID or search URL
if (resourceIds.size() == 0 && searchUrls.size() == 0) {
@@ -124,14 +125,14 @@ public class SubscriptionTriggeringSvcImpl implements ISubscriptionTriggeringSvc
}

// Resource URLs must be compete
for (UriParam next : resourceIds) {
for (IPrimitiveType<String> next : resourceIds) {
IdType resourceId = new IdType(next.getValue());
ValidateUtil.isTrueOrThrowInvalidRequest(resourceId.hasResourceType(), RESOURCE_ID + " parameter must have resource type");
ValidateUtil.isTrueOrThrowInvalidRequest(resourceId.hasIdPart(), RESOURCE_ID + " parameter must have resource ID part");
}

// Search URLs must be valid
for (StringParam next : searchUrls) {
for (IPrimitiveType<String> next : searchUrls) {
if (!next.getValue().contains("?")) {
throw new InvalidRequestException("Search URL is not valid (must be in the form \"[resource type]?[optional params]\")");
}
@@ -139,8 +140,8 @@ public class SubscriptionTriggeringSvcImpl implements ISubscriptionTriggeringSvc

SubscriptionTriggeringJobDetails jobDetails = new SubscriptionTriggeringJobDetails();
jobDetails.setJobId(UUID.randomUUID().toString());
jobDetails.setRemainingResourceIds(resourceIds.stream().map(UriParam::getValue).collect(Collectors.toList()));
jobDetails.setRemainingSearchUrls(searchUrls.stream().map(StringParam::getValue).collect(Collectors.toList()));
jobDetails.setRemainingResourceIds(resourceIds.stream().map(t->t.getValue()).collect(Collectors.toList()));
jobDetails.setRemainingSearchUrls(searchUrls.stream().map(t->t.getValue()).collect(Collectors.toList()));
if (theSubscriptionId != null) {
jobDetails.setSubscriptionId(theSubscriptionId.getIdPart());
}

@@ -36,6 +36,7 @@ import org.hl7.fhir.r4.model.Enumerations;
import org.hl7.fhir.r4.model.ValueSet;
import org.springframework.beans.factory.annotation.Autowired;

import javax.annotation.Nonnull;
import javax.validation.constraints.NotNull;
import java.io.*;
import java.util.*;
@@ -81,12 +82,10 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {

private static final int LOG_INCREMENT = 1000;
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(TerminologyLoaderSvcImpl.class);

@Autowired
private IHapiTerminologySvc myTermSvc;

// FYI: Hardcoded to R4 because that's what the term svc uses internally
private final FhirContext myCtx = FhirContext.forR4();
@Autowired
private IHapiTerminologySvc myTermSvc;

private void dropCircularRefs(TermConcept theConcept, ArrayList<String> theChain, Map<String, TermConcept> theCode2concept, Counter theCircularCounter) {

@@ -121,69 +120,6 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {

}

private void iterateOverZipFile(LoadedFileDescriptors theDescriptors, String theFileNamePart, IRecordHandler theHandler, char theDelimiter, QuoteMode theQuoteMode, boolean theIsPartialFilename) {

boolean foundMatch = false;
for (FileDescriptor nextZipBytes : theDescriptors.getUncompressedFileDescriptors()) {
String nextFilename = nextZipBytes.getFilename();
boolean matches;
if (theIsPartialFilename) {
matches = nextFilename.contains(theFileNamePart);
} else {
matches = nextFilename.endsWith("/" + theFileNamePart) || nextFilename.equals(theFileNamePart);
}

if (matches) {
ourLog.info("Processing file {}", nextFilename);
foundMatch = true;

Reader reader;
CSVParser parsed;
try {
reader = new InputStreamReader(nextZipBytes.getInputStream(), Charsets.UTF_8);

if (ourLog.isTraceEnabled()) {
String contents = IOUtils.toString(reader);
ourLog.info("File contents for: {}\n{}", nextFilename, contents);
reader = new StringReader(contents);
}

CSVFormat format = CSVFormat.newFormat(theDelimiter).withFirstRecordAsHeader();
if (theQuoteMode != null) {
format = format.withQuote('"').withQuoteMode(theQuoteMode);
}
parsed = new CSVParser(reader, format);
Iterator<CSVRecord> iter = parsed.iterator();
ourLog.debug("Header map: {}", parsed.getHeaderMap());

int count = 0;
int nextLoggedCount = 0;
while (iter.hasNext()) {
CSVRecord nextRecord = iter.next();
if (nextRecord.isConsistent() == false) {
continue;
}
theHandler.accept(nextRecord);
count++;
if (count >= nextLoggedCount) {
ourLog.info(" * Processed {} records in {}", count, nextFilename);
nextLoggedCount += LOG_INCREMENT;
}
}

} catch (IOException e) {
throw new InternalErrorException(e);
}
}

}

if (!foundMatch) {
throw new InvalidRequestException("Did not find file matching " + theFileNamePart);
}

}

@Override
public UploadStatistics loadImgthla(List<FileDescriptor> theFiles, RequestDetails theRequestDetails) {
LoadedFileDescriptors descriptors = null;
@@ -280,7 +216,6 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {
@Override
public UploadStatistics loadCustom(String theSystem, List<FileDescriptor> theFiles, RequestDetails theRequestDetails) {
try (LoadedFileDescriptors descriptors = new LoadedFileDescriptors(theFiles)) {
final Map<String, TermConcept> code2concept = new HashMap<>();
IRecordHandler handler;

Optional<String> codeSystemContent = loadFile(descriptors, CUSTOM_CODESYSTEM_JSON, CUSTOM_CODESYSTEM_XML);
@@ -299,23 +234,7 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {
}

TermCodeSystemVersion csv = new TermCodeSystemVersion();

// Concept File
handler = new ConceptHandler(code2concept, csv);
iterateOverZipFile(descriptors, CUSTOM_CONCEPTS_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);

// Hierarchy
if (descriptors.hasFile(CUSTOM_HIERARCHY_FILE)) {
handler = new HierarchyHandler(code2concept);
iterateOverZipFile(descriptors, CUSTOM_HIERARCHY_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);
}

// Add root concepts to CodeSystemVersion
for (TermConcept nextConcept : code2concept.values()) {
if (nextConcept.getParents().isEmpty()) {
csv.getConcepts().add(nextConcept);
}
}
final Map<String, TermConcept> code2concept = processCustomTerminologyFiles(descriptors, csv);

IIdType target = storeCodeSystem(theRequestDetails, csv, codeSystem, null, null);
return new UploadStatistics(code2concept.size(), target);
@@ -383,12 +302,6 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {
try {
reader = new InputStreamReader(nextZipBytes.getInputStream(), Charsets.UTF_8);

if (ourLog.isTraceEnabled()) {
String contents = IOUtils.toString(reader);
ourLog.info("File contents for: {}\n{}", nextFilename, contents);
reader = new StringReader(contents);
}

LineNumberReader lnr = new LineNumberReader(reader);
while (lnr.readLine() != null) {
}
@@ -414,12 +327,6 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {
try {
reader = new InputStreamReader(nextZipBytes.getInputStream(), Charsets.UTF_8);

if (ourLog.isTraceEnabled()) {
String contents = IOUtils.toString(reader);
ourLog.info("File contents for: {}\n{}", nextFilename, contents);
reader = new StringReader(contents);
}

LineNumberReader lnr = new LineNumberReader(reader);
while (lnr.readLine() != null) {
}
@@ -666,12 +573,12 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {
return retVal;
}

static class LoadedFileDescriptors implements Closeable {
public static class LoadedFileDescriptors implements Closeable {

private List<File> myTemporaryFiles = new ArrayList<>();
private List<IHapiTerminologyLoaderSvc.FileDescriptor> myUncompressedFileDescriptors = new ArrayList<>();

LoadedFileDescriptors(List<IHapiTerminologyLoaderSvc.FileDescriptor> theFileDescriptors) {
public LoadedFileDescriptors(List<IHapiTerminologyLoaderSvc.FileDescriptor> theFileDescriptors) {
try {
for (FileDescriptor next : theFileDescriptors) {
if (next.getFilename().toLowerCase().endsWith(".zip")) {
@@ -765,6 +672,92 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {

}

@Nonnull
public static Map<String, TermConcept> processCustomTerminologyFiles(LoadedFileDescriptors theDescriptors, TermCodeSystemVersion theCsv) {
IRecordHandler handler;// Concept File
final Map<String, TermConcept> code2concept = new HashMap<>();
handler = new ConceptHandler(code2concept, theCsv);
iterateOverZipFile(theDescriptors, CUSTOM_CONCEPTS_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);

// Hierarchy
if (theDescriptors.hasFile(CUSTOM_HIERARCHY_FILE)) {
handler = new HierarchyHandler(code2concept);
iterateOverZipFile(theDescriptors, CUSTOM_HIERARCHY_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);
}

// Add root concepts to CodeSystemVersion
for (TermConcept nextConcept : code2concept.values()) {
if (nextConcept.getParents().isEmpty()) {
theCsv.getConcepts().add(nextConcept);
}
}
return code2concept;
}

private static void iterateOverZipFile(LoadedFileDescriptors theDescriptors, String theFileNamePart, IRecordHandler theHandler, char theDelimiter, QuoteMode theQuoteMode, boolean theIsPartialFilename) {

boolean foundMatch = false;
for (FileDescriptor nextZipBytes : theDescriptors.getUncompressedFileDescriptors()) {
String nextFilename = nextZipBytes.getFilename();
boolean matches;
if (theIsPartialFilename) {
matches = nextFilename.contains(theFileNamePart);
} else {
matches = nextFilename.endsWith("/" + theFileNamePart) || nextFilename.equals(theFileNamePart);
}

if (matches) {
ourLog.info("Processing file {}", nextFilename);
foundMatch = true;

Reader reader;
CSVParser parsed;
try {
reader = new InputStreamReader(nextZipBytes.getInputStream(), Charsets.UTF_8);

parsed = newCsvRecords(theDelimiter, theQuoteMode, reader);
Iterator<CSVRecord> iter = parsed.iterator();
ourLog.debug("Header map: {}", parsed.getHeaderMap());

int count = 0;
int nextLoggedCount = 0;
while (iter.hasNext()) {
CSVRecord nextRecord = iter.next();
if (nextRecord.isConsistent() == false) {
continue;
}
theHandler.accept(nextRecord);
count++;
if (count >= nextLoggedCount) {
ourLog.info(" * Processed {} records in {}", count, nextFilename);
nextLoggedCount += LOG_INCREMENT;
}
}

} catch (IOException e) {
throw new InternalErrorException(e);
}
}

}

if (!foundMatch) {
throw new InvalidRequestException("Did not find file matching " + theFileNamePart);
}

}

@Nonnull
public static CSVParser newCsvRecords(char theDelimiter, QuoteMode theQuoteMode, Reader theReader) throws IOException {
CSVParser parsed;
CSVFormat format = CSVFormat.newFormat(theDelimiter).withFirstRecordAsHeader();
if (theQuoteMode != null) {
format = format.withQuote('"').withQuoteMode(theQuoteMode);
}
parsed = new CSVParser(theReader, format);
return parsed;
}

public static String firstNonBlank(String... theStrings) {
String retVal = "";
for (String nextString : theStrings) {

@@ -37,6 +37,8 @@ import static org.apache.commons.lang3.StringUtils.trim;
public class ConceptHandler implements IRecordHandler {

private static final Logger ourLog = LoggerFactory.getLogger(ConceptHandler.class);
public static final String CODE = "CODE";
public static final String DISPLAY = "DISPLAY";
private final Map<String, TermConcept> myCode2Concept;
private final TermCodeSystemVersion myCodeSystemVersion;

@@ -47,9 +49,9 @@ public class ConceptHandler implements IRecordHandler {

@Override
public void accept(CSVRecord theRecord) {
String code = trim(theRecord.get("CODE"));
String code = trim(theRecord.get(CODE));
if (isNotBlank(code)) {
String display = trim(theRecord.get("DISPLAY"));
String display = trim(theRecord.get(DISPLAY));

Validate.isTrue(!myCode2Concept.containsKey(code), "The code %s has appeared more than once", code);

@@ -493,6 +493,12 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
* 20 should be prefetched since that's the initial page size
*/

await().until(()->{
return runInTransaction(()->{
Search search = mySearchEntityDao.findByUuidAndFetchIncludes(uuid).orElseThrow(() -> new InternalErrorException(""));
return search.getNumFound() >= 50;
});
});
runInTransaction(() -> {
Search search = mySearchEntityDao.findByUuidAndFetchIncludes(uuid).orElseThrow(() -> new InternalErrorException(""));
assertEquals(50, search.getNumFound());

@@ -47,93 +47,6 @@ public class ResourceProviderR4CodeSystemTest extends BaseResourceProviderR4Test

}

@Test
public void testApplyDeltaAdd() {

CodeSystem delta = new CodeSystem();
delta.setUrl("http://example.com/labCodes");
delta.setName("Example Hospital Lab Codes");
delta.setStatus(Enumerations.PublicationStatus.ACTIVE);
delta.setContent(CodeSystem.CodeSystemContentMode.NOTPRESENT);
delta.setUrl("http://foo");
CodeSystem.ConceptDefinitionComponent chem = delta
.addConcept()
.setCode("CHEM")
.setDisplay("Chemistry Tests");
chem
.addConcept()
.setCode("HB")
.setDisplay("Hemoglobin");
chem
.addConcept()
.setCode("NEUT")
.setDisplay("Neutrophil");
CodeSystem.ConceptDefinitionComponent micro = delta
.addConcept()
.setCode("MICRO")
.setDisplay("Microbiology Tests");
micro
.addConcept()
.setCode("C&S")
.setDisplay("Culture & Sensitivity");

LoggingInterceptor interceptor = new LoggingInterceptor(true);
ourClient.registerInterceptor(interceptor);
Parameters outcome = ourClient
.operation()
.onType(CodeSystem.class)
.named(JpaConstants.OPERATION_APPLY_CODESYSTEM_DELTA_ADD)
.withParameter(Parameters.class, TerminologyUploaderProvider.VALUE, delta)
.prettyPrint()
.execute();
ourClient.unregisterInterceptor(interceptor);

String encoded = myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome);
ourLog.info(encoded);
assertThat(encoded, containsString("\"valueInteger\": 5"));
}

@Test
public void testApplyDeltaRemove() {
// Create not-present
CodeSystem cs = new CodeSystem();
cs.setUrl("http://foo");
cs.setContent(CodeSystem.CodeSystemContentMode.NOTPRESENT);
ourClient.create().resource(cs).execute();

CodeSystem delta = new CodeSystem();
delta.setUrl("http://foo");
delta
.addConcept()
.setCode("codeA")
.setDisplay("displayA");

// Add
ourClient
.operation()
.onType(CodeSystem.class)
.named(JpaConstants.OPERATION_APPLY_CODESYSTEM_DELTA_ADD)
.withParameter(Parameters.class, TerminologyUploaderProvider.VALUE, delta)
.prettyPrint()
.execute();

// Remove
LoggingInterceptor interceptor = new LoggingInterceptor(true);
ourClient.registerInterceptor(interceptor);
Parameters outcome = ourClient
.operation()
.onType(CodeSystem.class)
.named(JpaConstants.OPERATION_APPLY_CODESYSTEM_DELTA_REMOVE)
.withParameter(Parameters.class, TerminologyUploaderProvider.VALUE, delta)
.prettyPrint()
.execute();
ourClient.unregisterInterceptor(interceptor);

String encoded = myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome);
ourLog.info(encoded);
assertThat(encoded, containsString("\"valueInteger\": 1"));
}

@Test
public void testLookupOnExternalCode() {
ResourceProviderR4ValueSetTest.createExternalCs(myCodeSystemDao, myResourceTableDao, myTermSvc, mySrd);

@@ -1,7 +1,10 @@
package ca.uhn.fhir.jpa.provider.r4;

import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.jpa.provider.TerminologyUploaderProvider;
import ca.uhn.fhir.jpa.provider.dstu3.TerminologyUploaderProviderDstu3Test;
import ca.uhn.fhir.jpa.term.IHapiTerminologyLoaderSvc;
import ca.uhn.fhir.rest.client.interceptor.LoggingInterceptor;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.util.TestUtil;
import org.apache.commons.io.IOUtils;

@@ -18,6 +21,7 @@ import java.util.List;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;

import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.matchesPattern;
import static org.junit.Assert.*;
@@ -45,7 +49,6 @@ public class TerminologyUploaderProviderR4Test extends BaseResourceProviderR4Tes
	public void testUploadInvalidUrl() throws Exception {
		byte[] packageBytes = createSctZip();

		//@formatter:off
		try {
			ourClient
				.operation()

@@ -58,7 +61,6 @@ public class TerminologyUploaderProviderR4Test extends BaseResourceProviderR4Tes
		} catch (InvalidRequestException e) {
			assertEquals("HTTP 400 Bad Request: Unknown URL: http://snomed.info/sctFOO", e.getMessage());
		}
		//@formatter:on
	}

	@Test
@@ -172,6 +174,93 @@ public class TerminologyUploaderProviderR4Test extends BaseResourceProviderR4Tes
		assertThat(((IntegerType) respParam.getParameter().get(1).getValue()).getValue(), greaterThan(1));
	}

	@Test
	public void testApplyDeltaAdd() {

		CodeSystem delta = new CodeSystem();
		delta.setUrl("http://example.com/labCodes");
		delta.setName("Example Hospital Lab Codes");
		delta.setStatus(Enumerations.PublicationStatus.ACTIVE);
		delta.setContent(CodeSystem.CodeSystemContentMode.NOTPRESENT);
		delta.setUrl("http://foo");
		CodeSystem.ConceptDefinitionComponent chem = delta
			.addConcept()
			.setCode("CHEM")
			.setDisplay("Chemistry Tests");
		chem
			.addConcept()
			.setCode("HB")
			.setDisplay("Hemoglobin");
		chem
			.addConcept()
			.setCode("NEUT")
			.setDisplay("Neutrophil");
		CodeSystem.ConceptDefinitionComponent micro = delta
			.addConcept()
			.setCode("MICRO")
			.setDisplay("Microbiology Tests");
		micro
			.addConcept()
			.setCode("C&S")
			.setDisplay("Culture & Sensitivity");

		LoggingInterceptor interceptor = new LoggingInterceptor(true);
		ourClient.registerInterceptor(interceptor);
		Parameters outcome = ourClient
			.operation()
			.onType(CodeSystem.class)
			.named(JpaConstants.OPERATION_APPLY_CODESYSTEM_DELTA_ADD)
			.withParameter(Parameters.class, TerminologyUploaderProvider.VALUE, delta)
			.prettyPrint()
			.execute();
		ourClient.unregisterInterceptor(interceptor);

		String encoded = myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome);
		ourLog.info(encoded);
		assertThat(encoded, containsString("\"valueInteger\": 5"));
	}

	@Test
	public void testApplyDeltaRemove() {
		// Create not-present
		CodeSystem cs = new CodeSystem();
		cs.setUrl("http://foo");
		cs.setContent(CodeSystem.CodeSystemContentMode.NOTPRESENT);
		ourClient.create().resource(cs).execute();

		CodeSystem delta = new CodeSystem();
		delta.setUrl("http://foo");
		delta
			.addConcept()
			.setCode("codeA")
			.setDisplay("displayA");

		// Add
		ourClient
			.operation()
			.onType(CodeSystem.class)
			.named(JpaConstants.OPERATION_APPLY_CODESYSTEM_DELTA_ADD)
			.withParameter(Parameters.class, TerminologyUploaderProvider.VALUE, delta)
			.prettyPrint()
			.execute();

		// Remove
		LoggingInterceptor interceptor = new LoggingInterceptor(true);
		ourClient.registerInterceptor(interceptor);
		Parameters outcome = ourClient
			.operation()
			.onType(CodeSystem.class)
			.named(JpaConstants.OPERATION_APPLY_CODESYSTEM_DELTA_REMOVE)
			.withParameter(Parameters.class, TerminologyUploaderProvider.VALUE, delta)
			.prettyPrint()
			.execute();
		ourClient.unregisterInterceptor(interceptor);

		String encoded = myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome);
		ourLog.info(encoded);
		assertThat(encoded, containsString("\"valueInteger\": 1"));
	}

	@AfterClass
	public static void afterClassClearContext() {
		TestUtil.clearAllStaticFieldsForUnitTest();
File diff suppressed because it is too large
@@ -228,8 +228,6 @@ public class SubscriptionTriggeringDstu3Test extends BaseResourceProviderDstu3Te
		responseValue = response.getParameter().get(0).getValue().primitiveValue();
		assertThat(responseValue, containsString("Subscription triggering job submitted as JOB ID"));

		// Thread.sleep(1000000000);

		waitForSize(51, ourUpdatedObservations);
		waitForSize(0, ourCreatedObservations);
		waitForSize(0, ourCreatedPatients);

@@ -237,6 +235,72 @@ public class SubscriptionTriggeringDstu3Test extends BaseResourceProviderDstu3Te

	}

	@Test
	public void testTriggerUsingOrSeparatedList_MultipleStrings() throws Exception {
		myDaoConfig.setSearchPreFetchThresholds(Lists.newArrayList(13, 22, 100));

		String payload = "application/fhir+json";
		IdType sub2id = createSubscription("Patient?", payload, ourListenerServerBase).getIdElement();

		// Create lots
		for (int i = 0; i < 10; i++) {
			Patient p = new Patient();
			p.setId("P"+i);
			p.addName().setFamily("P" + i);
			ourClient.update().resource(p).execute();
		}
		waitForSize(10, ourUpdatedPatients);

		// Use multiple strings
		beforeReset();
		Parameters response = ourClient
			.operation()
			.onInstance(sub2id)
			.named(JpaConstants.OPERATION_TRIGGER_SUBSCRIPTION)
			.withParameter(Parameters.class, SubscriptionTriggeringProvider.SEARCH_URL, new StringType("Patient?_id=P0"))
			.andParameter(SubscriptionTriggeringProvider.SEARCH_URL, new StringType("Patient?_id=P1"))
			.andParameter(SubscriptionTriggeringProvider.SEARCH_URL, new StringType("Patient?_id=P2"))
			.execute();
		String responseValue = response.getParameter().get(0).getValue().primitiveValue();
		assertThat(responseValue, containsString("Subscription triggering job submitted as JOB ID"));

		waitForSize(0, ourCreatedPatients);
		waitForSize(3, ourUpdatedPatients);

	}

	@Test
	public void testTriggerUsingOrSeparatedList_SingleString() throws Exception {
		myDaoConfig.setSearchPreFetchThresholds(Lists.newArrayList(13, 22, 100));

		String payload = "application/fhir+json";
		IdType sub2id = createSubscription("Patient?", payload, ourListenerServerBase).getIdElement();

		// Create lots
		for (int i = 0; i < 10; i++) {
			Patient p = new Patient();
			p.setId("P"+i);
			p.addName().setFamily("P" + i);
			ourClient.update().resource(p).execute();
		}
		waitForSize(10, ourUpdatedPatients);

		// Use a single
		beforeReset();
		Parameters response = ourClient
			.operation()
			.onInstance(sub2id)
			.named(JpaConstants.OPERATION_TRIGGER_SUBSCRIPTION)
			.withParameter(Parameters.class, SubscriptionTriggeringProvider.SEARCH_URL, new StringType("Patient?_id=P0,P1,P2"))
			.execute();
		String responseValue = response.getParameter().get(0).getValue().primitiveValue();
		assertThat(responseValue, containsString("Subscription triggering job submitted as JOB ID"));

		waitForSize(0, ourCreatedPatients);
		waitForSize(3, ourUpdatedPatients);

	}

	@Test
	public void testTriggerUsingSearchesWithCount() throws Exception {
		String payload = "application/fhir+json";
@@ -6,7 +6,6 @@ import ca.uhn.fhir.jpa.dao.IFhirResourceDaoValueSet;
import ca.uhn.fhir.jpa.dao.dstu3.BaseJpaDstu3Test;
import ca.uhn.fhir.util.TestUtil;
import com.google.common.collect.Lists;
import org.hl7.fhir.convertors.VersionConvertor_30_40;
import org.hl7.fhir.dstu3.model.*;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
import org.junit.After;

@@ -17,13 +16,14 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;

import java.io.IOException;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;

import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.empty;
import static org.awaitility.Awaitility.await;
import static org.hamcrest.Matchers.*;
import static org.junit.Assert.*;

public class TerminologyLoaderSvcIntegrationDstu3Test extends BaseJpaDstu3Test {

@@ -106,6 +106,19 @@ public class TerminologyLoaderSvcIntegrationDstu3Test extends BaseJpaDstu3Test {
		assertThat(codes, empty());
	}

	@Test
	public void testStoreAndProcessDeferred() throws IOException {
		ZipCollectionBuilder files = new ZipCollectionBuilder();
		TerminologyLoaderSvcLoincTest.addLoincMandatoryFilesToZip(files);
		myLoader.loadLoinc(files.getFiles(), mySrd);

		myTermSvc.saveDeferred();

		runInTransaction(() -> {
			await().until(() -> myTermConceptMapDao.count(), greaterThan(0L));
		});
	}

	@Test
	public void testExpandWithPropertyString() throws Exception {
		ZipCollectionBuilder files = new ZipCollectionBuilder();
@@ -36,6 +36,7 @@ import java.util.Optional;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;

import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.Matchers.empty;
import static org.junit.Assert.*;
import static org.mockito.ArgumentMatchers.*;

@@ -1478,6 +1479,7 @@ public class TerminologySvcImplR4Test extends BaseJpaR4Test {
		assertEquals("8450-9", concept.getCode());
		assertEquals("Systolic blood pressure--expiration", concept.getDisplay());
		assertEquals(2, concept.getDesignations().size());
		assertThat(concept.toString(), containsString("8450"));

		List<TermConceptDesignation> designations = Lists.newArrayList(concept.getDesignations().iterator());
@@ -86,7 +86,7 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
		bulkExportCollection.addColumn("PID").nonNullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.LONG);
		bulkExportCollection.addColumn("JOB_PID").nonNullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.LONG);
		bulkExportCollection.addForeignKey("FK_BLKEXCOL_JOB").toColumn("JOB_PID").references("HFJ_BLK_EXPORT_JOB", "PID");
		bulkExportCollection.addColumn("RES_TYPE").nonNullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 35);
		bulkExportCollection.addColumn("RES_TYPE").nonNullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 40);
		bulkExportCollection.addColumn("TYPE_FILTER").nullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 1000);
		bulkExportCollection.addColumn("OPTLOCK").nonNullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.INT);

@@ -148,12 +148,13 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
			.unique(true)
			.withColumns("VALUESET_PID", "VALUESET_ORDER");

		// Account for RESTYPE_LEN column increasing from 30 to 35
		version.onTable("HFJ_RESOURCE").modifyColumn("RES_TYPE").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 35);
		version.onTable("HFJ_HISTORY_TAG").modifyColumn("RES_TYPE").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 35);
		version.onTable("HFJ_RES_LINK").modifyColumn("SOURCE_RESOURCE_TYPE").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 35);
		version.onTable("HFJ_RES_LINK").modifyColumn("TARGET_RESOURCE_TYPE").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 35);
		version.onTable("HFJ_RES_TAG").modifyColumn("RES_TYPE").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 35);
		// Account for RESTYPE_LEN column increasing from 30 to 40
		version.onTable("HFJ_RESOURCE").modifyColumn("RES_TYPE").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 40);
		version.onTable("HFJ_RES_VER").modifyColumn("RES_TYPE").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 40);
		version.onTable("HFJ_HISTORY_TAG").modifyColumn("RES_TYPE").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 40);
		version.onTable("HFJ_RES_LINK").modifyColumn("SOURCE_RESOURCE_TYPE").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 40);
		version.onTable("HFJ_RES_LINK").modifyColumn("TARGET_RESOURCE_TYPE").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 40);
		version.onTable("HFJ_RES_TAG").modifyColumn("RES_TYPE").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 40);

		// TermConceptDesignation
		version.startSectionWithMessage("Processing table: TRM_CONCEPT_DESIG");
@@ -38,10 +38,14 @@ import java.util.Collection;
public class ResourceHistoryTable extends BaseHasResource implements Serializable {

	public static final String IDX_RESVER_ID_VER = "IDX_RESVER_ID_VER";

	/**
	 * @see ResourceEncodingEnum
	 */
	// Don't reduce the visibility here, we reference this from Smile
	@SuppressWarnings("WeakerAccess")
	public static final int ENCODING_COL_LENGTH = 5;

	private static final long serialVersionUID = 1L;
	@Id
	@SequenceGenerator(name = "SEQ_RESOURCE_HISTORY_ID", sequenceName = "SEQ_RESOURCE_HISTORY_ID")

@@ -52,7 +56,7 @@ public class ResourceHistoryTable extends BaseHasResource implements Serializabl
	@Column(name = "RES_ID")
	private Long myResourceId;

	@Column(name = "RES_TYPE", length = 30, nullable = false)
	@Column(name = "RES_TYPE", length = ResourceTable.RESTYPE_LEN, nullable = false)
	private String myResourceType;

	@Column(name = "RES_VER", nullable = false)

@@ -82,19 +86,6 @@ public class ResourceHistoryTable extends BaseHasResource implements Serializabl
		return myProvenance;
	}

	public void setProvenance(ResourceHistoryProvenanceEntity theProvenance) {
		myProvenance = theProvenance;
	}

	public void addTag(ResourceHistoryTag theTag) {
		for (ResourceHistoryTag next : getTags()) {
			if (next.equals(theTag)) {
				return;
			}
		}
		getTags().add(theTag);
	}

	public void addTag(ResourceTag theTag) {
		ResourceHistoryTag tag = new ResourceHistoryTag(this, theTag.getTag());
		tag.setResourceType(theTag.getResourceType());

@@ -126,10 +117,6 @@ public class ResourceHistoryTable extends BaseHasResource implements Serializabl
		return myId;
	}

	public void setId(Long theId) {
		myId = theId;
	}

	public byte[] getResource() {
		return myResource;
	}

@@ -159,7 +146,7 @@ public class ResourceHistoryTable extends BaseHasResource implements Serializabl
	@Override
	public Collection<ResourceHistoryTag> getTags() {
		if (myTags == null) {
			myTags = new ArrayList<ResourceHistoryTag>();
			myTags = new ArrayList<>();
		}
		return myTags;
	}

@@ -173,13 +160,4 @@ public class ResourceHistoryTable extends BaseHasResource implements Serializabl
		myResourceVersion = theVersion;
	}

	public boolean hasTag(String theTerm, String theScheme) {
		for (ResourceHistoryTag next : getTags()) {
			if (next.getTag().getSystem().equals(theScheme) && next.getTag().getCode().equals(theTerm)) {
				return true;
			}
		}
		return false;
	}

}
@@ -47,7 +47,7 @@ import static org.apache.commons.lang3.StringUtils.defaultString;
	@Index(name = "IDX_INDEXSTATUS", columnList = "SP_INDEX_STATUS")
})
public class ResourceTable extends BaseHasResource implements Serializable {
	public static final int RESTYPE_LEN = 35;
	public static final int RESTYPE_LEN = 40;
	private static final int MAX_LANGUAGE_LENGTH = 20;
	private static final int MAX_PROFILE_LENGTH = 200;
	private static final long serialVersionUID = 1L;
@@ -573,15 +573,6 @@ public class ServerCapabilityStatementProvider extends BaseServerCapabilityState
		// ignore
	}

	private void sortRuntimeSearchParameters(List<RuntimeSearchParam> searchParameters) {
		Collections.sort(searchParameters, new Comparator<RuntimeSearchParam>() {
			@Override
			public int compare(RuntimeSearchParam theO1, RuntimeSearchParam theO2) {
				return theO1.getName().compareTo(theO2.getName());
			}
		});
	}

	private void sortSearchParameters(List<SearchParameter> searchParameters) {
		Collections.sort(searchParameters, new Comparator<SearchParameter>() {
			@Override
@@ -1022,6 +1022,7 @@ public class FhirTerserR4Test {
		assertThat(strings, Matchers.contains("http://foo"));
	}


	@Test
	public void testVisitWithModelVisitor2() {
		IModelVisitor2 visitor = mock(IModelVisitor2.class);

@@ -1048,6 +1049,97 @@ public class FhirTerserR4Test {

	}

	@Test
	public void testGetAllPopulatedChildElementsOfType() {

		Patient p = new Patient();
		p.setGender(Enumerations.AdministrativeGender.MALE);
		p.addIdentifier().setSystem("urn:foo");
		p.addAddress().addLine("Line1");
		p.addAddress().addLine("Line2");
		p.addName().setFamily("Line3");

		FhirTerser t = ourCtx.newTerser();
		List<StringType> strings = t.getAllPopulatedChildElementsOfType(p, StringType.class);

		assertEquals(3, strings.size());

		Set<String> allStrings = new HashSet<>();
		for (StringType next : strings) {
			allStrings.add(next.getValue());
		}

		assertThat(allStrings, containsInAnyOrder("Line1", "Line2", "Line3"));

	}

	@Test
	public void testMultiValueTypes() {

		Observation obs = new Observation();
		obs.setValue(new Quantity(123L));

		FhirTerser t = ourCtx.newTerser();

		// As string
		{
			List<Object> values = t.getValues(obs, "Observation.valueString");
			assertEquals(0, values.size());
		}

		// As quantity
		{
			List<Object> values = t.getValues(obs, "Observation.valueQuantity");
			assertEquals(1, values.size());
			Quantity actual = (Quantity) values.get(0);
			assertEquals("123", actual.getValueElement().getValueAsString());
		}
	}

	@Test
	public void testTerser() {

		//@formatter:off
		String msg = "<Observation xmlns=\"http://hl7.org/fhir\">\n" +
			" <text>\n" +
			" <status value=\"empty\"/>\n" +
			" <div xmlns=\"http://www.w3.org/1999/xhtml\"/>\n" +
			" </text>\n" +
			" <!-- The test code - may not be correct -->\n" +
			" <name>\n" +
			" <coding>\n" +
			" <system value=\"http://loinc.org\"/>\n" +
			" <code value=\"43151-0\"/>\n" +
			" <display value=\"Glucose Meter Device Panel\"/>\n" +
			" </coding>\n" +
			" </name>\n" +
			" <valueQuantity>\n" +
			" <value value=\"7.7\"/>\n" +
			" <units value=\"mmol/L\"/>\n" +
			" <system value=\"http://unitsofmeasure.org\"/>\n" +
			" </valueQuantity>\n" +
			" <appliesDateTime value=\"2014-05-28T22:12:21Z\"/>\n" +
			" <status value=\"final\"/>\n" +
			" <reliability value=\"ok\"/>\n" +
			" <subject>\n" +
			" <reference value=\"cid:patient@bundle\"/>\n" +
			" </subject>\n" +
			" <performer>\n" +
			" <reference value=\"cid:device@bundle\"></reference>\n" +
			" </performer>\n" +
			"</Observation>";
		//@formatter:on

		Observation parsed = ourCtx.newXmlParser().parseResource(Observation.class, msg);
		FhirTerser t = ourCtx.newTerser();

		List<Reference> elems = t.getAllPopulatedChildElementsOfType(parsed, Reference.class);
		assertEquals(2, elems.size());
		assertEquals("cid:patient@bundle", elems.get(0).getReferenceElement().getValue());
		assertEquals("cid:device@bundle", elems.get(1).getReferenceElement().getValue());
	}


	private List<String> toStrings(List<StringType> theStrings) {
		ArrayList<String> retVal = new ArrayList<String>();
		for (StringType next : theStrings) {
@@ -1,117 +0,0 @@
package ca.uhn.fhir.util;

import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;

import java.util.*;

import org.hl7.fhir.r4.model.*;
import org.hl7.fhir.r4.model.Enumerations.AdministrativeGender;
import org.junit.AfterClass;
import org.junit.Test;

import ca.uhn.fhir.context.FhirContext;

public class FhirTerserTest {

	private static FhirContext ourCtx = FhirContext.forR4();

	private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirTerserTest.class);

	@Test
	public void testGetAllPopulatedChildElementsOfType() {

		Patient p = new Patient();
		p.setGender(AdministrativeGender.MALE);
		p.addIdentifier().setSystem("urn:foo");
		p.addAddress().addLine("Line1");
		p.addAddress().addLine("Line2");
		p.addName().setFamily("Line3");

		FhirTerser t = ourCtx.newTerser();
		List<StringType> strings = t.getAllPopulatedChildElementsOfType(p, StringType.class);

		assertEquals(3, strings.size());

		Set<String> allStrings = new HashSet<>();
		for (StringType next : strings) {
			allStrings.add(next.getValue());
		}

		assertThat(allStrings, containsInAnyOrder("Line1", "Line2", "Line3"));

	}

	@Test
	public void testMultiValueTypes() {

		Observation obs = new Observation();
		obs.setValue(new Quantity(123L));

		FhirTerser t = ourCtx.newTerser();

		// As string
		{
			List<Object> values = t.getValues(obs, "Observation.valueString");
			assertEquals(0, values.size());
		}

		// As quantity
		{
			List<Object> values = t.getValues(obs, "Observation.valueQuantity");
			assertEquals(1, values.size());
			Quantity actual = (Quantity) values.get(0);
			assertEquals("123", actual.getValueElement().getValueAsString());
		}
	}

	@Test
	public void testTerser() {

		//@formatter:off
		String msg = "<Observation xmlns=\"http://hl7.org/fhir\">\n" +
			" <text>\n" +
			" <status value=\"empty\"/>\n" +
			" <div xmlns=\"http://www.w3.org/1999/xhtml\"/>\n" +
			" </text>\n" +
			" <!-- The test code - may not be correct -->\n" +
			" <name>\n" +
			" <coding>\n" +
			" <system value=\"http://loinc.org\"/>\n" +
			" <code value=\"43151-0\"/>\n" +
			" <display value=\"Glucose Meter Device Panel\"/>\n" +
			" </coding>\n" +
			" </name>\n" +
			" <valueQuantity>\n" +
			" <value value=\"7.7\"/>\n" +
			" <units value=\"mmol/L\"/>\n" +
			" <system value=\"http://unitsofmeasure.org\"/>\n" +
			" </valueQuantity>\n" +
			" <appliesDateTime value=\"2014-05-28T22:12:21Z\"/>\n" +
			" <status value=\"final\"/>\n" +
			" <reliability value=\"ok\"/>\n" +
			" <subject>\n" +
			" <reference value=\"cid:patient@bundle\"/>\n" +
			" </subject>\n" +
			" <performer>\n" +
			" <reference value=\"cid:device@bundle\"></reference>\n" +
			" </performer>\n" +
			"</Observation>";
		//@formatter:on

		Observation parsed = ourCtx.newXmlParser().parseResource(Observation.class, msg);
		FhirTerser t = ourCtx.newTerser();

		List<Reference> elems = t.getAllPopulatedChildElementsOfType(parsed, Reference.class);
		assertEquals(2, elems.size());
		assertEquals("cid:patient@bundle", elems.get(0).getReferenceElement().getValue());
		assertEquals("cid:device@bundle", elems.get(1).getReferenceElement().getValue());
	}

	@AfterClass
	public static void afterClassClearContext() {
		TestUtil.clearAllStaticFieldsForUnitTest();
	}

}
@@ -9,7 +9,7 @@ import org.hl7.fhir.r5.hapi.rest.server.ServerProfileProvider;
public class FhirServerR5 implements IFhirVersionServer {
	@Override
	public ServerCapabilityStatementProvider createServerConformanceProvider(RestfulServer theServer) {
		return new ServerCapabilityStatementProvider(theServer);
		return new ServerCapabilityStatementProvider();
	}

	@Override
@@ -15,8 +15,8 @@ import ca.uhn.fhir.rest.server.RestfulServer;
import ca.uhn.fhir.rest.server.RestfulServerConfiguration;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import ca.uhn.fhir.rest.server.method.*;
import ca.uhn.fhir.rest.server.method.OperationMethodBinding.ReturnType;
import ca.uhn.fhir.rest.server.method.SearchParameter;
import ca.uhn.fhir.rest.server.method.OperationMethodBinding.ReturnType;
import ca.uhn.fhir.rest.server.util.BaseServerCapabilityStatementProvider;
import org.apache.commons.lang3.StringUtils;
import org.hl7.fhir.exceptions.FHIRException;

@@ -77,16 +77,6 @@ public class ServerCapabilityStatementProvider extends BaseServerCapabilityState
		super();
	}

	/**
	 * Constructor
	 *
	 * @deprecated Use no-args constructor instead. Deprecated in 4.0.0
	 */
	@Deprecated
	public ServerCapabilityStatementProvider(RestfulServer theRestfulServer) {
		this();
	}

	/**
	 * Constructor - This is intended only for JAX-RS server
	 */

@@ -558,34 +548,12 @@ public class ServerCapabilityStatementProvider extends BaseServerCapabilityState
		return op;
	}

	/**
	 * Sets the cache property (default is true). If set to true, the same response will be returned for each invocation.
	 * <p>
	 * See the class documentation for an important note if you are extending this class
	 * </p>
	 *
	 * @deprecated Since 4.0.0 - This method no longer does anything
	 */
	@Deprecated
	public ServerCapabilityStatementProvider setCache(boolean theCache) {
		return this;
	}

	@Override
	public void setRestfulServer(RestfulServer theRestfulServer) {
		// ignore
	}

	private void sortRuntimeSearchParameters(List<RuntimeSearchParam> searchParameters) {
		Collections.sort(searchParameters, new Comparator<RuntimeSearchParam>() {
			@Override
			public int compare(RuntimeSearchParam theO1, RuntimeSearchParam theO2) {
				return theO1.getName().compareTo(theO2.getName());
			}
		});
	}

	private void sortSearchParameters(List<SearchParameter> searchParameters) {
	private void sortSearchParameters(List<SearchParameter> searchParameters) {
		Collections.sort(searchParameters, new Comparator<SearchParameter>() {
			@Override
			public int compare(SearchParameter theO1, SearchParameter theO2) {
@@ -0,0 +1,183 @@

package ca.uhn.fhir.rest.server;

import ca.uhn.fhir.model.api.Include;
import ca.uhn.fhir.model.api.annotation.Description;
import ca.uhn.fhir.rest.annotation.*;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.SortSpec;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.param.*;
import org.hl7.fhir.r5.model.Organization;
import org.hl7.fhir.r5.model.Patient;
import org.hl7.fhir.r5.model.Practitioner;

import java.util.Set;

// import ca.uhn.fhir.model.dstu.resource.Binary;
// import ca.uhn.fhir.model.dstu2.resource.Bundle;
// import ca.uhn.fhir.model.api.Bundle;


public class PatientResourceProvider implements IResourceProvider
{

	@Override
	public Class<Patient> getResourceType() {
		return Patient.class;
	}

	@Search()
	public IBundleProvider search(
			javax.servlet.http.HttpServletRequest theServletRequest,

			@Description(shortDefinition="The resource identity")
			@OptionalParam(name="_id")
			StringAndListParam theId,

			@Description(shortDefinition="The resource language")
			@OptionalParam(name="_language")
			StringAndListParam theResourceLanguage,

			@Description(shortDefinition="Search the contents of the resource's data using a fulltext search")
			@OptionalParam(name=Constants.PARAM_CONTENT)
			StringAndListParam theFtContent,

			@Description(shortDefinition="Search the contents of the resource's narrative using a fulltext search")
			@OptionalParam(name=Constants.PARAM_TEXT)
			StringAndListParam theFtText,

			@Description(shortDefinition="Search for resources which have the given tag")
			@OptionalParam(name=Constants.PARAM_TAG)
			TokenAndListParam theSearchForTag,

			@Description(shortDefinition="Search for resources which have the given security labels")
			@OptionalParam(name=Constants.PARAM_SECURITY)
			TokenAndListParam theSearchForSecurity,

			@Description(shortDefinition="Search for resources which have the given profile")
			@OptionalParam(name=Constants.PARAM_PROFILE)
			UriAndListParam theSearchForProfile,


			@Description(shortDefinition="A patient identifier")
			@OptionalParam(name="identifier")
			TokenAndListParam theIdentifier,

			@Description(shortDefinition="A portion of either family or given name of the patient")
			@OptionalParam(name="name")
			StringAndListParam theName,

			@Description(shortDefinition="A portion of the family name of the patient")
			@OptionalParam(name="family")
			StringAndListParam theFamily,

			@Description(shortDefinition="A portion of the given name of the patient")
			@OptionalParam(name="given")
			StringAndListParam theGiven,

			@Description(shortDefinition="A portion of either family or given name using some kind of phonetic matching algorithm")
			@OptionalParam(name="phonetic")
			StringAndListParam thePhonetic,

			@Description(shortDefinition="The value in any kind of telecom details of the patient")
			@OptionalParam(name="telecom")
			TokenAndListParam theTelecom,

			@Description(shortDefinition="A value in a phone contact")
			@OptionalParam(name="phone")
			TokenAndListParam thePhone,

			@Description(shortDefinition="A value in an email contact")
			@OptionalParam(name="email")
			TokenAndListParam theEmail,

			@Description(shortDefinition="An address in any kind of address/part of the patient")
			@OptionalParam(name="address")
			StringAndListParam theAddress,

			@Description(shortDefinition="A city specified in an address")
			@OptionalParam(name="address-city")
			StringAndListParam theAddress_city,

			@Description(shortDefinition="A state specified in an address")
			@OptionalParam(name="address-state")
			StringAndListParam theAddress_state,

			@Description(shortDefinition="A postalCode specified in an address")
			@OptionalParam(name="address-postalcode")
			StringAndListParam theAddress_postalcode,

			@Description(shortDefinition="A country specified in an address")
			@OptionalParam(name="address-country")
			StringAndListParam theAddress_country,

			@Description(shortDefinition="A use code specified in an address")
			@OptionalParam(name="address-use")
			TokenAndListParam theAddress_use,

			@Description(shortDefinition="Gender of the patient")
			@OptionalParam(name="gender")
			TokenAndListParam theGender,

			@Description(shortDefinition="Language code (irrespective of use value)")
			@OptionalParam(name="language")
			TokenAndListParam theLanguage,

			@Description(shortDefinition="The patient's date of birth")
			@OptionalParam(name="birthdate")
			DateRangeParam theBirthdate,

			@Description(shortDefinition="The organization at which this person is a patient")
			@OptionalParam(name="organization", targetTypes={ Organization.class } )
			ReferenceAndListParam theOrganization,

			@Description(shortDefinition="Patient's nominated care provider, could be a care manager, not the organization that manages the record")
			@OptionalParam(name="careprovider", targetTypes={ Organization.class , Practitioner.class } )
			ReferenceAndListParam theCareprovider,

			@Description(shortDefinition="Whether the patient record is active")
			@OptionalParam(name="active")
			TokenAndListParam theActive,

			@Description(shortDefinition="The species for animal patients")
			@OptionalParam(name="animal-species")
			TokenAndListParam theAnimal_species,

			@Description(shortDefinition="The breed for animal patients")
			@OptionalParam(name="animal-breed")
			TokenAndListParam theAnimal_breed,

			@Description(shortDefinition="All patients linked to the given patient")
			@OptionalParam(name="link", targetTypes={ Patient.class } )
			ReferenceAndListParam theLink,

			@Description(shortDefinition="This patient has been marked as deceased, or as a death date entered")
			@OptionalParam(name="deceased")
			TokenAndListParam theDeceased,

			@Description(shortDefinition="The date of death has been provided and satisfies this search value")
			@OptionalParam(name="deathdate")
			DateRangeParam theDeathdate,

			@IncludeParam(reverse=true)
			Set<Include> theRevIncludes,
			@Description(shortDefinition="Only return resources which were last updated as specified by the given range")
			@OptionalParam(name="_lastUpdated")
			DateRangeParam theLastUpdated,

			@IncludeParam(allow= {
				"Patient:careprovider" , "Patient:link" , "Patient:organization" , "Patient:careprovider" , "Patient:link" , "Patient:organization" , "Patient:careprovider" , "Patient:link" , "Patient:organization" , "*"
			})
			Set<Include> theIncludes,

			@Sort
			SortSpec theSort,

			@Count
			Integer theCount
	) {
		return null;
	}

}
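// Illustrative sketch only, not part of this commit: how a provider like the one above is
// typically registered on a plain RestfulServer. Assumes the R5 FhirContext is available in
// this HAPI FHIR version and that the hosting application owns the server instance.
//
//    RestfulServer server = new RestfulServer(FhirContext.forR5());
//    server.registerProvider(new PatientResourceProvider());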
File diff suppressed because it is too large
@@ -151,7 +151,6 @@ datatype.date=ca.uhn.fhir.model.primitive.DateDt
datatype.dateTime=ca.uhn.fhir.model.primitive.DateTimeDt
datatype.decimal=ca.uhn.fhir.model.primitive.DecimalDt
datatype.id=ca.uhn.fhir.model.primitive.IdDt
datatype.idref=ca.uhn.fhir.model.primitive.IdrefDt
datatype.instant=ca.uhn.fhir.model.primitive.InstantDt
datatype.integer=ca.uhn.fhir.model.primitive.IntegerDt
datatype.markdown=ca.uhn.fhir.model.primitive.MarkdownDt
@@ -146,7 +146,6 @@ datatype.date=ca.uhn.fhir.model.primitive.DateDt
datatype.dateTime=ca.uhn.fhir.model.primitive.DateTimeDt
datatype.decimal=ca.uhn.fhir.model.primitive.DecimalDt
datatype.id=ca.uhn.fhir.model.primitive.IdDt
datatype.idref=ca.uhn.fhir.model.primitive.IdrefDt
datatype.instant=ca.uhn.fhir.model.primitive.InstantDt
datatype.integer=ca.uhn.fhir.model.primitive.IntegerDt
datatype.markdown=ca.uhn.fhir.model.primitive.MarkdownDt
pom.xml
@@ -1672,6 +1672,10 @@
	<configuration>
		<excludes>
			<exclude>ca/uhn/fhir/model/dstu2/**/*.class</exclude>
			<exclude>ca/uhn/fhir/jpa/rp/r5/*.class</exclude>
			<exclude>ca/uhn/fhir/jpa/rp/r4/*.class</exclude>
			<exclude>ca/uhn/fhir/jpa/rp/dstu3/*.class</exclude>
			<exclude>ca/uhn/fhir/jpa/rp/dstu2/*.class</exclude>
		</excludes>
	</configuration>
</plugin>
@@ -304,12 +304,24 @@
			A number of overridden methods in the HAPI FHIR codebase did not have the
			@Override annotation. Thanks to Clayton Bodendein for cleaning this up!
		</action>
		<action type"add" issue="1373">
		<action type="add" issue="1373">
			Plain server resource providers were not correctly matching methods that
			had the _id search parameter if a client performed a request using a modifier
			such as :not or :exact. Thanks to Petro Mykhailyshyn
			for the pull request!
		</action>
		<action type="fix">
			The JPA server contained a restriction on the columns used to hold a resource's type name
			that was too short to hold the longest name from the final R4 definitions. This has been
			corrected to account for names up to 40 characters long.
		</action>
		<action type="fix">
			A new command has been added to the HAPI FHIR CLI that allows external (not-present) codesystem deltas to be manually uploaded
		</action>
		<action type="fix">
			The subscription triggering operation was not able to handle commas within search URLs being
			used to trigger resources for subscription checking. This has been corrected.
		</action>
	</release>
	<release version="4.0.3" date="2019-09-03" description="Igloo (Point Release)">
		<action type="fix">