diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 02ca50f6ccd..5de0db629ad 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -28,7 +28,12 @@ jobs: mavenOptions: '-Xmx2048m $(MAVEN_OPTS) -Dorg.slf4j.simpleLogger.showDateTime=true -Dorg.slf4j.simpleLogger.dateTimeFormat=HH:mm:ss,SSS -Duser.timezone=America/Toronto' - script: bash <(curl https://codecov.io/bash) -t $(CODECOV_TOKEN) displayName: 'codecov' - + - task: PublishCodeCoverageResults@1 + inputs: + codeCoverageTool: 'JaCoCo' + summaryFileLocation: $(System.DefaultWorkingDirectory)/hapi-fhir-jacoco/target/site/jacoco-report/jacoco.xml + reportDirectory: $(System.DefaultWorkingDirectory)/hapi-fhir-jacoco/target/site/jacoco-report/ + failIfCoverageEmpty: false # Potential Additional Maven3 Options: #publishJUnitResults: true diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/Constants.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/Constants.java index 04eaae58453..0e6b98292f8 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/Constants.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/Constants.java @@ -26,6 +26,7 @@ import java.util.*; public class Constants { + public static final String CT_TEXT_CSV = "text/csv"; public static final String HEADER_REQUEST_ID = "X-Request-ID"; public static final String CACHE_CONTROL_MAX_RESULTS = "max-results"; public static final String CACHE_CONTROL_NO_CACHE = "no-cache"; diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/FhirTerser.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/FhirTerser.java index 161ab949a26..b881a544c55 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/FhirTerser.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/FhirTerser.java @@ -738,25 +738,15 @@ public class FhirTerser { valueType = (Class) valueType.getSuperclass(); } - if (childElementDef == null) { - StringBuilder b = new StringBuilder(); - b.append("Found value of type["); - b.append(nextValue.getClass().getSimpleName()); - b.append("] which is not valid for field["); - b.append(nextChild.getElementName()); - b.append("] in "); - b.append(childDef.getName()); - b.append(" - Valid types: "); - for (Iterator iter = new TreeSet<>(nextChild.getValidChildNames()).iterator(); iter.hasNext(); ) { - BaseRuntimeElementDefinition childByName = nextChild.getChildByName(iter.next()); - b.append(childByName.getImplementingClass().getSimpleName()); - if (iter.hasNext()) { - b.append(", "); - } - } - throw new DataFormatException(b.toString()); + Class typeClass = nextValue.getClass(); + while (childElementDef == null && IBase.class.isAssignableFrom(typeClass)) { + //noinspection unchecked + typeClass = (Class) typeClass.getSuperclass(); + childElementDef = nextChild.getChildElementDefinitionByDatatype(typeClass); } + Validate.notNull(childElementDef, "Found value of type[%s] which is not valid for field[%s] in %s", nextValue.getClass(), nextChild.getElementName(), childDef.getName()); + visit(nextValue, nextChild, childElementDef, theCallback, theContainingElementPath, theChildDefinitionPath, theElementDefinitionPath); } } diff --git a/hapi-fhir-base/src/test/java/ca/uhn/fhir/rest/api/ConstantsTest.java b/hapi-fhir-base/src/test/java/ca/uhn/fhir/rest/api/ConstantsTest.java new file mode 100644 index 00000000000..be1bcef1e68 --- /dev/null +++ b/hapi-fhir-base/src/test/java/ca/uhn/fhir/rest/api/ConstantsTest.java @@ -0,0 +1,12 @@ +package ca.uhn.fhir.rest.api; + +import org.junit.Test; + +public class ConstantsTest { + + @Test + public void 
testConstants() { + new Constants(); + } + +} diff --git a/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/UploadTerminologyCommand.java b/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/UploadTerminologyCommand.java index eca0cf6da8d..2194c4cff91 100644 --- a/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/UploadTerminologyCommand.java +++ b/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/UploadTerminologyCommand.java @@ -20,30 +20,46 @@ package ca.uhn.fhir.cli; * #L% */ +import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion; +import ca.uhn.fhir.jpa.entity.TermConcept; +import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink; +import ca.uhn.fhir.jpa.model.util.JpaConstants; import ca.uhn.fhir.jpa.term.IHapiTerminologyLoaderSvc; +import ca.uhn.fhir.jpa.term.TerminologyLoaderSvcImpl; import ca.uhn.fhir.rest.client.api.IGenericClient; import ca.uhn.fhir.rest.client.interceptor.LoggingInterceptor; import ca.uhn.fhir.util.ParametersUtil; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; +import org.apache.commons.io.IOUtils; import org.hl7.fhir.instance.model.api.IBaseParameters; +import org.hl7.fhir.r4.model.CodeSystem; + +import java.io.ByteArrayInputStream; +import java.io.FileInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.stream.Collectors; import static org.apache.commons.lang3.StringUtils.isBlank; public class UploadTerminologyCommand extends BaseCommand { + public static final String UPLOAD_TERMINOLOGY = "upload-terminology"; // TODO: Don't use qualified names for loggers in HAPI CLI. private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(UploadTerminologyCommand.class); - private static final String UPLOAD_EXTERNAL_CODE_SYSTEM = "upload-external-code-system"; @Override public String getCommandDescription() { - return "Uploads a terminology package (e.g. a SNOMED CT ZIP file) to a server, using the $" + UPLOAD_EXTERNAL_CODE_SYSTEM + " operation."; + return "Uploads a terminology package (e.g. a SNOMED CT ZIP file or a custom terminology bundle) to a server, using the appropriate operation."; } @Override public String getCommandName() { - return "upload-terminology"; + return UPLOAD_TERMINOLOGY; } @Override @@ -55,6 +71,7 @@ public class UploadTerminologyCommand extends BaseCommand { addRequiredOption(options, "u", "url", true, "The code system URL associated with this upload (e.g. 
" + IHapiTerminologyLoaderSvc.SCT_URI + ")"); addOptionalOption(options, "d", "data", true, "Local file to use to upload (can be a raw file or a ZIP containing the raw file)"); addOptionalOption(options, null, "custom", false, "Indicates that this upload uses the HAPI FHIR custom external terminology format"); + addOptionalOption(options, "m", "mode", true, "The upload mode: SNAPSHOT (default), ADD, REMOVE"); addBasicAuthOption(options); addVerboseLoggingOption(options); @@ -65,6 +82,14 @@ public class UploadTerminologyCommand extends BaseCommand { public void run(CommandLine theCommandLine) throws ParseException { parseFhirContext(theCommandLine); + ModeEnum mode; + String modeString = theCommandLine.getOptionValue("m", "SNAPSHOT"); + try { + mode = ModeEnum.valueOf(modeString); + } catch (IllegalArgumentException e) { + throw new ParseException("Invalid mode: " + modeString); + } + String termUrl = theCommandLine.getOptionValue("u"); if (isBlank(termUrl)) { throw new ParseException("No URL provided"); @@ -77,29 +102,118 @@ public class UploadTerminologyCommand extends BaseCommand { IGenericClient client = super.newClient(theCommandLine); IBaseParameters inputParameters = ParametersUtil.newInstance(myFhirCtx); - ParametersUtil.addParameterToParametersUri(myFhirCtx, inputParameters, "url", termUrl); - for (String next : datafile) { - ParametersUtil.addParameterToParametersString(myFhirCtx, inputParameters, "localfile", next); - } - if (theCommandLine.hasOption("custom")) { - ParametersUtil.addParameterToParametersCode(myFhirCtx, inputParameters, "contentMode", "custom"); - } if (theCommandLine.hasOption(VERBOSE_LOGGING_PARAM)) { client.registerInterceptor(new LoggingInterceptor(true)); } + switch (mode) { + case SNAPSHOT: + uploadSnapshot(inputParameters, termUrl, datafile, theCommandLine, client); + break; + case ADD: + uploadDelta(theCommandLine, termUrl, datafile, client, inputParameters, JpaConstants.OPERATION_APPLY_CODESYSTEM_DELTA_ADD, false); + break; + case REMOVE: + uploadDelta(theCommandLine, termUrl, datafile, client, inputParameters, JpaConstants.OPERATION_APPLY_CODESYSTEM_DELTA_REMOVE, true); + break; + } + + } + + private void uploadDelta(CommandLine theCommandLine, String theTermUrl, String[] theDatafile, IGenericClient theClient, IBaseParameters theInputParameters, String theOperationName, boolean theFlatten) { + ParametersUtil.addParameterToParametersUri(myFhirCtx, theInputParameters, "url", theTermUrl); + + List fileDescriptors = new ArrayList<>(); + + for (String next : theDatafile) { + try (FileInputStream inputStream = new FileInputStream(next)) { + byte[] bytes = IOUtils.toByteArray(inputStream); + fileDescriptors.add(new IHapiTerminologyLoaderSvc.FileDescriptor() { + @Override + public String getFilename() { + return next; + } + + @Override + public InputStream getInputStream() { + return new ByteArrayInputStream(bytes); + } + }); + } catch (IOException e) { + throw new CommandFailureException("Failed to read from file \"" + next + "\": " + e.getMessage()); + } + } + + TermCodeSystemVersion codeSystemVersion = new TermCodeSystemVersion(); + TerminologyLoaderSvcImpl.LoadedFileDescriptors descriptors = new TerminologyLoaderSvcImpl.LoadedFileDescriptors(fileDescriptors); + TerminologyLoaderSvcImpl.processCustomTerminologyFiles(descriptors, codeSystemVersion); + + CodeSystem codeSystem = new CodeSystem(); + codeSystem.setUrl(theTermUrl); + addCodesToCodeSystem(codeSystemVersion.getConcepts(), codeSystem.getConcept(), theFlatten); + + 
ParametersUtil.addParameterToParameters(myFhirCtx, theInputParameters, "value", codeSystem); + + if (theCommandLine.hasOption("custom")) { + ParametersUtil.addParameterToParametersCode(myFhirCtx, theInputParameters, "contentMode", "custom"); + } + ourLog.info("Beginning upload - This may take a while..."); - IBaseParameters response = client + IBaseParameters response = theClient .operation() .onType(myFhirCtx.getResourceDefinition("CodeSystem").getImplementingClass()) - .named(UPLOAD_EXTERNAL_CODE_SYSTEM) - .withParameters(inputParameters) + .named(theOperationName) + .withParameters(theInputParameters) .execute(); ourLog.info("Upload complete!"); ourLog.info("Response:\n{}", myFhirCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(response)); } + private void addCodesToCodeSystem(Collection theSourceConcepts, List theTargetConcept, boolean theFlatten) { + for (TermConcept nextSourceConcept : theSourceConcepts) { + + CodeSystem.ConceptDefinitionComponent nextTarget = new CodeSystem.ConceptDefinitionComponent(); + nextTarget.setCode(nextSourceConcept.getCode()); + nextTarget.setDisplay(nextSourceConcept.getDisplay()); + theTargetConcept.add(nextTarget); + + List children = nextSourceConcept.getChildren().stream().map(t -> t.getChild()).collect(Collectors.toList()); + if (theFlatten) { + addCodesToCodeSystem(children, theTargetConcept, theFlatten); + } else { + addCodesToCodeSystem(children, nextTarget.getConcept(), theFlatten); + } + + } + } + + private void uploadSnapshot(IBaseParameters theInputparameters, String theTermUrl, String[] theDatafile, CommandLine theCommandLine, IGenericClient theClient) { + ParametersUtil.addParameterToParametersUri(myFhirCtx, theInputparameters, "url", theTermUrl); + for (String next : theDatafile) { + ParametersUtil.addParameterToParametersString(myFhirCtx, theInputparameters, "localfile", next); + } + if (theCommandLine.hasOption("custom")) { + ParametersUtil.addParameterToParametersCode(myFhirCtx, theInputparameters, "contentMode", "custom"); + } + + ourLog.info("Beginning upload - This may take a while..."); + + IBaseParameters response = theClient + .operation() + .onType(myFhirCtx.getResourceDefinition("CodeSystem").getImplementingClass()) + .named(JpaConstants.OPERATION_UPLOAD_EXTERNAL_CODE_SYSTEM) + .withParameters(theInputparameters) + .execute(); + + ourLog.info("Upload complete!"); + ourLog.info("Response:\n{}", myFhirCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(response)); + } + + private enum ModeEnum { + SNAPSHOT, ADD, REMOVE + } + } diff --git a/hapi-fhir-cli/hapi-fhir-cli-api/src/test/java/ca/uhn/fhir/cli/UploadTerminologyCommandTest.java b/hapi-fhir-cli/hapi-fhir-cli-api/src/test/java/ca/uhn/fhir/cli/UploadTerminologyCommandTest.java new file mode 100644 index 00000000000..42c8eccd95a --- /dev/null +++ b/hapi-fhir-cli/hapi-fhir-cli-api/src/test/java/ca/uhn/fhir/cli/UploadTerminologyCommandTest.java @@ -0,0 +1,198 @@ +package ca.uhn.fhir.cli; + +import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.jpa.provider.TerminologyUploaderProvider; +import ca.uhn.fhir.jpa.term.IHapiTerminologyLoaderSvc; +import ca.uhn.fhir.jpa.term.IHapiTerminologySvc; +import ca.uhn.fhir.rest.server.RestfulServer; +import ca.uhn.fhir.test.utilities.JettyUtil; +import org.apache.commons.io.FileUtils; +import org.apache.commons.io.IOUtils; +import org.eclipse.jetty.server.Server; +import org.eclipse.jetty.servlet.ServletHandler; +import org.eclipse.jetty.servlet.ServletHolder; +import org.hamcrest.Matchers; +import 
org.hl7.fhir.r4.model.CodeSystem; +import org.hl7.fhir.r4.model.IdType; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.ArgumentCaptor; +import org.mockito.Captor; +import org.mockito.Mock; +import org.mockito.junit.MockitoJUnitRunner; + +import java.io.*; +import java.util.List; +import java.util.concurrent.atomic.AtomicInteger; + +import static org.junit.Assert.*; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.*; + +@RunWith(MockitoJUnitRunner.class) +public class UploadTerminologyCommandTest { + + static { + System.setProperty("test", "true"); + } + + private Server myServer; + private FhirContext myCtx = FhirContext.forR4(); + @Mock + private IHapiTerminologyLoaderSvc myTerminologyLoaderSvc; + @Mock + private IHapiTerminologySvc myTerminologySvc; + @Captor + private ArgumentCaptor> myDescriptorList; + @Captor + private ArgumentCaptor myCodeSystemCaptor; + + private int myPort; + private String myConceptsFileName = "target/concepts.csv"; + private String myHierarchyFileName = "target/hierarchy.csv"; + private File myConceptsFile = new File(myConceptsFileName); + private File myHierarchyFile = new File(myHierarchyFileName); + + @Test + public void testTerminologyUpload_AddDelta() throws IOException { + + writeConceptAndHierarchyFiles(); + + when(myTerminologySvc.applyDeltaCodesystemsAdd(eq("http://foo"), any(), any())).thenReturn(new AtomicInteger(100)); + + App.main(new String[]{ + UploadTerminologyCommand.UPLOAD_TERMINOLOGY, + "-v", "r4", + "-m", "ADD", + "-t", "http://localhost:" + myPort, + "-u", "http://foo", + "-d", myConceptsFileName, + "-d", myHierarchyFileName + }); + + verify(myTerminologySvc, times(1)).applyDeltaCodesystemsAdd(any(), isNull(), myCodeSystemCaptor.capture()); + + CodeSystem codeSystem = myCodeSystemCaptor.getValue(); + assertEquals(1, codeSystem.getConcept().size()); + assertEquals("http://foo", codeSystem.getUrl()); + assertEquals("ANIMALS", codeSystem.getConcept().get(0).getCode()); + assertEquals("Animals", codeSystem.getConcept().get(0).getDisplay()); + assertEquals(2, codeSystem.getConcept().get(0).getConcept().size()); + assertEquals("CATS", codeSystem.getConcept().get(0).getConcept().get(0).getCode()); + assertEquals("Cats", codeSystem.getConcept().get(0).getConcept().get(0).getDisplay()); + assertEquals("DOGS", codeSystem.getConcept().get(0).getConcept().get(1).getCode()); + assertEquals("Dogs", codeSystem.getConcept().get(0).getConcept().get(1).getDisplay()); + } + + @Test + public void testTerminologyUpload_RemoveDelta() throws IOException { + writeConceptAndHierarchyFiles(); + + when(myTerminologySvc.applyDeltaCodesystemsRemove(eq("http://foo"), any())).thenReturn(new AtomicInteger(100)); + + App.main(new String[]{ + UploadTerminologyCommand.UPLOAD_TERMINOLOGY, + "-v", "r4", + "-m", "REMOVE", + "-t", "http://localhost:" + myPort, + "-u", "http://foo", + "-d", myConceptsFileName, + "-d", myHierarchyFileName + }); + + verify(myTerminologySvc, times(1)).applyDeltaCodesystemsRemove(any(), myCodeSystemCaptor.capture()); + + CodeSystem codeSystem = myCodeSystemCaptor.getValue(); + assertEquals(3, codeSystem.getConcept().size()); + assertEquals("http://foo", codeSystem.getUrl()); + assertEquals("ANIMALS", codeSystem.getConcept().get(0).getCode()); + assertEquals("Animals", codeSystem.getConcept().get(0).getDisplay()); + assertEquals("CATS", codeSystem.getConcept().get(1).getCode()); + 
assertEquals("Cats", codeSystem.getConcept().get(1).getDisplay()); + assertEquals("DOGS", codeSystem.getConcept().get(2).getCode()); + assertEquals("Dogs", codeSystem.getConcept().get(2).getDisplay()); + } + + @Test + public void testTerminologyUpload_Snapshot() throws IOException { + + writeConceptAndHierarchyFiles(); + + when(myTerminologyLoaderSvc.loadCustom(eq("http://foo"), any(), any())).thenReturn(new IHapiTerminologyLoaderSvc.UploadStatistics(100, new IdType("CodeSystem/123"))); + + App.main(new String[]{ + UploadTerminologyCommand.UPLOAD_TERMINOLOGY, + "-v", "r4", + "-m", "SNAPSHOT", + "--custom", + "-t", "http://localhost:" + myPort, + "-u", "http://foo", + "-d", myConceptsFileName, + "-d", myHierarchyFileName + }); + + verify(myTerminologyLoaderSvc, times(1)).loadCustom(any(), myDescriptorList.capture(), any()); + + List listOfDescriptors = myDescriptorList.getValue(); + assertEquals(2, listOfDescriptors.size()); + + assertThat(listOfDescriptors.get(0).getFilename(), Matchers.endsWith("concepts.csv")); + assertInputStreamEqualsFile(myConceptsFile, listOfDescriptors.get(0).getInputStream()); + assertThat(listOfDescriptors.get(1).getFilename(), Matchers.endsWith("hierarchy.csv")); + assertInputStreamEqualsFile(myHierarchyFile, listOfDescriptors.get(1).getInputStream()); + } + + + private void writeConceptAndHierarchyFiles() throws IOException { + try (FileWriter w = new FileWriter(myConceptsFile, false)) { + w.append("CODE,DISPLAY\n"); + w.append("ANIMALS,Animals\n"); + w.append("CATS,Cats\n"); + w.append("DOGS,Dogs\n"); + } + + try (FileWriter w = new FileWriter(myHierarchyFile, false)) { + w.append("PARENT,CHILD\n"); + w.append("ANIMALS,CATS\n"); + w.append("ANIMALS,DOGS\n"); + } + } + + private void assertInputStreamEqualsFile(File theExpectedFile, InputStream theActualInputStream) throws IOException { + try (FileInputStream fis = new FileInputStream(theExpectedFile)) { + byte[] expectedBytes = IOUtils.toByteArray(fis); + byte[] actualBytes = IOUtils.toByteArray(theActualInputStream); + assertArrayEquals(expectedBytes, actualBytes); + } + } + + @After + public void after() throws Exception { + JettyUtil.closeServer(myServer); + + FileUtils.deleteQuietly(myConceptsFile); + FileUtils.deleteQuietly(myHierarchyFile); + } + + @Before + public void start() throws Exception { + myServer = new Server(0); + + TerminologyUploaderProvider provider = new TerminologyUploaderProvider(myCtx, myTerminologyLoaderSvc, myTerminologySvc); + + ServletHandler proxyHandler = new ServletHandler(); + RestfulServer servlet = new RestfulServer(myCtx); + servlet.registerProvider(provider); + ServletHolder servletHolder = new ServletHolder(servlet); + proxyHandler.addServletWithMapping(servletHolder, "/*"); + myServer.setHandler(proxyHandler); + JettyUtil.startServer(myServer); + myPort = JettyUtil.getPortForStartedServer(myServer); + + } + + +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConcept.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConcept.java index 34a12604cf8..1ee569d8867 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConcept.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConcept.java @@ -365,7 +365,10 @@ public class TermConcept implements Serializable { @Override public String toString() { - return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE).append("code", myCode).append("display", myDisplay).build(); + return new ToStringBuilder(this, 
ToStringStyle.SHORT_PREFIX_STYLE) + .append("code", myCode) + .append("display", myDisplay) + .build(); } public List toValidationProperties() { diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/BaseTerminologyUploaderProvider.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/BaseTerminologyUploaderProvider.java deleted file mode 100644 index 9e84f824b23..00000000000 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/BaseTerminologyUploaderProvider.java +++ /dev/null @@ -1,39 +0,0 @@ -package ca.uhn.fhir.jpa.provider; - -/*- - * #%L - * HAPI FHIR JPA Server - * %% - * Copyright (C) 2014 - 2019 University Health Network - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import ca.uhn.fhir.jpa.model.util.JpaConstants; - -/** - * @deprecated TerminologyUploaderProvider - */ -@Deprecated -public class BaseTerminologyUploaderProvider { - - // FIXME: remove these before 4.0.0 - public static final String UPLOAD_EXTERNAL_CODE_SYSTEM = JpaConstants.OPERATION_UPLOAD_EXTERNAL_CODE_SYSTEM; - public static final String CONCEPT_COUNT = "conceptCount"; - public static final String TARGET = "target"; - public static final String SYSTEM = "system"; - public static final String PARENT_CODE = "parentCode"; - public static final String VALUE = "value"; - -} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/SubscriptionTriggeringProvider.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/SubscriptionTriggeringProvider.java index 1d17bc8b98a..24d881ad2d9 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/SubscriptionTriggeringProvider.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/SubscriptionTriggeringProvider.java @@ -21,18 +21,17 @@ package ca.uhn.fhir.jpa.provider; */ import ca.uhn.fhir.context.FhirContext; -import ca.uhn.fhir.jpa.subscription.ISubscriptionTriggeringSvc; import ca.uhn.fhir.jpa.model.util.JpaConstants; +import ca.uhn.fhir.jpa.subscription.ISubscriptionTriggeringSvc; import ca.uhn.fhir.model.dstu2.valueset.ResourceTypeEnum; import ca.uhn.fhir.rest.annotation.IdParam; import ca.uhn.fhir.rest.annotation.Operation; import ca.uhn.fhir.rest.annotation.OperationParam; -import ca.uhn.fhir.rest.param.StringParam; -import ca.uhn.fhir.rest.param.UriParam; import ca.uhn.fhir.rest.server.IResourceProvider; import org.hl7.fhir.instance.model.api.IBaseParameters; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IIdType; +import org.hl7.fhir.instance.model.api.IPrimitiveType; import org.springframework.beans.factory.annotation.Autowired; import java.util.List; @@ -48,8 +47,8 @@ public class SubscriptionTriggeringProvider implements IResourceProvider { @Operation(name = JpaConstants.OPERATION_TRIGGER_SUBSCRIPTION) public IBaseParameters triggerSubscription( - @OperationParam(name = RESOURCE_ID, min = 0, max = OperationParam.MAX_UNLIMITED) List theResourceIds, - 
@OperationParam(name = SEARCH_URL, min = 0, max = OperationParam.MAX_UNLIMITED) List theSearchUrls + @OperationParam(name = RESOURCE_ID, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "uri") List> theResourceIds, + @OperationParam(name = SEARCH_URL, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "string") List> theSearchUrls ) { return mySubscriptionTriggeringSvc.triggerSubscription(theResourceIds, theSearchUrls, null); } @@ -57,8 +56,8 @@ public class SubscriptionTriggeringProvider implements IResourceProvider { @Operation(name = JpaConstants.OPERATION_TRIGGER_SUBSCRIPTION) public IBaseParameters triggerSubscription( @IdParam IIdType theSubscriptionId, - @OperationParam(name = RESOURCE_ID, min = 0, max = OperationParam.MAX_UNLIMITED) List theResourceIds, - @OperationParam(name = SEARCH_URL, min = 0, max = OperationParam.MAX_UNLIMITED) List theSearchUrls + @OperationParam(name = RESOURCE_ID, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "uri") List> theResourceIds, + @OperationParam(name = SEARCH_URL, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "string") List> theSearchUrls ) { return mySubscriptionTriggeringSvc.triggerSubscription(theResourceIds, theSearchUrls, theSubscriptionId); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/TerminologyUploaderProvider.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/TerminologyUploaderProvider.java index cbc133904bf..ddb78d7c6b9 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/TerminologyUploaderProvider.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/TerminologyUploaderProvider.java @@ -54,9 +54,10 @@ public class TerminologyUploaderProvider extends BaseJpaProvider { public static final String CONCEPT_COUNT = "conceptCount"; public static final String TARGET = "target"; - private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(TerminologyUploaderProvider.class); public static final String PARENT_CODE = "parentCode"; public static final String VALUE = "value"; + private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(TerminologyUploaderProvider.class); + private static final String PACKAGE = "package"; @Autowired private FhirContext myCtx; @@ -87,12 +88,12 @@ public class TerminologyUploaderProvider extends BaseJpaProvider { * $apply-codesystem-delta-add * */ - @Operation(typeName="CodeSystem", name = JpaConstants.OPERATION_APPLY_CODESYSTEM_DELTA_ADD, idempotent = false, returnParameters = { + @Operation(typeName = "CodeSystem", name = JpaConstants.OPERATION_APPLY_CODESYSTEM_DELTA_ADD, idempotent = false, returnParameters = { }) public IBaseParameters applyCodeSystemDeltaAdd( HttpServletRequest theServletRequest, @OperationParam(name = PARENT_CODE, min = 0, max = 1) IPrimitiveType theParentCode, - @OperationParam(name = VALUE, min = 1, max = 1) IBaseResource theValue, + @OperationParam(name = VALUE, min = 0, max = 1) IBaseResource theValue, RequestDetails theRequestDetails ) { @@ -104,6 +105,8 @@ public class TerminologyUploaderProvider extends BaseJpaProvider { value = (CodeSystem) theValue; } else if (theValue instanceof org.hl7.fhir.dstu3.model.CodeSystem) { value = VersionConvertor_30_40.convertCodeSystem((org.hl7.fhir.dstu3.model.CodeSystem) theValue); + } else if (theValue instanceof org.hl7.fhir.r5.model.CodeSystem) { + value = org.hl7.fhir.convertors.conv40_50.CodeSystem.convertCodeSystem((org.hl7.fhir.r5.model.CodeSystem) theValue); } else { throw 
new InvalidRequestException("Value must be present and be a CodeSystem"); } @@ -130,7 +133,7 @@ public class TerminologyUploaderProvider extends BaseJpaProvider { * $apply-codesystem-delta-remove * */ - @Operation(typeName="CodeSystem", name = JpaConstants.OPERATION_APPLY_CODESYSTEM_DELTA_REMOVE, idempotent = false, returnParameters = { + @Operation(typeName = "CodeSystem", name = JpaConstants.OPERATION_APPLY_CODESYSTEM_DELTA_REMOVE, idempotent = false, returnParameters = { }) public IBaseParameters applyCodeSystemDeltaRemove( HttpServletRequest theServletRequest, @@ -146,6 +149,8 @@ public class TerminologyUploaderProvider extends BaseJpaProvider { value = (CodeSystem) theValue; } else if (theValue instanceof org.hl7.fhir.dstu3.model.CodeSystem) { value = VersionConvertor_30_40.convertCodeSystem((org.hl7.fhir.dstu3.model.CodeSystem) theValue); + } else if (theValue instanceof org.hl7.fhir.r5.model.CodeSystem) { + value = org.hl7.fhir.convertors.conv40_50.CodeSystem.convertCodeSystem((org.hl7.fhir.r5.model.CodeSystem) theValue); } else { throw new InvalidRequestException("Value must be present and be a CodeSystem"); } @@ -165,13 +170,12 @@ public class TerminologyUploaderProvider extends BaseJpaProvider { } - /** * * $upload-external-codesystem * */ - @Operation(typeName="CodeSystem", name = JpaConstants.OPERATION_UPLOAD_EXTERNAL_CODE_SYSTEM, idempotent = false, returnParameters = { + @Operation(typeName = "CodeSystem", name = JpaConstants.OPERATION_UPLOAD_EXTERNAL_CODE_SYSTEM, idempotent = false, returnParameters = { // @OperationParam(name = "conceptCount", type = IntegerType.class, min = 1) }) public IBaseParameters uploadExternalCodeSystem( @@ -179,7 +183,7 @@ public class TerminologyUploaderProvider extends BaseJpaProvider { @OperationParam(name = "url", min = 1, typeName = "uri") IPrimitiveType theCodeSystemUrl, @OperationParam(name = "contentMode", min = 0, typeName = "code") IPrimitiveType theContentMode, @OperationParam(name = "localfile", min = 1, max = OperationParam.MAX_UNLIMITED, typeName = "string") List> theLocalFile, - @OperationParam(name = "package", min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "attachment") List thePackage, + @OperationParam(name = PACKAGE, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "attachment") List thePackage, RequestDetails theRequestDetails ) { diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/dstu3/TerminologyUploaderProviderDstu3.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/dstu3/TerminologyUploaderProviderDstu3.java deleted file mode 100644 index 0ede86b447b..00000000000 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/dstu3/TerminologyUploaderProviderDstu3.java +++ /dev/null @@ -1,47 +0,0 @@ -package ca.uhn.fhir.jpa.provider.dstu3; - -/*- - * #%L - * HAPI FHIR JPA Server - * %% - * Copyright (C) 2014 - 2019 University Health Network - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * #L% - */ - -import ca.uhn.fhir.jpa.model.util.JpaConstants; -import ca.uhn.fhir.jpa.provider.TerminologyUploaderProvider; -import ca.uhn.fhir.rest.annotation.Operation; -import ca.uhn.fhir.rest.annotation.OperationParam; -import ca.uhn.fhir.rest.api.server.RequestDetails; -import ca.uhn.fhir.rest.param.StringParam; -import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; -import org.hl7.fhir.convertors.VersionConvertor_30_40; -import org.hl7.fhir.dstu3.model.Attachment; -import org.hl7.fhir.dstu3.model.IntegerType; -import org.hl7.fhir.dstu3.model.Parameters; -import org.hl7.fhir.dstu3.model.StringType; -import org.hl7.fhir.exceptions.FHIRException; - -import javax.servlet.http.HttpServletRequest; -import java.util.ArrayList; -import java.util.List; - -/** - * @deprecated Use {@link TerminologyUploaderProvider} instead - */ -@Deprecated -public class TerminologyUploaderProviderDstu3 extends TerminologyUploaderProvider { - // nothing -} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/r4/TerminologyUploaderProviderR4.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/r4/TerminologyUploaderProviderR4.java deleted file mode 100644 index 06b699a22c2..00000000000 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/r4/TerminologyUploaderProviderR4.java +++ /dev/null @@ -1,42 +0,0 @@ -package ca.uhn.fhir.jpa.provider.r4; - -/*- - * #%L - * HAPI FHIR JPA Server - * %% - * Copyright (C) 2014 - 2019 University Health Network - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * #L% - */ - -import ca.uhn.fhir.jpa.model.util.JpaConstants; -import ca.uhn.fhir.jpa.provider.TerminologyUploaderProvider; -import ca.uhn.fhir.rest.annotation.Operation; -import ca.uhn.fhir.rest.annotation.OperationParam; -import ca.uhn.fhir.rest.api.server.RequestDetails; -import ca.uhn.fhir.rest.param.StringParam; -import org.hl7.fhir.r4.model.Attachment; -import org.hl7.fhir.r4.model.IntegerType; -import org.hl7.fhir.r4.model.Parameters; -import org.hl7.fhir.r4.model.StringType; - -import javax.servlet.http.HttpServletRequest; -import java.util.List; - -/** - * @deprecated Use {@link TerminologyUploaderProvider} instead - */ -public class TerminologyUploaderProviderR4 extends TerminologyUploaderProvider { - // nothing -} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/subscription/ISubscriptionTriggeringSvc.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/subscription/ISubscriptionTriggeringSvc.java index 56082a894bb..33381da2c01 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/subscription/ISubscriptionTriggeringSvc.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/subscription/ISubscriptionTriggeringSvc.java @@ -21,15 +21,15 @@ package ca.uhn.fhir.jpa.subscription; */ import ca.uhn.fhir.rest.annotation.IdParam; -import ca.uhn.fhir.rest.param.StringParam; -import ca.uhn.fhir.rest.param.UriParam; import org.hl7.fhir.instance.model.api.IBaseParameters; import org.hl7.fhir.instance.model.api.IIdType; +import org.hl7.fhir.instance.model.api.IPrimitiveType; import java.util.List; public interface ISubscriptionTriggeringSvc { - IBaseParameters triggerSubscription(List<UriParam> theResourceIds, List<StringParam> theSearchUrls, @IdParam IIdType theSubscriptionId); + + IBaseParameters triggerSubscription(List<IPrimitiveType<String>> theResourceIds, List<IPrimitiveType<String>> theSearchUrls, @IdParam IIdType theSubscriptionId); void runDeliveryPass(); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/subscription/SubscriptionTriggeringSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/subscription/SubscriptionTriggeringSvcImpl.java index ea2a83daa98..f17c89bd598 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/subscription/SubscriptionTriggeringSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/subscription/SubscriptionTriggeringSvcImpl.java @@ -100,7 +100,8 @@ public class SubscriptionTriggeringSvcImpl implements ISubscriptionTriggeringSvc private ISchedulerService mySchedulerService; @Override - public IBaseParameters triggerSubscription(List<UriParam> theResourceIds, List<StringParam> theSearchUrls, @IdParam IIdType theSubscriptionId) { + public IBaseParameters triggerSubscription(List<IPrimitiveType<String>> theResourceIds, List<IPrimitiveType<String>> theSearchUrls, @IdParam IIdType theSubscriptionId) { + if (myDaoConfig.getSupportedSubscriptionTypes().isEmpty()) { throw new PreconditionFailedException("Subscription processing not active on this server"); } @@ -115,8 +116,8 @@ public class SubscriptionTriggeringSvcImpl implements ISubscriptionTriggeringSvc subscriptionDao.read(subscriptionId); } - List<UriParam> resourceIds = ObjectUtils.defaultIfNull(theResourceIds, Collections.emptyList()); - List<StringParam> searchUrls = ObjectUtils.defaultIfNull(theSearchUrls, Collections.emptyList()); + List<IPrimitiveType<String>> resourceIds = ObjectUtils.defaultIfNull(theResourceIds, Collections.emptyList()); + List<IPrimitiveType<String>> searchUrls = ObjectUtils.defaultIfNull(theSearchUrls, Collections.emptyList()); // Make sure we have at least one resource ID or search URL if (resourceIds.size() == 0 && searchUrls.size() == 0) { @@ -124,14 +125,14
@@ public class SubscriptionTriggeringSvcImpl implements ISubscriptionTriggeringSvc } // Resource URLs must be compete - for (UriParam next : resourceIds) { + for (IPrimitiveType next : resourceIds) { IdType resourceId = new IdType(next.getValue()); ValidateUtil.isTrueOrThrowInvalidRequest(resourceId.hasResourceType(), RESOURCE_ID + " parameter must have resource type"); ValidateUtil.isTrueOrThrowInvalidRequest(resourceId.hasIdPart(), RESOURCE_ID + " parameter must have resource ID part"); } // Search URLs must be valid - for (StringParam next : searchUrls) { + for (IPrimitiveType next : searchUrls) { if (!next.getValue().contains("?")) { throw new InvalidRequestException("Search URL is not valid (must be in the form \"[resource type]?[optional params]\")"); } @@ -139,8 +140,8 @@ public class SubscriptionTriggeringSvcImpl implements ISubscriptionTriggeringSvc SubscriptionTriggeringJobDetails jobDetails = new SubscriptionTriggeringJobDetails(); jobDetails.setJobId(UUID.randomUUID().toString()); - jobDetails.setRemainingResourceIds(resourceIds.stream().map(UriParam::getValue).collect(Collectors.toList())); - jobDetails.setRemainingSearchUrls(searchUrls.stream().map(StringParam::getValue).collect(Collectors.toList())); + jobDetails.setRemainingResourceIds(resourceIds.stream().map(t->t.getValue()).collect(Collectors.toList())); + jobDetails.setRemainingSearchUrls(searchUrls.stream().map(t->t.getValue()).collect(Collectors.toList())); if (theSubscriptionId != null) { jobDetails.setSubscriptionId(theSubscriptionId.toUnqualifiedVersionless().getValue()); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TerminologyLoaderSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TerminologyLoaderSvcImpl.java index e078100e212..7d728e733b0 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TerminologyLoaderSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TerminologyLoaderSvcImpl.java @@ -36,6 +36,7 @@ import org.hl7.fhir.r4.model.Enumerations; import org.hl7.fhir.r4.model.ValueSet; import org.springframework.beans.factory.annotation.Autowired; +import javax.annotation.Nonnull; import javax.validation.constraints.NotNull; import java.io.*; import java.util.*; @@ -81,12 +82,10 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc { private static final int LOG_INCREMENT = 1000; private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(TerminologyLoaderSvcImpl.class); - - @Autowired - private IHapiTerminologySvc myTermSvc; - // FYI: Hardcoded to R4 because that's what the term svc uses internally private final FhirContext myCtx = FhirContext.forR4(); + @Autowired + private IHapiTerminologySvc myTermSvc; private void dropCircularRefs(TermConcept theConcept, ArrayList theChain, Map theCode2concept, Counter theCircularCounter) { @@ -121,69 +120,6 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc { } - private void iterateOverZipFile(LoadedFileDescriptors theDescriptors, String theFileNamePart, IRecordHandler theHandler, char theDelimiter, QuoteMode theQuoteMode, boolean theIsPartialFilename) { - - boolean foundMatch = false; - for (FileDescriptor nextZipBytes : theDescriptors.getUncompressedFileDescriptors()) { - String nextFilename = nextZipBytes.getFilename(); - boolean matches; - if (theIsPartialFilename) { - matches = nextFilename.contains(theFileNamePart); - } else { - matches = nextFilename.endsWith("/" + theFileNamePart) || 
nextFilename.equals(theFileNamePart); - } - - if (matches) { - ourLog.info("Processing file {}", nextFilename); - foundMatch = true; - - Reader reader; - CSVParser parsed; - try { - reader = new InputStreamReader(nextZipBytes.getInputStream(), Charsets.UTF_8); - - if (ourLog.isTraceEnabled()) { - String contents = IOUtils.toString(reader); - ourLog.info("File contents for: {}\n{}", nextFilename, contents); - reader = new StringReader(contents); - } - - CSVFormat format = CSVFormat.newFormat(theDelimiter).withFirstRecordAsHeader(); - if (theQuoteMode != null) { - format = format.withQuote('"').withQuoteMode(theQuoteMode); - } - parsed = new CSVParser(reader, format); - Iterator iter = parsed.iterator(); - ourLog.debug("Header map: {}", parsed.getHeaderMap()); - - int count = 0; - int nextLoggedCount = 0; - while (iter.hasNext()) { - CSVRecord nextRecord = iter.next(); - if (nextRecord.isConsistent() == false) { - continue; - } - theHandler.accept(nextRecord); - count++; - if (count >= nextLoggedCount) { - ourLog.info(" * Processed {} records in {}", count, nextFilename); - nextLoggedCount += LOG_INCREMENT; - } - } - - } catch (IOException e) { - throw new InternalErrorException(e); - } - } - - } - - if (!foundMatch) { - throw new InvalidRequestException("Did not find file matching " + theFileNamePart); - } - - } - @Override public UploadStatistics loadImgthla(List theFiles, RequestDetails theRequestDetails) { LoadedFileDescriptors descriptors = null; @@ -280,7 +216,6 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc { @Override public UploadStatistics loadCustom(String theSystem, List theFiles, RequestDetails theRequestDetails) { try (LoadedFileDescriptors descriptors = new LoadedFileDescriptors(theFiles)) { - final Map code2concept = new HashMap<>(); IRecordHandler handler; Optional codeSystemContent = loadFile(descriptors, CUSTOM_CODESYSTEM_JSON, CUSTOM_CODESYSTEM_XML); @@ -299,23 +234,7 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc { } TermCodeSystemVersion csv = new TermCodeSystemVersion(); - - // Concept File - handler = new ConceptHandler(code2concept, csv); - iterateOverZipFile(descriptors, CUSTOM_CONCEPTS_FILE, handler, ',', QuoteMode.NON_NUMERIC, false); - - // Hierarchy - if (descriptors.hasFile(CUSTOM_HIERARCHY_FILE)) { - handler = new HierarchyHandler(code2concept); - iterateOverZipFile(descriptors, CUSTOM_HIERARCHY_FILE, handler, ',', QuoteMode.NON_NUMERIC, false); - } - - // Add root concepts to CodeSystemVersion - for (TermConcept nextConcept : code2concept.values()) { - if (nextConcept.getParents().isEmpty()) { - csv.getConcepts().add(nextConcept); - } - } + final Map code2concept = processCustomTerminologyFiles(descriptors, csv); IIdType target = storeCodeSystem(theRequestDetails, csv, codeSystem, null, null); return new UploadStatistics(code2concept.size(), target); @@ -383,12 +302,6 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc { try { reader = new InputStreamReader(nextZipBytes.getInputStream(), Charsets.UTF_8); - if (ourLog.isTraceEnabled()) { - String contents = IOUtils.toString(reader); - ourLog.info("File contents for: {}\n{}", nextFilename, contents); - reader = new StringReader(contents); - } - LineNumberReader lnr = new LineNumberReader(reader); while (lnr.readLine() != null) { } @@ -414,12 +327,6 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc { try { reader = new InputStreamReader(nextZipBytes.getInputStream(), Charsets.UTF_8); - if 
(ourLog.isTraceEnabled()) { - String contents = IOUtils.toString(reader); - ourLog.info("File contents for: {}\n{}", nextFilename, contents); - reader = new StringReader(contents); - } - LineNumberReader lnr = new LineNumberReader(reader); while (lnr.readLine() != null) { } @@ -666,12 +573,12 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc { return retVal; } - static class LoadedFileDescriptors implements Closeable { + public static class LoadedFileDescriptors implements Closeable { private List myTemporaryFiles = new ArrayList<>(); private List myUncompressedFileDescriptors = new ArrayList<>(); - LoadedFileDescriptors(List theFileDescriptors) { + public LoadedFileDescriptors(List theFileDescriptors) { try { for (FileDescriptor next : theFileDescriptors) { if (next.getFilename().toLowerCase().endsWith(".zip")) { @@ -765,6 +672,92 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc { } + @Nonnull + public static Map processCustomTerminologyFiles(LoadedFileDescriptors theDescriptors, TermCodeSystemVersion theCsv) { + IRecordHandler handler;// Concept File + final Map code2concept = new HashMap<>(); + handler = new ConceptHandler(code2concept, theCsv); + iterateOverZipFile(theDescriptors, CUSTOM_CONCEPTS_FILE, handler, ',', QuoteMode.NON_NUMERIC, false); + + // Hierarchy + if (theDescriptors.hasFile(CUSTOM_HIERARCHY_FILE)) { + handler = new HierarchyHandler(code2concept); + iterateOverZipFile(theDescriptors, CUSTOM_HIERARCHY_FILE, handler, ',', QuoteMode.NON_NUMERIC, false); + } + + // Add root concepts to CodeSystemVersion + for (TermConcept nextConcept : code2concept.values()) { + if (nextConcept.getParents().isEmpty()) { + theCsv.getConcepts().add(nextConcept); + } + } + return code2concept; + } + + private static void iterateOverZipFile(LoadedFileDescriptors theDescriptors, String theFileNamePart, IRecordHandler theHandler, char theDelimiter, QuoteMode theQuoteMode, boolean theIsPartialFilename) { + + boolean foundMatch = false; + for (FileDescriptor nextZipBytes : theDescriptors.getUncompressedFileDescriptors()) { + String nextFilename = nextZipBytes.getFilename(); + boolean matches; + if (theIsPartialFilename) { + matches = nextFilename.contains(theFileNamePart); + } else { + matches = nextFilename.endsWith("/" + theFileNamePart) || nextFilename.equals(theFileNamePart); + } + + if (matches) { + ourLog.info("Processing file {}", nextFilename); + foundMatch = true; + + Reader reader; + CSVParser parsed; + try { + reader = new InputStreamReader(nextZipBytes.getInputStream(), Charsets.UTF_8); + + parsed = newCsvRecords(theDelimiter, theQuoteMode, reader); + Iterator iter = parsed.iterator(); + ourLog.debug("Header map: {}", parsed.getHeaderMap()); + + int count = 0; + int nextLoggedCount = 0; + while (iter.hasNext()) { + CSVRecord nextRecord = iter.next(); + if (nextRecord.isConsistent() == false) { + continue; + } + theHandler.accept(nextRecord); + count++; + if (count >= nextLoggedCount) { + ourLog.info(" * Processed {} records in {}", count, nextFilename); + nextLoggedCount += LOG_INCREMENT; + } + } + + } catch (IOException e) { + throw new InternalErrorException(e); + } + } + + } + + if (!foundMatch) { + throw new InvalidRequestException("Did not find file matching " + theFileNamePart); + } + + } + + @Nonnull + public static CSVParser newCsvRecords(char theDelimiter, QuoteMode theQuoteMode, Reader theReader) throws IOException { + CSVParser parsed; + CSVFormat format = CSVFormat.newFormat(theDelimiter).withFirstRecordAsHeader(); + 
if (theQuoteMode != null) { + format = format.withQuote('"').withQuoteMode(theQuoteMode); + } + parsed = new CSVParser(theReader, format); + return parsed; + } + public static String firstNonBlank(String... theStrings) { String retVal = ""; for (String nextString : theStrings) { diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/custom/ConceptHandler.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/custom/ConceptHandler.java index 81ef1d0e39a..ab51fedcb21 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/custom/ConceptHandler.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/custom/ConceptHandler.java @@ -37,6 +37,8 @@ import static org.apache.commons.lang3.StringUtils.trim; public class ConceptHandler implements IRecordHandler { private static final Logger ourLog = LoggerFactory.getLogger(ConceptHandler.class); + public static final String CODE = "CODE"; + public static final String DISPLAY = "DISPLAY"; private final Map myCode2Concept; private final TermCodeSystemVersion myCodeSystemVersion; @@ -47,9 +49,9 @@ public class ConceptHandler implements IRecordHandler { @Override public void accept(CSVRecord theRecord) { - String code = trim(theRecord.get("CODE")); + String code = trim(theRecord.get(CODE)); if (isNotBlank(code)) { - String display = trim(theRecord.get("DISPLAY")); + String display = trim(theRecord.get(DISPLAY)); Validate.isTrue(!myCode2Concept.containsKey(code), "The code %s has appeared more than once", code); diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchOptimizedTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchOptimizedTest.java index 1a9fb90065b..896a8c07def 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchOptimizedTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchOptimizedTest.java @@ -493,6 +493,12 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test { * 20 should be prefetched since that's the initial page size */ + await().until(()->{ + return runInTransaction(()->{ + Search search = mySearchEntityDao.findByUuidAndFetchIncludes(uuid).orElseThrow(() -> new InternalErrorException("")); + return search.getNumFound() >= 50; + }); + }); runInTransaction(() -> { Search search = mySearchEntityDao.findByUuidAndFetchIncludes(uuid).orElseThrow(() -> new InternalErrorException("")); assertEquals(50, search.getNumFound()); diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4CodeSystemTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4CodeSystemTest.java index db84dfa5787..05c44c1e46f 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4CodeSystemTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4CodeSystemTest.java @@ -47,93 +47,6 @@ public class ResourceProviderR4CodeSystemTest extends BaseResourceProviderR4Test } - @Test - public void testApplyDeltaAdd() { - - CodeSystem delta = new CodeSystem(); - delta.setUrl("http://example.com/labCodes"); - delta.setName("Example Hospital Lab Codes"); - delta.setStatus(Enumerations.PublicationStatus.ACTIVE); - delta.setContent(CodeSystem.CodeSystemContentMode.NOTPRESENT); - delta.setUrl("http://foo"); - CodeSystem.ConceptDefinitionComponent chem = delta - 
.addConcept() - .setCode("CHEM") - .setDisplay("Chemistry Tests"); - chem - .addConcept() - .setCode("HB") - .setDisplay("Hemoglobin"); - chem - .addConcept() - .setCode("NEUT") - .setDisplay("Neutrophil"); - CodeSystem.ConceptDefinitionComponent micro = delta - .addConcept() - .setCode("MICRO") - .setDisplay("Microbiology Tests"); - micro - .addConcept() - .setCode("C&S") - .setDisplay("Culture & Sensitivity"); - - LoggingInterceptor interceptor = new LoggingInterceptor(true); - ourClient.registerInterceptor(interceptor); - Parameters outcome = ourClient - .operation() - .onType(CodeSystem.class) - .named(JpaConstants.OPERATION_APPLY_CODESYSTEM_DELTA_ADD) - .withParameter(Parameters.class, TerminologyUploaderProvider.VALUE, delta) - .prettyPrint() - .execute(); - ourClient.unregisterInterceptor(interceptor); - - String encoded = myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome); - ourLog.info(encoded); - assertThat(encoded, containsString("\"valueInteger\": 5")); - } - - @Test - public void testApplyDeltaRemove() { - // Create not-present - CodeSystem cs = new CodeSystem(); - cs.setUrl("http://foo"); - cs.setContent(CodeSystem.CodeSystemContentMode.NOTPRESENT); - ourClient.create().resource(cs).execute(); - - CodeSystem delta = new CodeSystem(); - delta.setUrl("http://foo"); - delta - .addConcept() - .setCode("codeA") - .setDisplay("displayA"); - - // Add - ourClient - .operation() - .onType(CodeSystem.class) - .named(JpaConstants.OPERATION_APPLY_CODESYSTEM_DELTA_ADD) - .withParameter(Parameters.class, TerminologyUploaderProvider.VALUE, delta) - .prettyPrint() - .execute(); - - // Remove - LoggingInterceptor interceptor = new LoggingInterceptor(true); - ourClient.registerInterceptor(interceptor); - Parameters outcome = ourClient - .operation() - .onType(CodeSystem.class) - .named(JpaConstants.OPERATION_APPLY_CODESYSTEM_DELTA_REMOVE) - .withParameter(Parameters.class, TerminologyUploaderProvider.VALUE, delta) - .prettyPrint() - .execute(); - ourClient.unregisterInterceptor(interceptor); - - String encoded = myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome); - ourLog.info(encoded); - assertThat(encoded, containsString("\"valueInteger\": 1")); - } - @Test public void testLookupOnExternalCode() { ResourceProviderR4ValueSetTest.createExternalCs(myCodeSystemDao, myResourceTableDao, myTermSvc, mySrd); diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/TerminologyUploaderProviderR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/TerminologyUploaderProviderR4Test.java index e97cd9866a5..350fffae75f 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/TerminologyUploaderProviderR4Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/TerminologyUploaderProviderR4Test.java @@ -1,7 +1,10 @@ package ca.uhn.fhir.jpa.provider.r4; +import ca.uhn.fhir.jpa.model.util.JpaConstants; +import ca.uhn.fhir.jpa.provider.TerminologyUploaderProvider; import ca.uhn.fhir.jpa.provider.dstu3.TerminologyUploaderProviderDstu3Test; import ca.uhn.fhir.jpa.term.IHapiTerminologyLoaderSvc; +import ca.uhn.fhir.rest.client.interceptor.LoggingInterceptor; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import ca.uhn.fhir.util.TestUtil; import org.apache.commons.io.IOUtils; @@ -18,6 +21,7 @@ import java.util.List; import java.util.zip.ZipEntry; import java.util.zip.ZipOutputStream; +import static org.hamcrest.CoreMatchers.containsString; 
import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.matchesPattern; import static org.junit.Assert.*; @@ -45,7 +49,6 @@ public class TerminologyUploaderProviderR4Test extends BaseResourceProviderR4Tes public void testUploadInvalidUrl() throws Exception { byte[] packageBytes = createSctZip(); - //@formatter:off try { ourClient .operation() @@ -58,7 +61,6 @@ public class TerminologyUploaderProviderR4Test extends BaseResourceProviderR4Tes } catch (InvalidRequestException e) { assertEquals("HTTP 400 Bad Request: Unknown URL: http://snomed.info/sctFOO", e.getMessage()); } - //@formatter:on } @Test @@ -172,6 +174,93 @@ public class TerminologyUploaderProviderR4Test extends BaseResourceProviderR4Tes assertThat(((IntegerType) respParam.getParameter().get(1).getValue()).getValue(), greaterThan(1)); } + @Test + public void testApplyDeltaAdd() { + + CodeSystem delta = new CodeSystem(); + delta.setUrl("http://example.com/labCodes"); + delta.setName("Example Hospital Lab Codes"); + delta.setStatus(Enumerations.PublicationStatus.ACTIVE); + delta.setContent(CodeSystem.CodeSystemContentMode.NOTPRESENT); + delta.setUrl("http://foo"); + CodeSystem.ConceptDefinitionComponent chem = delta + .addConcept() + .setCode("CHEM") + .setDisplay("Chemistry Tests"); + chem + .addConcept() + .setCode("HB") + .setDisplay("Hemoglobin"); + chem + .addConcept() + .setCode("NEUT") + .setDisplay("Neutrophil"); + CodeSystem.ConceptDefinitionComponent micro = delta + .addConcept() + .setCode("MICRO") + .setDisplay("Microbiology Tests"); + micro + .addConcept() + .setCode("C&S") + .setDisplay("Culture & Sensitivity"); + + LoggingInterceptor interceptor = new LoggingInterceptor(true); + ourClient.registerInterceptor(interceptor); + Parameters outcome = ourClient + .operation() + .onType(CodeSystem.class) + .named(JpaConstants.OPERATION_APPLY_CODESYSTEM_DELTA_ADD) + .withParameter(Parameters.class, TerminologyUploaderProvider.VALUE, delta) + .prettyPrint() + .execute(); + ourClient.unregisterInterceptor(interceptor); + + String encoded = myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome); + ourLog.info(encoded); + assertThat(encoded, containsString("\"valueInteger\": 5")); + } + + @Test + public void testApplyDeltaRemove() { + // Create not-present + CodeSystem cs = new CodeSystem(); + cs.setUrl("http://foo"); + cs.setContent(CodeSystem.CodeSystemContentMode.NOTPRESENT); + ourClient.create().resource(cs).execute(); + + CodeSystem delta = new CodeSystem(); + delta.setUrl("http://foo"); + delta + .addConcept() + .setCode("codeA") + .setDisplay("displayA"); + + // Add + ourClient + .operation() + .onType(CodeSystem.class) + .named(JpaConstants.OPERATION_APPLY_CODESYSTEM_DELTA_ADD) + .withParameter(Parameters.class, TerminologyUploaderProvider.VALUE, delta) + .prettyPrint() + .execute(); + + // Remove + LoggingInterceptor interceptor = new LoggingInterceptor(true); + ourClient.registerInterceptor(interceptor); + Parameters outcome = ourClient + .operation() + .onType(CodeSystem.class) + .named(JpaConstants.OPERATION_APPLY_CODESYSTEM_DELTA_REMOVE) + .withParameter(Parameters.class, TerminologyUploaderProvider.VALUE, delta) + .prettyPrint() + .execute(); + ourClient.unregisterInterceptor(interceptor); + + String encoded = myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome); + ourLog.info(encoded); + assertThat(encoded, containsString("\"valueInteger\": 1")); + } + @AfterClass public static void afterClassClearContext() { 
TestUtil.clearAllStaticFieldsForUnitTest(); diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/subscription/resthook/SubscriptionTriggeringDstu3Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/subscription/resthook/SubscriptionTriggeringDstu3Test.java index a0c1684a7b4..1cfc1e5896f 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/subscription/resthook/SubscriptionTriggeringDstu3Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/subscription/resthook/SubscriptionTriggeringDstu3Test.java @@ -228,8 +228,6 @@ public class SubscriptionTriggeringDstu3Test extends BaseResourceProviderDstu3Te responseValue = response.getParameter().get(0).getValue().primitiveValue(); assertThat(responseValue, containsString("Subscription triggering job submitted as JOB ID")); -// Thread.sleep(1000000000); - waitForSize(51, ourUpdatedObservations); waitForSize(0, ourCreatedObservations); waitForSize(0, ourCreatedPatients); @@ -237,6 +235,72 @@ public class SubscriptionTriggeringDstu3Test extends BaseResourceProviderDstu3Te } + @Test + public void testTriggerUsingOrSeparatedList_MultipleStrings() throws Exception { + myDaoConfig.setSearchPreFetchThresholds(Lists.newArrayList(13, 22, 100)); + + String payload = "application/fhir+json"; + IdType sub2id = createSubscription("Patient?", payload, ourListenerServerBase).getIdElement(); + + // Create lots + for (int i = 0; i < 10; i++) { + Patient p = new Patient(); + p.setId("P"+i); + p.addName().setFamily("P" + i); + ourClient.update().resource(p).execute(); + } + waitForSize(10, ourUpdatedPatients); + + // Use multiple strings + beforeReset(); + Parameters response = ourClient + .operation() + .onInstance(sub2id) + .named(JpaConstants.OPERATION_TRIGGER_SUBSCRIPTION) + .withParameter(Parameters.class, SubscriptionTriggeringProvider.SEARCH_URL, new StringType("Patient?_id=P0")) + .andParameter(SubscriptionTriggeringProvider.SEARCH_URL, new StringType("Patient?_id=P1")) + .andParameter(SubscriptionTriggeringProvider.SEARCH_URL, new StringType("Patient?_id=P2")) + .execute(); + String responseValue = response.getParameter().get(0).getValue().primitiveValue(); + assertThat(responseValue, containsString("Subscription triggering job submitted as JOB ID")); + + waitForSize(0, ourCreatedPatients); + waitForSize(3, ourUpdatedPatients); + + } + + @Test + public void testTriggerUsingOrSeparatedList_SingleString() throws Exception { + myDaoConfig.setSearchPreFetchThresholds(Lists.newArrayList(13, 22, 100)); + + String payload = "application/fhir+json"; + IdType sub2id = createSubscription("Patient?", payload, ourListenerServerBase).getIdElement(); + + // Create lots + for (int i = 0; i < 10; i++) { + Patient p = new Patient(); + p.setId("P"+i); + p.addName().setFamily("P" + i); + ourClient.update().resource(p).execute(); + } + waitForSize(10, ourUpdatedPatients); + + // Use a single + beforeReset(); + Parameters response = ourClient + .operation() + .onInstance(sub2id) + .named(JpaConstants.OPERATION_TRIGGER_SUBSCRIPTION) + .withParameter(Parameters.class, SubscriptionTriggeringProvider.SEARCH_URL, new StringType("Patient?_id=P0,P1,P2")) + .execute(); + String responseValue = response.getParameter().get(0).getValue().primitiveValue(); + assertThat(responseValue, containsString("Subscription triggering job submitted as JOB ID")); + + waitForSize(0, ourCreatedPatients); + waitForSize(3, ourUpdatedPatients); + + } + @Test public void testTriggerUsingSearchesWithCount() throws Exception { String payload = 
"application/fhir+json"; diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologyLoaderSvcIntegrationDstu3Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologyLoaderSvcIntegrationDstu3Test.java index 92a09cf73e5..f3c0bcb8e1d 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologyLoaderSvcIntegrationDstu3Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologyLoaderSvcIntegrationDstu3Test.java @@ -6,7 +6,6 @@ import ca.uhn.fhir.jpa.dao.IFhirResourceDaoValueSet; import ca.uhn.fhir.jpa.dao.dstu3.BaseJpaDstu3Test; import ca.uhn.fhir.util.TestUtil; import com.google.common.collect.Lists; -import org.hl7.fhir.convertors.VersionConvertor_30_40; import org.hl7.fhir.dstu3.model.*; import org.hl7.fhir.instance.model.api.IPrimitiveType; import org.junit.After; @@ -17,13 +16,14 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; +import java.io.IOException; import java.util.List; import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; -import static org.hamcrest.Matchers.containsInAnyOrder; -import static org.hamcrest.Matchers.empty; +import static org.awaitility.Awaitility.await; +import static org.hamcrest.Matchers.*; import static org.junit.Assert.*; public class TerminologyLoaderSvcIntegrationDstu3Test extends BaseJpaDstu3Test { @@ -106,6 +106,19 @@ public class TerminologyLoaderSvcIntegrationDstu3Test extends BaseJpaDstu3Test { assertThat(codes, empty()); } + @Test + public void testStoreAndProcessDeferred() throws IOException { + ZipCollectionBuilder files = new ZipCollectionBuilder(); + TerminologyLoaderSvcLoincTest.addLoincMandatoryFilesToZip(files); + myLoader.loadLoinc(files.getFiles(), mySrd); + + myTermSvc.saveDeferred(); + + runInTransaction(() -> { + await().until(() -> myTermConceptMapDao.count(), greaterThan(0L)); + }); + } + @Test public void testExpandWithPropertyString() throws Exception { ZipCollectionBuilder files = new ZipCollectionBuilder(); diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologySvcImplR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologySvcImplR4Test.java index e4a99c064fa..32c412f5a0d 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologySvcImplR4Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologySvcImplR4Test.java @@ -36,6 +36,7 @@ import java.util.Optional; import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; +import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.Matchers.empty; import static org.junit.Assert.*; import static org.mockito.ArgumentMatchers.*; @@ -1478,6 +1479,7 @@ public class TerminologySvcImplR4Test extends BaseJpaR4Test { assertEquals("8450-9", concept.getCode()); assertEquals("Systolic blood pressure--expiration", concept.getDisplay()); assertEquals(2, concept.getDesignations().size()); + assertThat(concept.toString(), containsString("8450")); List designations = Lists.newArrayList(concept.getDesignations().iterator()); diff --git a/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java index 15719dbf226..8c5b13a4e26 100644 --- 
a/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java +++ b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java @@ -86,7 +86,7 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks { bulkExportCollection.addColumn("PID").nonNullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.LONG); bulkExportCollection.addColumn("JOB_PID").nonNullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.LONG); bulkExportCollection.addForeignKey("FK_BLKEXCOL_JOB").toColumn("JOB_PID").references("HFJ_BLK_EXPORT_JOB", "PID"); - bulkExportCollection.addColumn("RES_TYPE").nonNullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 35); + bulkExportCollection.addColumn("RES_TYPE").nonNullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 40); bulkExportCollection.addColumn("TYPE_FILTER").nullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 1000); bulkExportCollection.addColumn("OPTLOCK").nonNullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.INT); diff --git a/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/util/FhirTerserR4Test.java b/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/util/FhirTerserR4Test.java index 799593acc1b..3ea09f1573d 100644 --- a/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/util/FhirTerserR4Test.java +++ b/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/util/FhirTerserR4Test.java @@ -1022,6 +1022,7 @@ public class FhirTerserR4Test { assertThat(strings, Matchers.contains("http://foo")); } + @Test public void testVisitWithModelVisitor2() { IModelVisitor2 visitor = mock(IModelVisitor2.class); @@ -1048,6 +1049,97 @@ public class FhirTerserR4Test { } + @Test + public void testGetAllPopulatedChildElementsOfType() { + + Patient p = new Patient(); + p.setGender(Enumerations.AdministrativeGender.MALE); + p.addIdentifier().setSystem("urn:foo"); + p.addAddress().addLine("Line1"); + p.addAddress().addLine("Line2"); + p.addName().setFamily("Line3"); + + FhirTerser t = ourCtx.newTerser(); + List strings = t.getAllPopulatedChildElementsOfType(p, StringType.class); + + assertEquals(3, strings.size()); + + Set allStrings = new HashSet<>(); + for (StringType next : strings) { + allStrings.add(next.getValue()); + } + + assertThat(allStrings, containsInAnyOrder("Line1", "Line2", "Line3")); + + } + + @Test + public void testMultiValueTypes() { + + Observation obs = new Observation(); + obs.setValue(new Quantity(123L)); + + FhirTerser t = ourCtx.newTerser(); + + // As string + { + List values = t.getValues(obs, "Observation.valueString"); + assertEquals(0, values.size()); + } + + // As quantity + { + List values = t.getValues(obs, "Observation.valueQuantity"); + assertEquals(1, values.size()); + Quantity actual = (Quantity) values.get(0); + assertEquals("123", actual.getValueElement().getValueAsString()); + } + } + + @Test + public void testTerser() { + + //@formatter:off + String msg = "\n" + + " \n" + + " \n" + + "
\n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + ""; + //@formatter:on + + Observation parsed = ourCtx.newXmlParser().parseResource(Observation.class, msg); + FhirTerser t = ourCtx.newTerser(); + + List elems = t.getAllPopulatedChildElementsOfType(parsed, Reference.class); + assertEquals(2, elems.size()); + assertEquals("cid:patient@bundle", elems.get(0).getReferenceElement().getValue()); + assertEquals("cid:device@bundle", elems.get(1).getReferenceElement().getValue()); + } + + private List toStrings(List theStrings) { ArrayList retVal = new ArrayList(); for (StringType next : theStrings) { diff --git a/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/util/FhirTerserTest.java b/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/util/FhirTerserTest.java deleted file mode 100644 index ce67f939ec6..00000000000 --- a/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/util/FhirTerserTest.java +++ /dev/null @@ -1,117 +0,0 @@ -package ca.uhn.fhir.util; - -import static org.hamcrest.Matchers.containsInAnyOrder; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertThat; - -import java.util.*; - -import org.hl7.fhir.r4.model.*; -import org.hl7.fhir.r4.model.Enumerations.AdministrativeGender; -import org.junit.AfterClass; -import org.junit.Test; - -import ca.uhn.fhir.context.FhirContext; - -public class FhirTerserTest { - - private static FhirContext ourCtx = FhirContext.forR4(); - - private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirTerserTest.class); - - @Test - public void testGetAllPopulatedChildElementsOfType() { - - Patient p = new Patient(); - p.setGender(AdministrativeGender.MALE); - p.addIdentifier().setSystem("urn:foo"); - p.addAddress().addLine("Line1"); - p.addAddress().addLine("Line2"); - p.addName().setFamily("Line3"); - - FhirTerser t = ourCtx.newTerser(); - List strings = t.getAllPopulatedChildElementsOfType(p, StringType.class); - - assertEquals(3, strings.size()); - - Set allStrings = new HashSet<>(); - for (StringType next : strings) { - allStrings.add(next.getValue()); - } - - assertThat(allStrings, containsInAnyOrder("Line1", "Line2", "Line3")); - - } - - @Test - public void testMultiValueTypes() { - - Observation obs = new Observation(); - obs.setValue(new Quantity(123L)); - - FhirTerser t = ourCtx.newTerser(); - - // As string - { - List values = t.getValues(obs, "Observation.valueString"); - assertEquals(0, values.size()); - } - - // As quantity - { - List values = t.getValues(obs, "Observation.valueQuantity"); - assertEquals(1, values.size()); - Quantity actual = (Quantity) values.get(0); - assertEquals("123", actual.getValueElement().getValueAsString()); - } - } - - @Test - public void testTerser() { - - //@formatter:off - String msg = "\n" + - " \n" + - " \n" + - "
\n" + - " \n" + - " \n" + - " \n" + - " \n" + - " \n" + - " \n" + - " \n" + - " \n" + - " \n" + - " \n" + - " \n" + - " \n" + - " \n" + - " \n" + - " \n" + - " \n" + - " \n" + - " \n" + - " \n" + - " \n" + - " \n" + - " \n" + - " \n" + - ""; - //@formatter:on - - Observation parsed = ourCtx.newXmlParser().parseResource(Observation.class, msg); - FhirTerser t = ourCtx.newTerser(); - - List elems = t.getAllPopulatedChildElementsOfType(parsed, Reference.class); - assertEquals(2, elems.size()); - assertEquals("cid:patient@bundle", elems.get(0).getReferenceElement().getValue()); - assertEquals("cid:device@bundle", elems.get(1).getReferenceElement().getValue()); - } - - @AfterClass - public static void afterClassClearContext() { - TestUtil.clearAllStaticFieldsForUnitTest(); - } - -} diff --git a/src/changes/changes.xml b/src/changes/changes.xml index 006580c024d..a7fcb2a5096 100644 --- a/src/changes/changes.xml +++ b/src/changes/changes.xml @@ -304,7 +304,7 @@ A number of overridden methods in the HAPI FHIR codebase did not have the @Override annotation. Thanks to Clayton Bodendein for cleaning this up! - + Plain server resource providers were not correctly matching methods that had the _id search parameter if a client performed a request using a modifier such as :not or :exact. Thanks to Petro Mykhailyshyn @@ -315,6 +315,13 @@ that was too short to hold the longest name from the final R4 definitions. This has been corrected to account for names up to 40 characters long. + + A new command has been added to the HAPI FHIR CLI that allows external (not-present) codesystem deltas to be manually uploaded + + + The subscription triggering operation was not able to handle commas within search URLs being + used to trigger resources for subscription checking. This has been corrected. +