Merge branch 'master' of github.com:jamesagnew/hapi-fhir
Commit b4de3aa5dd
@@ -195,7 +195,7 @@ public abstract class BaseRuntimeElementDefinition<T extends IBase> {
   public abstract ChildTypeEnum getChildType();

   public enum ChildTypeEnum {
      COMPOSITE_DATATYPE, PRIMITIVE_DATATYPE, RESOURCE, RESOURCE_REF, RESOURCE_BLOCK,
      COMPOSITE_DATATYPE, PRIMITIVE_DATATYPE, RESOURCE, RESOURCE_BLOCK,
      /**
       * HAPI style.
       */
@@ -1,45 +0,0 @@
package ca.uhn.fhir.context;

/*
 * #%L
 * HAPI FHIR - Core Library
 * %%
 * Copyright (C) 2014 - 2016 University Health Network
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import java.util.Map;

import org.hl7.fhir.instance.model.api.IBase;
import org.hl7.fhir.instance.model.api.IBaseReference;

public class RuntimeResourceReferenceDefinition extends BaseRuntimeElementCompositeDefinition<IBaseReference> {

   public RuntimeResourceReferenceDefinition(String theName, Class<? extends IBaseReference> theImplementingClass, boolean theStandardType) {
      super(theName, theImplementingClass, theStandardType);
   }

   @Override
   void sealAndInitialize(FhirContext theContext, Map<Class<? extends IBase>, BaseRuntimeElementDefinition<?>> theClassToElementDefinitions) {
      super.sealAndInitialize(theContext, theClassToElementDefinitions);
   }

   @Override
   public ca.uhn.fhir.context.BaseRuntimeElementDefinition.ChildTypeEnum getChildType() {
      return ChildTypeEnum.RESOURCE_REF;
   }

}
@@ -471,7 +471,6 @@ public class JsonParser extends BaseParser implements IParser {
            }
            break;
         }
         case RESOURCE_REF:
         case RESOURCE_BLOCK:
         case COMPOSITE_DATATYPE: {
            if (theChildName != null) {
@@ -91,6 +91,7 @@ import ca.uhn.fhir.rest.server.Constants;
import ca.uhn.fhir.util.BundleUtil;
import ca.uhn.fhir.util.FhirTerser;
import ca.uhn.fhir.util.IModelVisitor;
import ca.uhn.fhir.util.ReflectionUtil;

class ParserState<T> {
@@ -128,16 +129,6 @@ class ParserState<T> {
      }
   }

   private BaseState createResourceReferenceState(ParserState<T>.PreResourceState thePreResourceState, IBase newChildInstance) {
      BaseState newState;
      if (newChildInstance instanceof IBaseReference) {
         newState = new ResourceReferenceStateHl7Org(thePreResourceState, (IBaseReference) newChildInstance);
      } else {
         newState = new ResourceReferenceStateHapi(thePreResourceState, (BaseResourceReferenceDt) newChildInstance);
      }
      return newState;
   }

   public void endingElement() throws DataFormatException {
      myState.endingElement();
   }
@@ -159,31 +150,7 @@ class ParserState<T> {
   }

   private Object newContainedDt(IResource theTarget) {

      Object newChildInstance;
      try {
         newChildInstance = theTarget.getStructureFhirVersionEnum().getVersionImplementation().getContainedType().newInstance();
      } catch (InstantiationException e) {
         throw new ConfigurationException("Failed to instantiate " + myContext.getVersion().getResourceReferenceType(), e);
      } catch (IllegalAccessException e) {
         throw new ConfigurationException("Failed to instantiate " + myContext.getVersion().getResourceReferenceType(), e);
      }
      return newChildInstance;
   }

   private IBase newResourceReferenceDt(IBaseResource theTarget) {

      IBase newChildInstance;
      try {
         IFhirVersion version;
         version = theTarget.getStructureFhirVersionEnum().getVersionImplementation();
         newChildInstance = version.getResourceReferenceType().newInstance();
      } catch (InstantiationException e) {
         throw new ConfigurationException("Failed to instantiate " + myContext.getVersion().getResourceReferenceType(), e);
      } catch (IllegalAccessException e) {
         throw new ConfigurationException("Failed to instantiate " + myContext.getVersion().getResourceReferenceType(), e);
      }
      return newChildInstance;
      return ReflectionUtil.newInstance(theTarget.getStructureFhirVersionEnum().getVersionImplementation().getContainedType());
   }

   @SuppressWarnings("unchecked")
@@ -1539,13 +1506,6 @@ class ParserState<T> {
            push(newState);
            return;
         }
         case RESOURCE_REF: {
            IBase newChildInstance = newResourceReferenceDt(myPreResourceState.myInstance);
            myDefinition.getMutator().addValue(myParentInstance, newChildInstance);
            BaseState newState = createResourceReferenceState(getPreResourceState(), newChildInstance);
            push(newState);
            return;
         }
         case PRIMITIVE_XHTML:
         case RESOURCE:
         case RESOURCE_BLOCK:
@@ -1607,7 +1567,6 @@ class ParserState<T> {
         }
      }

      @SuppressWarnings("unchecked")
      @Override
      public void endingElement() {
         pop();
@@ -1666,13 +1625,6 @@ class ParserState<T> {
            push(newState);
            return;
         }
         case RESOURCE_REF: {
            IBase newChildInstance = newResourceReferenceDt(getPreResourceState().myInstance);
            child.getMutator().addValue(myInstance, newChildInstance);
            BaseState newState = createResourceReferenceState(getPreResourceState(), newChildInstance);
            push(newState);
            return;
         }
         case RESOURCE_BLOCK: {
            RuntimeResourceBlockDefinition blockTarget = (RuntimeResourceBlockDefinition) target;
            IBase newBlockInstance = blockTarget.newInstance();
@@ -1846,18 +1798,6 @@ class ParserState<T> {
            push(newState);
            return;
         }
         case RESOURCE_REF: {
            ICompositeType newChildInstance = (ICompositeType) newResourceReferenceDt(getPreResourceState().myInstance);
            myExtension.setValue(newChildInstance);
            if (myContext.getVersion().getVersion().isRi()) {
               ParserState<T>.ResourceReferenceStateHl7Org newState = new ResourceReferenceStateHl7Org(getPreResourceState(), (IBaseReference) newChildInstance);
               push(newState);
            } else {
               ResourceReferenceStateHapi newState = new ResourceReferenceStateHapi(getPreResourceState(), (BaseResourceReferenceDt) newChildInstance);
               push(newState);
            }
            return;
         }
         case PRIMITIVE_XHTML:
         case RESOURCE:
         case RESOURCE_BLOCK:
@@ -2304,7 +2244,6 @@ class ParserState<T> {
         }
      }

      @SuppressWarnings("unchecked")
      @Override
      public void wereBack() {
         super.wereBack();
@@ -2347,7 +2286,6 @@ class ParserState<T> {
         }
      }

      @SuppressWarnings("unchecked")
      @Override
      public void wereBack() {
         super.wereBack();
@@ -2460,151 +2398,6 @@ class ParserState<T> {

   }

   private class ResourceReferenceStateHapi extends BaseState {

      private BaseResourceReferenceDt myInstance;
      private ResourceReferenceSubState mySubState;

      public ResourceReferenceStateHapi(PreResourceState thePreResourceState, BaseResourceReferenceDt theInstance) {
         super(thePreResourceState);
         myInstance = theInstance;
         mySubState = ResourceReferenceSubState.INITIAL;
      }

      @Override
      public void attributeValue(String theName, String theValue) throws DataFormatException {
         if (!"value".equals(theName)) {
            return;
         }

         switch (mySubState) {
         case DISPLAY:
            myInstance.getDisplayElement().setValue(theValue);
            break;
         case INITIAL:
            throw new DataFormatException("Unexpected attribute: " + theValue);
         case REFERENCE:
            myInstance.getReference().setValue(theValue);
            break;
         }
      }

      @Override
      public void endingElement() {
         switch (mySubState) {
         case INITIAL:
            pop();
            break;
         case DISPLAY:
         case REFERENCE:
            mySubState = ResourceReferenceSubState.INITIAL;
         }
      }

      @Override
      public void enteringNewElement(String theNamespaceUri, String theLocalPart) throws DataFormatException {
         switch (mySubState) {
         case INITIAL:
            if ("display".equals(theLocalPart)) {
               mySubState = ResourceReferenceSubState.DISPLAY;
               break;
            } else if ("reference".equals(theLocalPart)) {
               mySubState = ResourceReferenceSubState.REFERENCE;
               break;
            } else if ("resource".equals(theLocalPart)) {
               mySubState = ResourceReferenceSubState.REFERENCE;
               break;
            }
            //$FALL-THROUGH$
         case DISPLAY:
         case REFERENCE:
            throw new DataFormatException("Unexpected element: " + theLocalPart);
         }
      }

      @Override
      protected IElement getCurrentElement() {
         return myInstance;
      }

   }

   private class ResourceReferenceStateHl7Org extends BaseState {

      private IBaseReference myInstance;
      private ResourceReferenceSubState mySubState;

      public ResourceReferenceStateHl7Org(PreResourceState thePreResourceState, IBaseReference theInstance) {
         super(thePreResourceState);
         myInstance = theInstance;
         mySubState = ResourceReferenceSubState.INITIAL;
      }

      @Override
      public void attributeValue(String theName, String theValue) throws DataFormatException {
         if (!"value".equals(theName)) {
            return;
         }

         switch (mySubState) {
         case DISPLAY:
            myInstance.setDisplay(theValue);
            break;
         case INITIAL:
            throw new DataFormatException("Unexpected attribute: " + theValue);
         case REFERENCE:
            myInstance.setReference(theValue);
            break;
         }
      }

      @Override
      public void endingElement() {
         switch (mySubState) {
         case INITIAL:
            pop();
            break;
         case DISPLAY:
         case REFERENCE:
            mySubState = ResourceReferenceSubState.INITIAL;
         }
      }

      @Override
      public void enteringNewElement(String theNamespaceUri, String theLocalPart) throws DataFormatException {
         switch (mySubState) {
         case INITIAL:
            if ("display".equals(theLocalPart)) {
               mySubState = ResourceReferenceSubState.DISPLAY;
               break;
            } else if ("reference".equals(theLocalPart)) {
               mySubState = ResourceReferenceSubState.REFERENCE;
               break;
            } else if ("resource".equals(theLocalPart)) {
               mySubState = ResourceReferenceSubState.REFERENCE;
               break;
            } else {
               logAndSwallowUnexpectedElement(theLocalPart);
               break;
            }
         case DISPLAY:
         case REFERENCE:
            logAndSwallowUnexpectedElement(theLocalPart);
            break;
         }
      }

      @Override
      protected IBaseReference getCurrentElement() {
         return myInstance;
      }

   }

   private enum ResourceReferenceSubState {
      DISPLAY, INITIAL, REFERENCE
   }

   private class ResourceStateHapi extends ElementCompositeState {

      private IResource myInstance;
@@ -531,7 +531,6 @@ public class XmlParser extends BaseParser implements IParser {
            }
            break;
         }
         case RESOURCE_REF:
         case RESOURCE_BLOCK:
         case COMPOSITE_DATATYPE: {
            theEventWriter.writeStartElement(childName);
@@ -422,16 +422,6 @@ public class FhirTerser {
         case PRIMITIVE_DATATYPE:
            // These are primitive types, so we don't need to visit their children
            break;
         case RESOURCE_REF:
            IBaseReference resRefDt = (IBaseReference) theElement;
            if (resRefDt.getReferenceElement().getValue() == null && resRefDt.getResource() != null) {
               IBaseResource theResource = resRefDt.getResource();
               if (theResource.getIdElement() == null || theResource.getIdElement().isEmpty() || theResource.getIdElement().isLocal()) {
                  BaseRuntimeElementCompositeDefinition<?> def = myContext.getResourceDefinition(theResource);
                  visit(theResource, null, def, theCallback, theContainingElementPath, theChildDefinitionPath, theElementDefinitionPath);
               }
            }
            break;
         case RESOURCE:
         case RESOURCE_BLOCK:
         case COMPOSITE_DATATYPE: {
@@ -575,7 +565,6 @@ public class FhirTerser {
         case PRIMITIVE_DATATYPE:
            // These are primitive types
            break;
         case RESOURCE_REF:
         case RESOURCE:
         case RESOURCE_BLOCK:
         case COMPOSITE_DATATYPE: {
@@ -0,0 +1,28 @@
package ca.uhn.fhir.util;

import static org.junit.Assert.*;

import java.util.ArrayList;
import java.util.List;

import org.junit.Test;

import ca.uhn.fhir.context.ConfigurationException;

public class ReflectionUtilTest {

   @Test
   public void testNewInstance() {
      assertEquals(ArrayList.class, ReflectionUtil.newInstance(ArrayList.class).getClass());
   }

   @Test
   public void testNewInstanceFail() {
      try {
         ReflectionUtil.newInstance(List.class);
         fail();
      } catch (ConfigurationException e) {
         assertEquals("Failed to instantiate java.util.List", e.getMessage());
      }
   }
}
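The test above pins down the behaviour the parser changes in this commit rely on: ReflectionUtil.newInstance(Class) replaces the hand-rolled Class.newInstance() try/catch blocks that were removed from ParserState. The utility itself is not part of this diff; a minimal sketch consistent with the removed code and with the message asserted in testNewInstanceFail might look like this (an assumption, not the actual HAPI source):

   public static <T> T newInstance(Class<T> theType) {
      try {
         return theType.newInstance();
      } catch (Exception e) {
         // ReflectionUtilTest expects exactly this message when java.util.List cannot be instantiated
         throw new ConfigurationException("Failed to instantiate " + theType.getName(), e);
      }
   }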
@@ -40,6 +40,7 @@ public class App {
      ourCommands.add(new ValidateCommand());
      ourCommands.add(new ValidationDataUploader());
      ourCommands.add(new WebsocketSubscribeCommand());
      ourCommands.add(new UploadTerminologyCommand());

      Collections.sort(ourCommands);
   }
@@ -0,0 +1,159 @@
package ca.uhn.fhir.cli;

import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.StringUtils.isNotBlank;

import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClientBuilder;
import org.fusesource.jansi.Ansi;
import org.hl7.fhir.dstu3.hapi.validation.DefaultProfileValidationSupport;
import org.hl7.fhir.dstu3.hapi.validation.FhirInstanceValidator;
import org.hl7.fhir.dstu3.model.Attachment;
import org.hl7.fhir.dstu3.model.Bundle.BundleEntryComponent;
import org.hl7.fhir.dstu3.model.Bundle.BundleType;
import org.hl7.fhir.dstu3.model.Bundle.HTTPVerb;
import org.hl7.fhir.dstu3.model.IdType;
import org.hl7.fhir.dstu3.model.Parameters;
import org.hl7.fhir.dstu3.model.Resource;
import org.hl7.fhir.dstu3.model.StringType;
import org.hl7.fhir.instance.model.api.IBase;
import org.hl7.fhir.instance.model.api.IBaseBundle;
import org.hl7.fhir.instance.model.api.IBaseParameters;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.omg.Dynamic.Parameter;

import ca.uhn.fhir.context.BaseRuntimeChildDefinition;
import ca.uhn.fhir.context.BaseRuntimeElementCompositeDefinition;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.FhirVersionEnum;
import ca.uhn.fhir.jpa.term.IHapiTerminologyLoaderSvc;
import ca.uhn.fhir.model.api.IResource;
import ca.uhn.fhir.model.dstu2.resource.Bundle;
import ca.uhn.fhir.model.dstu2.resource.Bundle.Entry;
import ca.uhn.fhir.model.dstu2.resource.Bundle.EntryRequest;
import ca.uhn.fhir.model.dstu2.valueset.HTTPVerbEnum;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.model.valueset.BundleTypeEnum;
import ca.uhn.fhir.parser.DataFormatException;
import ca.uhn.fhir.rest.client.IGenericClient;
import ca.uhn.fhir.rest.client.apache.GZipContentInterceptor;
import ca.uhn.fhir.rest.server.IVersionSpecificBundleFactory;
import ca.uhn.fhir.rest.server.exceptions.BaseServerResponseException;
import ca.uhn.fhir.util.BundleUtil;
import ca.uhn.fhir.util.ResourceReferenceInfo;
import ca.uhn.fhir.validation.FhirValidator;
import ca.uhn.fhir.validation.ValidationResult;

public class UploadTerminologyCommand extends BaseCommand {

   private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(UploadTerminologyCommand.class);

   @Override
   public String getCommandDescription() {
      //@formatter:off
      return "Uploads a terminology package (e.g. a SNOMED CT ZIP file) to a HAPI JPA server. "
            + "Note that this command uses a custom operation that is only implemented on HAPI "
            + "JPA servers that have been configured to accept it.";
      //@formatter:on
   }

   @Override
   public String getCommandName() {
      return "upload-terminology";
   }

   @Override
   public Options getOptions() {
      Options options = new Options();
      Option opt;

      addFhirVersionOption(options);

      opt = new Option("t", "target", true, "Base URL for the target server (e.g. \"http://example.com/fhir\")");
      opt.setRequired(true);
      options.addOption(opt);

      opt = new Option("u", "url", true, "The code system URL associated with this upload (e.g. " + IHapiTerminologyLoaderSvc.SCT_URL + ")");
      opt.setRequired(false);
      options.addOption(opt);

      opt = new Option("d", "data", true, "Local *.zip containing file to use to upload");
      opt.setRequired(false);
      options.addOption(opt);

      return options;
   }

   @Override
   public void run(CommandLine theCommandLine) throws Exception {
      FhirContext ctx = getSpecVersionContext(theCommandLine);

      String targetServer = theCommandLine.getOptionValue("t");
      if (isBlank(targetServer)) {
         throw new ParseException("No target server (-t) specified");
      } else if (targetServer.startsWith("http") == false && targetServer.startsWith("file") == false) {
         throw new ParseException("Invalid target server specified, must begin with 'http' or 'file'");
      }

      String termUrl = theCommandLine.getOptionValue("u");
      if (isBlank(termUrl)) {
         throw new ParseException("No URL provided");
      }

      String datafile = theCommandLine.getOptionValue("d");
      if (isBlank(datafile)) {
         throw new ParseException("No data file provided");
      }

      IGenericClient client = super.newClient(ctx, targetServer);
      IBaseParameters inputParameters;
      if (ctx.getVersion().getVersion() == FhirVersionEnum.DSTU3) {
         Parameters p = new Parameters();
         p.addParameter().setName("url").setValue(new StringType(termUrl));
         p.addParameter().setName("localfile").setValue(new StringType(datafile));
         inputParameters = p;
      } else {
         throw new ParseException("This command does not support FHIR version " + ctx.getVersion().getVersion());
      }

      ourLog.info("Beginning upload - This may take a while...");
      IBaseParameters response = client
            .operation()
            .onServer()
            .named("upload-external-code-system")
            .withParameters(inputParameters)
            .execute();

      ourLog.info("Upload complete!");
   }

}
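Given the options registered in getOptions() above, the new command would be invoked roughly like this (the launcher name and the exact spelling of the FHIR-version flag added by addFhirVersionOption() are assumptions, since neither appears in this diff):

   hapi-fhir-cli upload-terminology -v dstu3 -t http://localhost:8080/baseDstu3 -u http://snomed.info/sct -d Snomed_Release.zip

Note that run() passes the -d value to the server as the "localfile" parameter rather than reading the file into the request, so the path must be readable by the server process itself.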
@@ -210,6 +210,10 @@
            </overlays>
            <webXml>src/main/webapp/WEB-INF/web.xml</webXml>
            <attachClasses>true</attachClasses>
            <excludes>
               WEB-INF/lib/Saxon-HE-*,
               WEB-INF/lib/hapi-*
            </excludes>
         </configuration>
      </plugin>
@@ -1,5 +1,6 @@
package ca.uhn.fhir.jpa.demo;

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
@@ -19,6 +20,7 @@ import ca.uhn.fhir.jpa.provider.JpaSystemProviderDstu1;
import ca.uhn.fhir.jpa.provider.JpaSystemProviderDstu2;
import ca.uhn.fhir.jpa.provider.dstu3.JpaConformanceProviderDstu3;
import ca.uhn.fhir.jpa.provider.dstu3.JpaSystemProviderDstu3;
import ca.uhn.fhir.jpa.provider.dstu3.TerminologyUploaderProviderDstu3;
import ca.uhn.fhir.model.api.IResource;
import ca.uhn.fhir.model.dstu2.composite.MetaDt;
import ca.uhn.fhir.model.dstu2.resource.Bundle;
@@ -74,13 +76,14 @@ public class JpaServerDemo extends RestfulServer {
       * The system provider implements non-resource-type methods, such as
       * transaction, and global history.
       */
      Object systemProvider;
      List<Object> systemProvider = new ArrayList<Object>();
      if (fhirVersion == FhirVersionEnum.DSTU1) {
         systemProvider = myAppCtx.getBean("mySystemProviderDstu1", JpaSystemProviderDstu1.class);
         systemProvider.add(myAppCtx.getBean("mySystemProviderDstu1", JpaSystemProviderDstu1.class));
      } else if (fhirVersion == FhirVersionEnum.DSTU2) {
         systemProvider = myAppCtx.getBean("mySystemProviderDstu2", JpaSystemProviderDstu2.class);
         systemProvider.add(myAppCtx.getBean("mySystemProviderDstu2", JpaSystemProviderDstu2.class));
      } else if (fhirVersion == FhirVersionEnum.DSTU3) {
         systemProvider = myAppCtx.getBean("mySystemProviderDstu3", JpaSystemProviderDstu3.class);
         systemProvider.add(myAppCtx.getBean("mySystemProviderDstu3", JpaSystemProviderDstu3.class));
         systemProvider.add(myAppCtx.getBean(TerminologyUploaderProviderDstu3.class));
      } else {
         throw new IllegalStateException();
      }
@@ -38,8 +38,11 @@ import ca.uhn.fhir.jpa.dao.FulltextSearchSvcImpl;
import ca.uhn.fhir.jpa.dao.IFhirSystemDao;
import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
import ca.uhn.fhir.jpa.dao.dstu3.SearchParamExtractorDstu3;
import ca.uhn.fhir.jpa.provider.dstu3.TerminologyUploaderProviderDstu3;
import ca.uhn.fhir.jpa.term.HapiTerminologySvcDstu3;
import ca.uhn.fhir.jpa.term.IHapiTerminologyLoaderSvc;
import ca.uhn.fhir.jpa.term.IHapiTerminologySvcDstu3;
import ca.uhn.fhir.jpa.term.TerminologyLoaderSvc;
import ca.uhn.fhir.jpa.validation.JpaValidationSupportChainDstu3;
import ca.uhn.fhir.validation.IValidatorModule;
@@ -51,6 +54,11 @@ public class BaseDstu3Config extends BaseConfig {
   public IHapiTerminologySvcDstu3 terminologyService() {
      return new HapiTerminologySvcDstu3();
   }

   @Bean(autowire = Autowire.BY_TYPE)
   public IHapiTerminologyLoaderSvc terminologyLoaderService() {
      return new TerminologyLoaderSvc();
   }

   @Bean
   public HapiWorkerContext workerContext() {
@@ -108,4 +116,12 @@ public class BaseDstu3Config extends BaseConfig {
      return new SearchParamExtractorDstu3();
   }

   @Bean(autowire=Autowire.BY_TYPE)
   public TerminologyUploaderProviderDstu3 terminologyUploaderProvider() {
      TerminologyUploaderProviderDstu3 retVal = new TerminologyUploaderProviderDstu3();
      retVal.setContext(defaultFhirContext());
      return retVal;
   }

}
@@ -1,21 +1,34 @@
package ca.uhn.fhir.jpa.provider.dstu3;

import static org.apache.commons.lang3.StringUtils.defaultString;
import static org.apache.commons.lang3.StringUtils.isNotBlank;

import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;

import javax.servlet.http.HttpServletRequest;

import org.apache.commons.io.IOUtils;
import org.apache.commons.io.output.LockableFileWriter;
import org.hl7.fhir.dstu3.model.Attachment;
import org.hl7.fhir.dstu3.model.IntegerType;
import org.hl7.fhir.dstu3.model.Parameters;
import org.hl7.fhir.dstu3.model.StringType;
import org.hl7.fhir.dstu3.model.UriType;
import org.springframework.beans.factory.annotation.Autowired;

import ca.uhn.fhir.jpa.provider.BaseJpaProvider;
import ca.uhn.fhir.jpa.term.IHapiTerminologyLoaderSvc;
import ca.uhn.fhir.jpa.term.IHapiTerminologyLoaderSvc.UploadStatistics;
import ca.uhn.fhir.rest.annotation.Operation;
import ca.uhn.fhir.rest.annotation.OperationParam;
import ca.uhn.fhir.rest.method.RequestDetails;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;

public class TerminologyUploaderProviderDstu3 extends BaseJpaProvider {
   private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(TerminologyUploaderProviderDstu3.class);

   @Autowired
   private IHapiTerminologyLoaderSvc myTerminologyLoaderSvc;
@@ -27,24 +40,40 @@ public class TerminologyUploaderProviderDstu3 extends BaseJpaProvider {
   public Parameters lookup(
         HttpServletRequest theServletRequest,
         @OperationParam(name="url", min=1) UriType theUrl,
         @OperationParam(name="package", min=1) Attachment thePackage,
         @OperationParam(name="package", min=0) Attachment thePackage,
         @OperationParam(name="localfile", min=0) StringType theLocalFile,
         RequestDetails theRequestDetails
         ) {
      //@formatter:on

      startRequest(theServletRequest);
      try {
         byte[] data = thePackage.getData();
         String url = theUrl.getValueAsString();
         byte[] data;
         if (theLocalFile != null && isNotBlank(theLocalFile.getValue())) {
            ourLog.info("Reading in local file: {}", theLocalFile.getValue());
            try {
               data = IOUtils.toByteArray(new FileInputStream(theLocalFile.getValue()));
            } catch (IOException e) {
               throw new InternalErrorException(e);
            }
         } else if (thePackage == null || thePackage.getData() == null || thePackage.getData().length == 0) {
            throw new InvalidRequestException("No 'localfile' or 'package' parameter, or package had no data");
         } else {
            data = thePackage.getData();
         }

         String url = theUrl != null ? theUrl.getValueAsString() : null;
         url = defaultString(url);

         UploadStatistics stats;
         if (IHapiTerminologyLoaderSvc.SCT_URL.equals(url)) {
            myTerminologyLoaderSvc.loadSnomedCt(data, theRequestDetails);
            stats = myTerminologyLoaderSvc.loadSnomedCt(data, theRequestDetails);
         } else {
            throw new InvalidRequestException("Unknown URL: " + url);
         }

         Parameters retVal = new Parameters();
         retVal.addParameter().setName("conceptCount").setValue(new IntegerType(0));
         retVal.addParameter().setName("conceptCount").setValue(new IntegerType(stats.getConceptCount()));
         return retVal;
      } finally {
         endRequest(theServletRequest);
@@ -5,7 +5,20 @@ import ca.uhn.fhir.rest.method.RequestDetails;
public interface IHapiTerminologyLoaderSvc {

   String SCT_URL = "http://snomed.info/sct";

   void loadSnomedCt(byte[] theZipBytes, RequestDetails theRequestDetails);

   UploadStatistics loadSnomedCt(byte[] theZipBytes, RequestDetails theRequestDetails);

   public static class UploadStatistics {
      private int myConceptCount;

      public int getConceptCount() {
         return myConceptCount;
      }

      public UploadStatistics setConceptCount(int theConceptCount) {
         myConceptCount = theConceptCount;
         return this;
      }
   }

}
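The interface change above is what lets the provider return a real concept count instead of the hard-coded IntegerType(0): loadSnomedCt() now returns an UploadStatistics object rather than void, and the fluent setConceptCount() keeps construction to a single expression. A hypothetical caller (variable names here are illustrative, not taken from the diff) would consume it like this:

   UploadStatistics stats = myTerminologyLoaderSvc.loadSnomedCt(zipBytes, theRequestDetails);
   ourLog.info("Stored {} SNOMED CT concepts", stats.getConceptCount());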
@@ -131,7 +131,7 @@ public class TerminologyLoaderSvc implements IHapiTerminologyLoaderSvc {
   }

   @Override
   public void loadSnomedCt(byte[] theZipBytes, RequestDetails theRequestDetails) {
   public UploadStatistics loadSnomedCt(byte[] theZipBytes, RequestDetails theRequestDetails) {
      List<String> allFilenames = Arrays.asList(SCT_FILE_DESCRIPTION, SCT_FILE_RELATIONSHIP, SCT_FILE_CONCEPT);

      Map<String, File> filenameToFile = new HashMap<String, File>();
@@ -174,7 +174,7 @@ public class TerminologyLoaderSvc implements IHapiTerminologyLoaderSvc {
      ourLog.info("Beginning SNOMED CT processing");

      try {
         processSnomedCtFiles(filenameToFile, theRequestDetails);
         return processSnomedCtFiles(filenameToFile, theRequestDetails);
      } finally {
         ourLog.info("Finished SNOMED CT file import, cleaning up temporary files");
         for (File nextFile : filenameToFile.values()) {
@@ -183,7 +183,7 @@ public class TerminologyLoaderSvc implements IHapiTerminologyLoaderSvc {
      }
   }

   void processSnomedCtFiles(Map<String, File> filenameToFile, RequestDetails theRequestDetails) {
   UploadStatistics processSnomedCtFiles(Map<String, File> filenameToFile, RequestDetails theRequestDetails) {
      final TermCodeSystemVersion codeSystemVersion = new TermCodeSystemVersion();
      final Map<String, TermConcept> id2concept = new HashMap<String, TermConcept>();
      final Map<String, TermConcept> code2concept = new HashMap<String, TermConcept>();
@@ -211,6 +211,8 @@ public class TerminologyLoaderSvc implements IHapiTerminologyLoaderSvc {

      codeSystemVersion.getConcepts().addAll(rootConcepts.values());
      myTermSvc.storeNewCodeSystemVersion(SCT_URL, codeSystemVersion, theRequestDetails);

      return new UploadStatistics().setConceptCount(code2concept.size());
   }

   @VisibleForTesting
@@ -15,6 +15,7 @@ import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean;
import org.springframework.transaction.annotation.EnableTransactionManagement;

import ca.uhn.fhir.jpa.dao.DaoConfig;
import ca.uhn.fhir.jpa.provider.dstu3.TerminologyUploaderProviderDstu3;
import ca.uhn.fhir.rest.server.interceptor.RequestValidatingInterceptor;
import ca.uhn.fhir.validation.ResultSeverityEnum;
@@ -50,6 +50,7 @@ public abstract class BaseResourceProviderDstu3Test extends BaseJpaDstu3Test {
   private static Server ourServer;
   protected static String ourServerBase;
   private static GenericWebApplicationContext ourWebApplicationContext;
   private TerminologyUploaderProviderDstu3 myTerminologyUploaderProvider;

   public BaseResourceProviderDstu3Test() {
      super();
@@ -78,7 +79,9 @@ public abstract class BaseResourceProviderDstu3Test extends BaseJpaDstu3Test {

      ourRestServer.getFhirContext().setNarrativeGenerator(new DefaultThymeleafNarrativeGenerator());

      ourRestServer.setPlainProviders(mySystemProvider);
      myTerminologyUploaderProvider = myAppCtx.getBean(TerminologyUploaderProviderDstu3.class);

      ourRestServer.setPlainProviders(mySystemProvider, myTerminologyUploaderProvider);

      JpaConformanceProviderDstu3 confProvider = new JpaConformanceProviderDstu3(ourRestServer, mySystemDao, myDaoConfig);
      confProvider.setImplementationDescription("THIS IS THE DESC");
@@ -0,0 +1,157 @@
package ca.uhn.fhir.jpa.provider.dstu3;

import static org.hamcrest.Matchers.greaterThan;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;

import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;

import org.apache.commons.io.IOUtils;
import org.hl7.fhir.dstu3.model.Attachment;
import org.hl7.fhir.dstu3.model.IntegerType;
import org.hl7.fhir.dstu3.model.Parameters;
import org.hl7.fhir.dstu3.model.StringType;
import org.hl7.fhir.dstu3.model.UriType;
import org.junit.AfterClass;
import org.junit.Test;

import ca.uhn.fhir.jpa.term.IHapiTerminologyLoaderSvc;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.util.TestUtil;

public class TerminologyProviderDstu3Test extends BaseResourceProviderDstu3Test {

   private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(TerminologyProviderDstu3Test.class);

   @AfterClass
   public static void afterClassClearContext() {
      TestUtil.clearAllStaticFieldsForUnitTest();
   }

   @Test
   public void testUploadSct() throws Exception {
      byte[] packageBytes = createSctZip();

      //@formatter:off
      Parameters respParam = ourClient
         .operation()
         .onServer()
         .named("upload-external-code-system")
         .withParameter(Parameters.class, "url", new UriType(IHapiTerminologyLoaderSvc.SCT_URL))
         .andParameter("package", new Attachment().setData(packageBytes))
         .execute();
      //@formatter:on

      String resp = myFhirCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(respParam);
      ourLog.info(resp);

      assertThat(((IntegerType)respParam.getParameter().get(0).getValue()).getValue(), greaterThan(1));
   }

   @Test
   public void testUploadSctLocalFile() throws Exception {
      byte[] packageBytes = createSctZip();
      File tempFile = File.createTempFile("tmp", ".zip");
      tempFile.deleteOnExit();

      FileOutputStream fos = new FileOutputStream(tempFile);
      fos.write(packageBytes);
      fos.close();

      //@formatter:off
      Parameters respParam = ourClient
         .operation()
         .onServer()
         .named("upload-external-code-system")
         .withParameter(Parameters.class, "url", new UriType(IHapiTerminologyLoaderSvc.SCT_URL))
         .andParameter("localfile", new StringType(tempFile.getAbsolutePath()))
         .execute();
      //@formatter:on

      String resp = myFhirCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(respParam);
      ourLog.info(resp);

      assertThat(((IntegerType)respParam.getParameter().get(0).getValue()).getValue(), greaterThan(1));
   }

   @Test
   public void testUploadInvalidUrl() throws Exception {
      byte[] packageBytes = createSctZip();

      //@formatter:off
      try {
         ourClient
            .operation()
            .onServer()
            .named("upload-external-code-system")
            .withParameter(Parameters.class, "url", new UriType(IHapiTerminologyLoaderSvc.SCT_URL + "FOO"))
            .andParameter("package", new Attachment().setData(packageBytes))
            .execute();
         fail();
      } catch (InvalidRequestException e) {
         assertEquals("HTTP 400 Bad Request: Unknown URL: http://snomed.info/sctFOO", e.getMessage());
      }
      //@formatter:on
   }

   @Test
   public void testUploadMissingUrl() throws Exception {
      byte[] packageBytes = createSctZip();

      //@formatter:off
      try {
         ourClient
            .operation()
            .onServer()
            .named("upload-external-code-system")
            .withParameter(Parameters.class, "package", new Attachment().setData(packageBytes))
            .execute();
         fail();
      } catch (InvalidRequestException e) {
         assertEquals("HTTP 400 Bad Request: Unknown URL: ", e.getMessage());
      }
      //@formatter:on
   }

   @Test
   public void testUploadMissingPackage() throws Exception {
      //@formatter:off
      try {
         ourClient
            .operation()
            .onServer()
            .named("upload-external-code-system")
            .withParameter(Parameters.class, "url", new UriType(IHapiTerminologyLoaderSvc.SCT_URL))
            .execute();
         fail();
      } catch (InvalidRequestException e) {
         assertEquals("HTTP 400 Bad Request: No 'localfile' or 'package' parameter, or package had no data", e.getMessage());
      }
      //@formatter:on
   }

   private byte[] createSctZip() throws IOException {
      ByteArrayOutputStream bos = new ByteArrayOutputStream();
      ZipOutputStream zos = new ZipOutputStream(bos);

      List<String> inputNames = Arrays.asList("sct2_Concept_Full_INT_20160131.txt","sct2_Concept_Full-en_INT_20160131.txt","sct2_Description_Full-en_INT_20160131.txt","sct2_Identifier_Full_INT_20160131.txt","sct2_Relationship_Full_INT_20160131.txt","sct2_StatedRelationship_Full_INT_20160131.txt","sct2_TextDefinition_Full-en_INT_20160131.txt");
      for (String nextName : inputNames) {
         zos.putNextEntry(new ZipEntry("SnomedCT_Release_INT_20160131_Full/Terminology/" + nextName));
         zos.write(IOUtils.toByteArray(getClass().getResourceAsStream("/sct/" + nextName)));
      }
      zos.close();
      byte[] packageBytes = bos.toByteArray();
      return packageBytes;
   }

}
@@ -19,8 +19,8 @@ package ca.uhn.fhir.model.dstu;
 * limitations under the License.
 * #L%
 */

import static org.apache.commons.lang3.StringUtils.*;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
import static org.apache.commons.lang3.StringUtils.join;

import java.io.InputStream;
import java.util.Collections;
@@ -59,7 +59,6 @@ import ca.uhn.fhir.context.RuntimeCompositeDatatypeDefinition;
import ca.uhn.fhir.context.RuntimePrimitiveDatatypeDefinition;
import ca.uhn.fhir.context.RuntimeResourceBlockDefinition;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.context.RuntimeResourceReferenceDefinition;
import ca.uhn.fhir.model.api.ICompositeDatatype;
import ca.uhn.fhir.model.api.IFhirVersion;
import ca.uhn.fhir.model.api.IPrimitiveDatatype;
@@ -168,22 +167,6 @@ public class FhirDstu1 implements IFhirVersion {

   private void fillName(StructureElement elem, BaseRuntimeElementDefinition<?> nextDef, String theServerBase) {
      assert nextDef != null;

      if (nextDef instanceof RuntimeResourceReferenceDefinition) {
         // RuntimeResourceReferenceDefinition rr = (RuntimeResourceReferenceDefinition) nextDef;
         // for (Class<? extends IBaseResource> next : rr.getResourceTypes()) {
         // StructureElementDefinitionType type = elem.getDefinition().addType();
         // type.getCode().setValue("ResourceReference");
         //
         // if (next != IResource.class) {
         // @SuppressWarnings("unchecked")
         // RuntimeResourceDefinition resDef = rr.getDefinitionForResourceType((Class<? extends IResource>) next);
         // type.getProfile().setValueAsString(resDef.getResourceProfile(theServerBase));
         // }
         // }

         return;
      }

      StructureElementDefinitionType type = elem.getDefinition().addType();
      String name = nextDef.getName();