Merging master into working branch.

Diederik Muylwyk 2019-09-24 16:47:19 -04:00
commit 799848291f
28 changed files with 514 additions and 200 deletions


@ -6,12 +6,10 @@ HAPI FHIR - Java API for HL7 FHIR Clients and Servers
[![Coverage Status](https://coveralls.io/repos/jamesagnew/hapi-fhir/badge.svg?branch=master&service=github)](https://coveralls.io/github/jamesagnew/hapi-fhir?branch=master)
[![Maven Central](https://maven-badges.herokuapp.com/maven-central/ca.uhn.hapi.fhir/hapi-fhir-base/badge.svg)](http://search.maven.org/#search|ga|1|ca.uhn.hapi.fhir)
[![License](https://img.shields.io/badge/license-apache%202.0-60C060.svg)](http://jamesagnew.github.io/hapi-fhir/license.html)
* Linux Build: [![Build Status](https://travis-ci.org/jamesagnew/hapi-fhir.svg?branch=master)](https://travis-ci.org/jamesagnew/hapi-fhir)
* Windows Build: <a href="https://ci.appveyor.com/project/jamesagnew/hapi-fhir"><img src="https://ci.appveyor.com/api/projects/status/github/jamesagnew/hapi-fhir?branch=master&svg=true"></a>
[![Build Status](https://dev.azure.com/jamesagnew214/jamesagnew214/_apis/build/status/jamesagnew.hapi-fhir?branchName=master)](https://dev.azure.com/jamesagnew214/jamesagnew214/_build/latest?definitionId=1&branchName=master)
Complete project documentation is available here:
http://jamesagnew.github.io/hapi-fhir/
http://hapifhir.io
A demonstration of this project is available here:
http://hapi.fhir.org/
@ -19,3 +17,5 @@ http://hapi.fhir.org/
This project is Open Source, licensed under the Apache Software License 2.0.
Please see [this wiki page](https://github.com/jamesagnew/hapi-fhir/wiki/Getting-Help) for information on where to get help with HAPI FHIR. Please see [Smile CDR](https://smilecdr.com) for information on commercial support.
---


@ -1,9 +0,0 @@
version: 1.0.{build}
image: Visual Studio 2017
cache:
- C:\maven\
- C:\Users\appveyor\.m2\repository
build_script:
- SET JAVA_HOME=C:\Program Files\Java\jdk10
- SET PATH=C:\Program Files\Java\jdk10\bin;%PATH%
- cmd: mvn -P MINPARALLEL,ALLMODULES,REDUCED_JPA_TESTS clean install

azure-pipelines.yml (new file)

@ -0,0 +1,56 @@
# Starter pipeline
# Start with a minimal pipeline that you can customize to build and deploy your code.
# Add steps that build, run tests, deploy, and more:
# https://aka.ms/yaml
variables:
MAVEN_CACHE_FOLDER: $(Pipeline.Workspace)/.m2/repository
MAVEN_OPTS: '-Dmaven.repo.local=$(MAVEN_CACHE_FOLDER)'
trigger:
- master
pool:
vmImage: 'ubuntu-latest'
jobs:
- job: Build
timeoutInMinutes: 360
steps:
- task: CacheBeta@0
inputs:
key: maven
path: $(MAVEN_CACHE_FOLDER)
displayName: Cache Maven local repo
- task: Maven@3
inputs:
#mavenPomFile: 'pom.xml'
goals: 'clean install' # Optional
options: ''
#publishJUnitResults: true
#testResultsFiles: '**/surefire-reports/TEST-*.xml' # Required when publishJUnitResults == True
#testRunTitle: # Optional
#codeCoverageToolOption: 'None' # Optional. Options: none, cobertura, jaCoCo. Enabling code coverage inserts the `clean` goal into the Maven goals list when Maven runs.
#codeCoverageClassFilter: # Optional. Comma-separated list of filters to include or exclude classes from collecting code coverage. For example: +:com.*,+:org.*,-:my.app*.*
#codeCoverageClassFilesDirectories: # Optional
#codeCoverageSourceDirectories: # Optional
#codeCoverageFailIfEmpty: false # Optional
#javaHomeOption: 'JDKVersion' # Options: jDKVersion, path
#jdkVersionOption: 'default' # Optional. Options: default, 1.11, 1.10, 1.9, 1.8, 1.7, 1.6
#jdkDirectory: # Required when javaHomeOption == Path
#jdkArchitectureOption: 'x64' # Optional. Options: x86, x64
#mavenVersionOption: 'Default' # Options: default, path
#mavenDirectory: # Required when mavenVersionOption == Path
#mavenSetM2Home: false # Required when mavenVersionOption == Path
mavenOptions: '-Xmx2048m $(MAVEN_OPTS)' # Optional
#mavenAuthenticateFeed: false
#effectivePomSkip: false
#sonarQubeRunAnalysis: false
#sqMavenPluginVersionChoice: 'latest' # Required when sonarQubeRunAnalysis == True # Options: latest, pom
#checkStyleRunAnalysis: false # Optional
#pmdRunAnalysis: false # Optional
#findBugsRunAnalysis: false # Optional


@ -19,25 +19,28 @@ package ca.uhn.fhir.rest.param;
* limitations under the License.
* #L%
*/
import static ca.uhn.fhir.model.primitive.IdDt.isValidLong;
import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
import java.math.BigDecimal;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.parser.DataFormatException;
import ca.uhn.fhir.util.CoverageIgnore;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import org.hl7.fhir.instance.model.api.IBaseResource;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.util.CoverageIgnore;
import java.math.BigDecimal;
import static ca.uhn.fhir.model.primitive.IdDt.isValidLong;
import static org.apache.commons.lang3.StringUtils.*;
public class ReferenceParam extends BaseParam /*implements IQueryParameterType*/ {
private String myChain;
private String myResourceType;
private String myBaseUrl;
private String myValue;
private String myIdPart;
private final IdDt myId = new IdDt();
/**
* Constructor
*/
@ -64,12 +67,15 @@ public class ReferenceParam extends BaseParam /*implements IQueryParameterType*/
* Constructor
*/
public ReferenceParam(String theResourceType, String theChain, String theValue) {
String qualifier = "";
if (isNotBlank(theResourceType)) {
setValue(theResourceType + "/" + theValue);
} else {
setValue(theValue);
qualifier = ":" + theResourceType;
}
setChain(theChain);
if (isNotBlank(theChain)) {
qualifier = qualifier + "." + theChain;
}
setValueAsQueryToken(null, null, qualifier, theValue);
}
@Override
@ -91,55 +97,61 @@ public class ReferenceParam extends BaseParam /*implements IQueryParameterType*/
@Override
String doGetValueAsQueryToken(FhirContext theContext) {
if (isBlank(myId.getResourceType())) {
return myId.getValue(); // e.g. urn:asdjd or 123 or cid:wieiuru or #1
if (isBlank(getResourceType())) {
return myValue; // e.g. urn:asdjd or 123 or cid:wieiuru or #1
} else {
if (isBlank(getChain())) {
return getResourceType() + "/" + myId.getIdPart();
if (isBlank(getChain()) && isNotBlank(getResourceType())) {
return getResourceType() + "/" + getIdPart();
}
return myId.getIdPart();
return myValue;
}
}
@Override
void doSetValueAsQueryToken(FhirContext theContext, String theParamName, String theQualifier, String theValue) {
String q = theQualifier;
String resourceType = null;
boolean skipSetValue = false;
if (isNotBlank(q)) {
if (q.startsWith(":")) {
int nextIdx = q.indexOf('.');
if (nextIdx != -1) {
resourceType = q.substring(1, nextIdx);
myChain = q.substring(nextIdx + 1);
// type is explicitly defined so use it
myId.setParts(null, resourceType, theValue, null);
skipSetValue = true;
myResourceType = q.substring(1, nextIdx);
} else {
resourceType = q.substring(1);
myChain = null;
myResourceType = q.substring(1);
}
myValue = theValue;
myIdPart = theValue;
IdDt id = new IdDt(theValue);
if (!id.hasBaseUrl() && id.hasIdPart() && id.hasResourceType()) {
if (id.getResourceType().equals(myResourceType)) {
myIdPart = id.getIdPart();
}
}
} else if (q.startsWith(".")) {
myChain = q.substring(1);
// type not defined but this is a chain, so treat value as opaque
myId.setParts(null, null, theValue, null);
skipSetValue = true;
myResourceType = null;
myValue = theValue;
myIdPart = theValue;
}
} else {
myChain = null;
myValue = theValue;
IdDt id = new IdDt(theValue);
myResourceType = id.getResourceType();
myIdPart = id.getIdPart();
myBaseUrl = id.getBaseUrl();
}
if (!skipSetValue) {
setValue(theValue);
if (isNotBlank(resourceType) && isBlank(getResourceType())) {
setValue(resourceType + '/' + theValue);
}
}
}
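To illustrate the reworked qualifier handling above, a brief sketch (the values mirror the ReferenceParamTest cases later in this change; passing a null FhirContext is acceptable for this code path, as the three-argument constructor does the same):

```java
import ca.uhn.fhir.rest.param.ReferenceParam;

public class ReferenceParamQualifierExample {
   public static void main(String[] args) {
      ReferenceParam param = new ReferenceParam();
      param.setValueAsQueryToken(null, "derived-from", ":DocumentReference.contenttype", "application/vnd.mfer");
      System.out.println(param.getResourceType()); // DocumentReference (taken from the qualifier)
      System.out.println(param.getChain());        // contenttype (the chain after the ".")
      System.out.println(param.getValue());        // application/vnd.mfer (kept opaque, so slashes survive)
   }
}
```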
@CoverageIgnore
public String getBaseUrl() {
return myId.getBaseUrl();
return myBaseUrl;
}
@ -147,24 +159,34 @@ public class ReferenceParam extends BaseParam /*implements IQueryParameterType*/
return myChain;
}
public ReferenceParam setChain(String theChain) {
myChain = theChain;
return this;
}
@CoverageIgnore
public String getIdPart() {
return myId.getIdPart();
return myIdPart;
}
@CoverageIgnore
public BigDecimal getIdPartAsBigDecimal() {
return myId.getIdPartAsBigDecimal();
return new IdDt(myValue).getIdPartAsBigDecimal();
}
@CoverageIgnore
public Long getIdPartAsLong() {
return myId.getIdPartAsLong();
return new IdDt(myValue).getIdPartAsLong();
}
public String getResourceType() {
return myId.getResourceType();
if (isNotBlank(myResourceType)) {
return myResourceType;
}
if (isBlank(myChain)) {
return new IdDt(myValue).getResourceType();
}
return null;
}
public Class<? extends IBaseResource> getResourceType(FhirContext theCtx) {
@ -175,11 +197,21 @@ public class ReferenceParam extends BaseParam /*implements IQueryParameterType*/
}
public String getValue() {
return myId.getValue();
return myValue;
}
public ReferenceParam setValue(String theValue) {
IdDt id = new IdDt(theValue);
String qualifier= null;
if (id.hasResourceType()) {
qualifier = ":" + id.getResourceType();
}
setValueAsQueryToken(null, null, qualifier, id.getIdPart());
return this;
}
public boolean hasResourceType() {
return myId.hasResourceType();
return isNotBlank(myResourceType);
}
@Override
@ -187,16 +219,6 @@ public class ReferenceParam extends BaseParam /*implements IQueryParameterType*/
return true;
}
public ReferenceParam setChain(String theChain) {
myChain = theChain;
return this;
}
public ReferenceParam setValue(String theValue) {
myId.setValue(theValue);
return this;
}
/**
* Returns a new param containing the same value as this param, but with the type converted
* to {@link DateParam}. This is useful if you are using reference parameters and want to handle


@ -78,6 +78,7 @@ public abstract class BaseMigrateDatabaseCommand<T extends Enum> extends BaseCom
addRequiredOption(retVal, "t", "to", "Version", "The database schema version to migrate TO");
addRequiredOption(retVal, "d", "driver", "Driver", "The database driver to use (Options are " + driverOptions() + ")");
addOptionalOption(retVal, "x", "flags", "Flags", "A comma-separated list of any specific migration flags (these flags are version specific, see migrator documentation for details)");
addOptionalOption(retVal, null, "no-column-shrink", false, "If this flag is set, the system will not attempt to reduce the length of columns. This is useful in environments with a lot of existing data, where shrinking a column can take a very long time.");
return retVal;
}
@ -106,6 +107,7 @@ public abstract class BaseMigrateDatabaseCommand<T extends Enum> extends BaseCom
validateVersionSupported(to);
boolean dryRun = theCommandLine.hasOption("r");
boolean noColumnShrink = theCommandLine.hasOption("no-column-shrink");
String flags = theCommandLine.getOptionValue("x");
myFlags = Arrays.stream(defaultString(flags).split(","))
@ -119,6 +121,7 @@ public abstract class BaseMigrateDatabaseCommand<T extends Enum> extends BaseCom
migrator.setUsername(username);
migrator.setPassword(password);
migrator.setDryRun(dryRun);
migrator.setNoColumnShrink(noColumnShrink);
addTasks(migrator, from, to);
migrator.migrate();
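For orientation, a minimal programmatic sketch of the same wiring; only setUsername/setPassword/setDryRun/setNoColumnShrink/migrate appear in this diff, so the connection setters and values below are illustrative assumptions:

```java
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.Migrator;

public class MigrateNoShrinkExample {
   public static void main(String[] args) {
      Migrator migrator = new Migrator();
      // Assumed connection setup (not shown in this diff); values are placeholders.
      migrator.setDriverType(DriverTypeEnum.H2_EMBEDDED);
      migrator.setConnectionUrl("jdbc:h2:mem:migration-test");
      migrator.setUsername("sa");
      migrator.setPassword("sa");
      migrator.setDryRun(false);
      // New flag added in this change: never reduce the length of an existing column.
      migrator.setNoColumnShrink(true);
      // Migration tasks for the chosen from/to versions would be added via migrator.addTasks(...).
      migrator.migrate();
   }
}
```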


@ -7,3 +7,4 @@ Interceptor classes may "hook into" various points in the processing chain in bo
```java
{{snippet:classpath:/ca/uhn/hapi/fhir/docs/MyInterceptor.java|sampleClass}}
```
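The MyInterceptor sample referenced by the snippet above is not included in this diff. As a rough, hypothetical server-side illustration only (the class name, pointcut, and logging below are assumptions, not the actual sample), an interceptor declares hook methods with the @Interceptor and @Hook annotations and is then registered via registerInterceptor(...) on the server or client:

```java
import ca.uhn.fhir.interceptor.api.Hook;
import ca.uhn.fhir.interceptor.api.Interceptor;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.rest.api.server.RequestDetails;

// Hypothetical example; not the MyInterceptor sample referenced above.
@Interceptor
public class ExampleLoggingInterceptor {

   // Invoked once an incoming server request has been parsed and classified.
   @Hook(Pointcut.SERVER_INCOMING_REQUEST_POST_PROCESSED)
   public void logIncomingRequest(RequestDetails theRequestDetails) {
      System.out.println("Handling request: " + theRequestDetails.getCompleteUrl());
   }
}
```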


@ -50,31 +50,26 @@
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-structures-dstu2</artifactId>
<version>${project.version}</version>
<optional>true</optional>
</dependency>
<dependency>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-structures-hl7org-dstu2</artifactId>
<version>${project.version}</version>
<optional>true</optional>
</dependency>
<dependency>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-structures-dstu2.1</artifactId>
<version>${project.version}</version>
<optional>true</optional>
</dependency>
<dependency>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-structures-dstu3</artifactId>
<version>${project.version}</version>
<optional>true</optional>
</dependency>
<dependency>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-structures-r4</artifactId>
<version>${project.version}</version>
<optional>true</optional>
</dependency>
<dependency>


@ -212,7 +212,6 @@
<dependency>
<groupId>com.github.dnault</groupId>
<artifactId>xml-patch</artifactId>
<version>0.3.0</version>
</dependency>
<!-- FHIR RI is pulled in for UCUM support, but we don't want any of its dependencies. -->


@ -47,12 +47,14 @@ import ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor.ActionRequestDetails;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.util.*;
import ca.uhn.fhir.util.CoverageIgnore;
import ca.uhn.fhir.util.MetaUtil;
import ca.uhn.fhir.util.StopWatch;
import ca.uhn.fhir.util.XmlUtil;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Charsets;
import com.google.common.collect.Sets;
@ -86,7 +88,6 @@ import javax.xml.stream.events.XMLEvent;
import java.util.*;
import java.util.Map.Entry;
import static ca.uhn.fhir.jpa.model.util.JpaConstants.EXT_EXTERNALIZED_BINARY_ID;
import static org.apache.commons.lang3.StringUtils.*;
/*
@ -974,7 +975,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
// 6. Handle source (provenance)
if (isNotBlank(provenanceRequestId) || isNotBlank(provenanceSourceUri)) {
String sourceString = defaultString(provenanceSourceUri)
String sourceString = cleanProvenanceSourceUri(provenanceSourceUri)
+ (isNotBlank(provenanceRequestId) ? "#" : "")
+ defaultString(provenanceRequestId);
@ -992,6 +993,16 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
return retVal;
}
static String cleanProvenanceSourceUri(String theProvenanceSourceUri) {
if (isNotBlank(theProvenanceSourceUri)) {
int hashIndex = theProvenanceSourceUri.indexOf('#');
if (hashIndex != -1) {
theProvenanceSourceUri = theProvenanceSourceUri.substring(0, hashIndex);
}
}
return defaultString(theProvenanceSourceUri);
}
public String toResourceName(Class<? extends IBaseResource> theResourceType) {
return myContext.getResourceDefinition(theResourceType).getName();
}


@ -573,7 +573,7 @@ public class SearchBuilder implements ISearchBuilder {
private Predicate addPredicateReferenceWithChain(String theResourceName, String theParamName, List<? extends IQueryParameterType> theList, Join<ResourceTable, ResourceLink> theJoin, List<Predicate> theCodePredicates, ReferenceParam theRef, RequestDetails theRequest) {
final List<Class<? extends IBaseResource>> resourceTypes;
String resourceId;
if (!theRef.getValue().matches("[a-zA-Z]+/.*")) {
if (!theRef.hasResourceType()) {
RuntimeSearchParam param = mySearchParamRegistry.getActiveSearchParam(theResourceName, theParamName);
resourceTypes = new ArrayList<>();


@ -35,10 +35,10 @@ public interface ITermCodeSystemVersionDao extends JpaRepository<TermCodeSystemV
@Query("DELETE FROM TermCodeSystemVersion csv WHERE csv.myCodeSystem = :cs")
void deleteForCodeSystem(@Param("cs") TermCodeSystem theCodeSystem);
@Query("SELECT cs FROM TermCodeSystemVersion cs WHERE cs.myResourcePid = :resource_id")
List<TermCodeSystemVersion> findByCodeSystemPid(@Param("resource_id") Long theCodeSystemResourcePid);
@Query("SELECT cs FROM TermCodeSystemVersion cs WHERE cs.myCodeSystemPid = :codesystem_pid")
List<TermCodeSystemVersion> findByCodeSystemPid(@Param("codesystem_pid") Long theCodeSystemPid);
@Query("SELECT cs FROM TermCodeSystemVersion cs WHERE cs.myResource.myId = :resource_id")
@Query("SELECT cs FROM TermCodeSystemVersion cs WHERE cs.myResourcePid = :resource_id")
List<TermCodeSystemVersion> findByCodeSystemResourcePid(@Param("resource_id") Long theCodeSystemResourcePid);
@Query("SELECT cs FROM TermCodeSystemVersion cs WHERE cs.myCodeSystemHavingThisVersionAsCurrentVersionIfAny.myResource.myId = :resource_id")


@ -58,6 +58,7 @@ public class TermCodeSystemVersion implements Serializable {
@Column(name = "CS_VERSION_ID", nullable = true, updatable = false, length = MAX_VERSION_LENGTH)
private String myCodeSystemVersionId;
/**
* This was added in HAPI FHIR 3.3.0 and is nullable just to avoid migration
* issues. It should be made non-nullable at some point.
@ -65,8 +66,11 @@ public class TermCodeSystemVersion implements Serializable {
@ManyToOne
@JoinColumn(name = "CODESYSTEM_PID", referencedColumnName = "PID", nullable = true, foreignKey = @ForeignKey(name = "FK_CODESYSVER_CS_ID"))
private TermCodeSystem myCodeSystem;
@SuppressWarnings("unused")
@Column(name = "CODESYSTEM_PID", insertable = false, updatable = false)
private Long myCodeSystemPid;
@SuppressWarnings("unused")
@OneToOne(mappedBy = "myCurrentVersion", optional = true)
private TermCodeSystem myCodeSystemHavingThisVersionAsCurrentVersionIfAny;


@ -0,0 +1,16 @@
package ca.uhn.fhir.jpa.dao;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
public class BaseHapiFhirDaoTest {
@Test
public void cleanProvenanceSourceUri() {
assertEquals("", BaseHapiFhirDao.cleanProvenanceSourceUri(null));
assertEquals("abc", BaseHapiFhirDao.cleanProvenanceSourceUri("abc"));
assertEquals("abc", BaseHapiFhirDao.cleanProvenanceSourceUri("abc#def"));
assertEquals("abc", BaseHapiFhirDao.cleanProvenanceSourceUri("abc#def#ghi"));
}
}


@ -1368,11 +1368,14 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
map.setLastUpdated(new DateRangeParam().setUpperBound(new DateParam(ParamPrefixEnum.LESSTHAN, "2022-01-01")));
IBundleProvider found = myPatientDao.search(map);
Set<String> dates = new HashSet<>();
String searchId = found.getUuid();
for (int i = 0; i < 9; i++) {
List<IBaseResource> resources = found.getResources(i, i + 1);
assertThat("Failed to load range " + i + " - " + (i + 1), resources, hasSize(1));
assertThat("Failed to load range " + i + " - " + (i + 1) + " - from provider of type: " + found.getClass(), resources, hasSize(1));
Patient nextResource = (Patient) resources.get(0);
dates.add(nextResource.getBirthDateElement().getValueAsString());
found = myPagingProvider.retrieveResultList(null, searchId);
}
assertThat(dates, hasItems(


@ -120,7 +120,7 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
myDaoConfig.setSearchPreFetchThresholds(Arrays.asList(20, 50, 190));
// Seach with count only
// Search with count only
SearchParameterMap params = new SearchParameterMap();
params.add(Patient.SP_NAME, new StringParam("FAM"));
params.setSummaryMode((SummaryEnum.COUNT));
@ -142,7 +142,9 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
assertEquals(201, results.size().intValue());
ids = toUnqualifiedVersionlessIdValues(results, 0, 10, true);
assertThat(ids, hasSize(10));
assertEquals(201, myDatabaseBackedPagingProvider.retrieveResultList(null, uuid).size().intValue());
IBundleProvider bundleProvider = myDatabaseBackedPagingProvider.retrieveResultList(null, uuid);
Integer bundleSize = bundleProvider.size();
assertEquals(201, bundleSize.intValue());
// Search with count only
params = new SearchParameterMap();


@ -3,6 +3,7 @@ package ca.uhn.fhir.jpa.provider.dstu3;
import ca.uhn.fhir.jpa.dao.DaoConfig;
import ca.uhn.fhir.jpa.dao.data.ISearchDao;
import ca.uhn.fhir.jpa.entity.Search;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.jpa.provider.r4.ResourceProviderR4Test;
import ca.uhn.fhir.jpa.search.SearchCoordinatorSvcImpl;
import ca.uhn.fhir.model.api.TemporalPrecisionEnum;
@ -3913,6 +3914,31 @@ public class ResourceProviderDstu3Test extends BaseResourceProviderDstu3Test {
}
@Test
public void testUpdateWithSource() {
Patient patient = new Patient();
patient.setActive(false);
IIdType patientid = ourClient.create().resource(patient).execute().getId().toUnqualifiedVersionless();
{
Patient readPatient = (Patient) ourClient.read().resource("Patient").withId(patientid).execute();
assertThat(readPatient.getMeta().getExtensionString(JpaConstants.EXT_META_SOURCE), matchesPattern("#[a-f0-9]+"));
}
patient.setId(patientid);
patient.setActive(true);
ourClient.update().resource(patient).execute();
{
Patient readPatient = (Patient) ourClient.read().resource("Patient").withId(patientid).execute();
assertThat(readPatient.getMeta().getExtensionString(JpaConstants.EXT_META_SOURCE), matchesPattern("#[a-f0-9]+"));
readPatient.addName().setFamily("testUpdateWithSource");
ourClient.update().resource(readPatient).execute();
readPatient = (Patient) ourClient.read().resource("Patient").withId(patientid).execute();
assertThat(readPatient.getMeta().getExtensionString(JpaConstants.EXT_META_SOURCE), matchesPattern("#[a-f0-9]+"));
}
}
@Test
public void testUpdateWithETag() throws Exception {
String methodName = "testUpdateWithETag";


@ -167,6 +167,54 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
}
@Test
public void testSearchWithSlashes() {
myDaoConfig.setSearchPreFetchThresholds(Lists.newArrayList(10, 50, 10000));
Procedure procedure = new Procedure();
procedure.setStatus(Procedure.ProcedureStatus.COMPLETED);
String procedureId = ourClient.create().resource(procedure).execute().getId().toUnqualifiedVersionless().getValue();
DocumentReference dr = new DocumentReference();
dr.addContent().getAttachment().setContentType("application/vnd.mfer");
String drId = ourClient.create().resource(dr).execute().getId().toUnqualifiedVersionless().getValue();
for (int i = 0; i < 60; i++) {
Observation obs = new Observation();
obs.addPartOf().setReference(procedureId);
obs.addDerivedFrom().setReference(drId);
ourClient.create().resource(obs).execute();
}
ourLog.info("Starting search");
Bundle response = ourClient
.search()
.byUrl("Observation?part-of=" + procedureId + "&derived-from:DocumentReference.contenttype=application/vnd.mfer&_total=accurate&_count=2")
.returnBundle(Bundle.class)
.execute();
int obsCount = 0;
int pageCount = 0;
while (response != null) {
obsCount += response.getEntry().size();
pageCount++;
if (response.getLink("next") != null) {
response = ourClient.loadPage().next(response).execute();
} else {
response = null;
}
ourLog.info("Have loaded {} pages and {} reources", pageCount, obsCount);
}
assertEquals(60, obsCount);
assertEquals(30, pageCount);
}
@Test
public void testManualPagingLinkOffsetDoesntReturnBeyondEnd() {
myDaoConfig.setSearchPreFetchThresholds(Lists.newArrayList(10, 1000));
@ -5081,6 +5129,31 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
}
@Test
public void testUpdateWithSource() {
Patient patient = new Patient();
patient.setActive(false);
IIdType patientid = ourClient.create().resource(patient).execute().getId().toUnqualifiedVersionless();
{
Patient readPatient = (Patient) ourClient.read().resource("Patient").withId(patientid).execute();
assertThat(readPatient.getMeta().getSource(), matchesPattern("#[a-f0-9]+"));
}
patient.setId(patientid);
patient.setActive(true);
ourClient.update().resource(patient).execute();
{
Patient readPatient = (Patient) ourClient.read().resource("Patient").withId(patientid).execute();
assertThat(readPatient.getMeta().getSource(), matchesPattern("#[a-f0-9]+"));
readPatient.addName().setFamily("testUpdateWithSource");
ourClient.update().resource(readPatient).execute();
readPatient = (Patient) ourClient.read().resource("Patient").withId(patientid).execute();
assertThat(readPatient.getMeta().getSource(), matchesPattern("#[a-f0-9]+"));
}
}
@Test
public void testUpdateWithETag() throws Exception {
String methodName = "testUpdateWithETag";


@ -22,6 +22,9 @@ package ca.uhn.fhir.jpa.migrate;
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTableColumnTypeTask;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.hibernate.boot.model.naming.Identifier;
import org.hibernate.dialect.Dialect;
import org.hibernate.engine.jdbc.dialect.internal.StandardDialectResolver;
@ -48,6 +51,73 @@ import static org.thymeleaf.util.StringUtils.toUpperCase;
public class JdbcUtils {
private static final Logger ourLog = LoggerFactory.getLogger(JdbcUtils.class);
public static class ColumnType {
private final BaseTableColumnTypeTask.ColumnTypeEnum myColumnTypeEnum;
private final Long myLength;
public ColumnType(BaseTableColumnTypeTask.ColumnTypeEnum theColumnType, Long theLength) {
myColumnTypeEnum = theColumnType;
myLength = theLength;
}
public ColumnType(BaseTableColumnTypeTask.ColumnTypeEnum theColumnType, int theLength) {
this(theColumnType, (long) theLength);
}
public ColumnType(BaseTableColumnTypeTask.ColumnTypeEnum theColumnType) {
this(theColumnType, null);
}
@Override
public boolean equals(Object theO) {
if (this == theO) {
return true;
}
if (theO == null || getClass() != theO.getClass()) {
return false;
}
ColumnType that = (ColumnType) theO;
return new EqualsBuilder()
.append(myColumnTypeEnum, that.myColumnTypeEnum)
.append(myLength, that.myLength)
.isEquals();
}
@Override
public int hashCode() {
return new HashCodeBuilder(17, 37)
.append(myColumnTypeEnum)
.append(myLength)
.toHashCode();
}
@Override
public String toString() {
ToStringBuilder b = new ToStringBuilder(this);
b.append("type", myColumnTypeEnum);
if (myLength != null) {
b.append("length", myLength);
}
return b.toString();
}
public BaseTableColumnTypeTask.ColumnTypeEnum getColumnTypeEnum() {
return myColumnTypeEnum;
}
public Long getLength() {
return myLength;
}
public boolean equals(BaseTableColumnTypeTask.ColumnTypeEnum theColumnType, Long theColumnLength) {
return myColumnTypeEnum == theColumnType && (myLength == null || myLength.equals(theColumnLength));
}
}
/**
* Retrieve all index names
*/
@ -127,7 +197,7 @@ public class JdbcUtils {
/**
* Retrieve all index names
*/
public static String getColumnType(DriverTypeEnum.ConnectionProperties theConnectionProperties, String theTableName, String theColumnName) throws SQLException {
public static ColumnType getColumnType(DriverTypeEnum.ConnectionProperties theConnectionProperties, String theTableName, String theColumnName) throws SQLException {
DataSource dataSource = Objects.requireNonNull(theConnectionProperties.getDataSource());
try (Connection connection = dataSource.getConnection()) {
return theConnectionProperties.getTxTemplate().execute(t -> {
@ -153,18 +223,18 @@ public class JdbcUtils {
Long length = indexes.getLong("COLUMN_SIZE");
switch (dataType) {
case Types.VARCHAR:
return BaseTableColumnTypeTask.ColumnTypeEnum.STRING.getDescriptor(length);
return new ColumnType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, length);
case Types.NUMERIC:
case Types.BIGINT:
case Types.DECIMAL:
return BaseTableColumnTypeTask.ColumnTypeEnum.LONG.getDescriptor(null);
return new ColumnType(BaseTableColumnTypeTask.ColumnTypeEnum.LONG, length);
case Types.INTEGER:
return BaseTableColumnTypeTask.ColumnTypeEnum.INT.getDescriptor(null);
return new ColumnType(BaseTableColumnTypeTask.ColumnTypeEnum.INT, length);
case Types.TIMESTAMP:
case Types.TIMESTAMP_WITH_TIMEZONE:
return BaseTableColumnTypeTask.ColumnTypeEnum.DATE_TIMESTAMP.getDescriptor(null);
return new ColumnType(BaseTableColumnTypeTask.ColumnTypeEnum.DATE_TIMESTAMP, length);
case Types.BLOB:
return BaseTableColumnTypeTask.ColumnTypeEnum.BLOB.getDescriptor(null);
return new ColumnType(BaseTableColumnTypeTask.ColumnTypeEnum.BLOB, length);
default:
throw new IllegalArgumentException("Don't know how to handle datatype " + dataType + " for column " + theColumnName + " on table " + theTableName);
}


@ -44,6 +44,7 @@ public class Migrator {
private int myChangesCount;
private boolean myDryRun;
private List<BaseTask.ExecutedStatement> myExecutedStatements = new ArrayList<>();
private boolean myNoColumnShrink;
public int getChangesCount() {
return myChangesCount;
@ -82,6 +83,7 @@ public class Migrator {
next.setDriverType(myDriverType);
next.setConnectionProperties(myConnectionProperties);
next.setDryRun(myDryRun);
next.setNoColumnShrink(myNoColumnShrink);
try {
next.execute();
} catch (SQLException e) {
@ -126,4 +128,8 @@ public class Migrator {
public void addTasks(List<BaseTask<?>> theTasks) {
theTasks.forEach(this::addTask);
}
public void setNoColumnShrink(boolean theNoColumnShrink) {
myNoColumnShrink = theNoColumnShrink;
}
}


@ -71,7 +71,7 @@ public class ArbitrarySqlTask extends BaseTask<ArbitrarySqlTask> {
}
for (TableAndColumn next : myConditionalOnExistenceOf) {
String columnType = JdbcUtils.getColumnType(getConnectionProperties(), next.getTable(), next.getColumn());
JdbcUtils.ColumnType columnType = JdbcUtils.getColumnType(getConnectionProperties(), next.getTable(), next.getColumn());
if (columnType == null) {
ourLog.info("Table {} does not have column {} - No action performed", next.getTable(), next.getColumn());
return;


@ -164,74 +164,23 @@ public abstract class BaseTableColumnTypeTask<T extends BaseTableTask> extends B
return myColumnLength;
}
public BaseTableColumnTypeTask<T> setColumnLength(int theColumnLength) {
myColumnLength = (long) theColumnLength;
public BaseTableColumnTypeTask<T> setColumnLength(long theColumnLength) {
myColumnLength = theColumnLength;
return this;
}
public enum ColumnTypeEnum {
LONG {
@Override
public String getDescriptor(Long theColumnLength) {
Assert.isTrue(theColumnLength == null, "Must not supply a column length");
return "bigint";
}
},
STRING {
@Override
public String getDescriptor(Long theColumnLength) {
Assert.isTrue(theColumnLength != null, "Must supply a column length");
return "varchar(" + theColumnLength + ")";
}
},
DATE_TIMESTAMP {
@Override
public String getDescriptor(Long theColumnLength) {
Assert.isTrue(theColumnLength == null, "Must not supply a column length");
return "timestamp";
}
},
BOOLEAN {
@Override
public String getDescriptor(Long theColumnLength) {
Assert.isTrue(theColumnLength == null, "Must not supply a column length");
return "boolean";
}
},
FLOAT {
@Override
public String getDescriptor(Long theColumnLength) {
Assert.isTrue(theColumnLength == null, "Must not supply a column length");
return "float";
}
},
INT {
@Override
public String getDescriptor(Long theColumnLength) {
Assert.isTrue(theColumnLength == null, "Must not supply a column length");
return "int";
}
},
BLOB {
@Override
public String getDescriptor(Long theColumnLength) {
Assert.isTrue(theColumnLength == null, "Must not supply a column length");
return "blob";
}
},
CLOB {
@Override
public String getDescriptor(Long theColumnLength) {
Assert.isTrue(theColumnLength == null, "Must not supply a column length");
return "clob";
}
};
public abstract String getDescriptor(Long theColumnLength);
LONG,
STRING,
DATE_TIMESTAMP,
BOOLEAN,
FLOAT,
INT,
BLOB,
CLOB
;
}


@ -42,6 +42,15 @@ public abstract class BaseTask<T extends BaseTask> {
private int myChangesCount;
private boolean myDryRun;
private List<ExecutedStatement> myExecutedStatements = new ArrayList<>();
private boolean myNoColumnShrink;
public boolean isNoColumnShrink() {
return myNoColumnShrink;
}
public void setNoColumnShrink(boolean theNoColumnShrink) {
myNoColumnShrink = theNoColumnShrink;
}
public boolean isDryRun() {
return myDryRun;


@ -36,7 +36,7 @@ public class ModifyColumnTask extends BaseTableColumnTypeTask<ModifyColumnTask>
@Override
public void execute() throws SQLException {
String existingType;
JdbcUtils.ColumnType existingType;
boolean nullable;
Set<String> columnNames = JdbcUtils.getColumnNames(getConnectionProperties(), getTableName());
@ -52,11 +52,17 @@ public class ModifyColumnTask extends BaseTableColumnTypeTask<ModifyColumnTask>
throw new InternalErrorException(e);
}
String wantedType = getColumnType().getDescriptor(getColumnLength());
boolean alreadyOfCorrectType = existingType.equals(wantedType);
if (isNoColumnShrink()) {
long existingLength = existingType.getLength() != null ? existingType.getLength() : 0;
if (existingLength > getColumnLength()) {
setColumnLength(existingLength);
}
}
boolean alreadyOfCorrectType = existingType.equals(getColumnType(), getColumnLength());
boolean alreadyCorrectNullable = isNullable() == nullable;
if (alreadyOfCorrectType && alreadyCorrectNullable) {
ourLog.info("Column {} on table {} is already of type {} and has nullable {} - No action performed", getColumnName(), getTableName(), wantedType, nullable);
ourLog.info("Column {} on table {} is already of type {} and has nullable {} - No action performed", getColumnName(), getTableName(), existingType, nullable);
return;
}


@ -40,8 +40,8 @@ public class AddColumnTest extends BaseTest {
getMigrator().migrate();
String type = JdbcUtils.getColumnType(getConnectionProperties(), "SOMETABLE", "newcolint");
assertEquals(BaseTableColumnTypeTask.ColumnTypeEnum.INT.getDescriptor(null), type);
JdbcUtils.ColumnType type = JdbcUtils.getColumnType(getConnectionProperties(), "SOMETABLE", "newcolint");
assertEquals(BaseTableColumnTypeTask.ColumnTypeEnum.INT, type.getColumnTypeEnum());
}
@Test


@ -25,7 +25,32 @@ public class ModifyColumnTest extends BaseTest {
getMigrator().migrate();
assertEquals("varchar(300)", JdbcUtils.getColumnType(getConnectionProperties(), "SOMETABLE", "TEXTCOL"));
assertEquals(new JdbcUtils.ColumnType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 300), JdbcUtils.getColumnType(getConnectionProperties(), "SOMETABLE", "TEXTCOL"));
assertEquals(1, task.getExecutedStatements().size());
// Make sure additional migrations don't crash
getMigrator().migrate();
getMigrator().migrate();
}
@Test
public void testNoShrink_SameNullable() throws SQLException {
executeSql("create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255), newcol bigint)");
ModifyColumnTask task = new ModifyColumnTask();
task.setTableName("SOMETABLE");
task.setColumnName("TEXTCOL");
task.setColumnType(AddColumnTask.ColumnTypeEnum.STRING);
task.setNullable(true);
task.setColumnLength(200);
getMigrator().setNoColumnShrink(true);
getMigrator().addTask(task);
getMigrator().migrate();
assertEquals(0, task.getExecutedStatements().size());
assertEquals(new JdbcUtils.ColumnType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 255), JdbcUtils.getColumnType(getConnectionProperties(), "SOMETABLE", "TEXTCOL"));
// Make sure additional migrations don't crash
getMigrator().migrate();
@ -38,8 +63,8 @@ public class ModifyColumnTest extends BaseTest {
executeSql("create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255) not null)");
assertFalse(JdbcUtils.isColumnNullable(getConnectionProperties(), "SOMETABLE", "PID"));
assertFalse(JdbcUtils.isColumnNullable(getConnectionProperties(), "SOMETABLE", "TEXTCOL"));
assertEquals("bigint", JdbcUtils.getColumnType(getConnectionProperties(), "SOMETABLE", "PID"));
assertEquals("varchar(255)", JdbcUtils.getColumnType(getConnectionProperties(), "SOMETABLE", "TEXTCOL"));
assertEquals(new JdbcUtils.ColumnType(BaseTableColumnTypeTask.ColumnTypeEnum.LONG, 19), JdbcUtils.getColumnType(getConnectionProperties(), "SOMETABLE", "PID"));
assertEquals(new JdbcUtils.ColumnType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 255), JdbcUtils.getColumnType(getConnectionProperties(), "SOMETABLE", "TEXTCOL"));
// PID
ModifyColumnTask task = new ModifyColumnTask();
@ -63,8 +88,8 @@ public class ModifyColumnTest extends BaseTest {
assertTrue(JdbcUtils.isColumnNullable(getConnectionProperties(), "SOMETABLE", "PID"));
assertTrue(JdbcUtils.isColumnNullable(getConnectionProperties(), "SOMETABLE", "TEXTCOL"));
assertEquals("bigint", JdbcUtils.getColumnType(getConnectionProperties(), "SOMETABLE", "PID"));
assertEquals("varchar(255)", JdbcUtils.getColumnType(getConnectionProperties(), "SOMETABLE", "TEXTCOL"));
assertEquals(new JdbcUtils.ColumnType(BaseTableColumnTypeTask.ColumnTypeEnum.LONG, 19), JdbcUtils.getColumnType(getConnectionProperties(), "SOMETABLE", "PID"));
assertEquals(new JdbcUtils.ColumnType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 255), JdbcUtils.getColumnType(getConnectionProperties(), "SOMETABLE", "TEXTCOL"));
// Make sure additional migrations don't crash
getMigrator().migrate();
@ -78,8 +103,8 @@ public class ModifyColumnTest extends BaseTest {
executeSql("create table SOMETABLE (PID bigint, TEXTCOL varchar(255))");
assertTrue(JdbcUtils.isColumnNullable(getConnectionProperties(), "SOMETABLE", "PID"));
assertTrue(JdbcUtils.isColumnNullable(getConnectionProperties(), "SOMETABLE", "TEXTCOL"));
assertEquals("bigint", JdbcUtils.getColumnType(getConnectionProperties(), "SOMETABLE", "PID"));
assertEquals("varchar(255)", JdbcUtils.getColumnType(getConnectionProperties(), "SOMETABLE", "TEXTCOL"));
assertEquals(new JdbcUtils.ColumnType(BaseTableColumnTypeTask.ColumnTypeEnum.LONG, 19), JdbcUtils.getColumnType(getConnectionProperties(), "SOMETABLE", "PID"));
assertEquals(new JdbcUtils.ColumnType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 255), JdbcUtils.getColumnType(getConnectionProperties(), "SOMETABLE", "TEXTCOL"));
// PID
ModifyColumnTask task = new ModifyColumnTask();
@ -103,8 +128,8 @@ public class ModifyColumnTest extends BaseTest {
assertFalse(JdbcUtils.isColumnNullable(getConnectionProperties(), "SOMETABLE", "PID"));
assertFalse(JdbcUtils.isColumnNullable(getConnectionProperties(), "SOMETABLE", "TEXTCOL"));
assertEquals("bigint", JdbcUtils.getColumnType(getConnectionProperties(), "SOMETABLE", "PID"));
assertEquals("varchar(255)", JdbcUtils.getColumnType(getConnectionProperties(), "SOMETABLE", "TEXTCOL"));
assertEquals(new JdbcUtils.ColumnType(BaseTableColumnTypeTask.ColumnTypeEnum.LONG, 19), JdbcUtils.getColumnType(getConnectionProperties(), "SOMETABLE", "PID"));
assertEquals(new JdbcUtils.ColumnType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 255), JdbcUtils.getColumnType(getConnectionProperties(), "SOMETABLE", "TEXTCOL"));
// Make sure additional migrations don't crash
getMigrator().migrate();


@ -1,39 +1,60 @@
package ca.uhn.fhir.rest.param;
import static org.junit.Assert.*;
import org.junit.AfterClass;
import org.junit.Test;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.util.TestUtil;
import com.google.common.base.Charsets;
import org.apache.commons.lang3.SerializationUtils;
import org.junit.AfterClass;
import org.junit.Ignore;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.junit.Assert.assertEquals;
public class ReferenceParamTest {
private static final Logger ourLog = LoggerFactory.getLogger(ReferenceParamTest.class);
private FhirContext ourCtx = FhirContext.forDstu3();
@Test
public void testValueWithSlashPersistsAcrossSerialization() {
ReferenceParam param = new ReferenceParam();
param.setValueAsQueryToken(ourCtx, "derived-from", ":DocumentReference.contenttype", "application/vnd.mfer");
assertEquals("application/vnd.mfer", param.getValueAsQueryToken(ourCtx));
assertEquals(":DocumentReference.contenttype", param.getQueryParameterQualifier());
byte[] serialized = SerializationUtils.serialize(param);
ourLog.info("Serialized: {}", new String(serialized, Charsets.US_ASCII));
param = SerializationUtils.deserialize(serialized);
assertEquals("application/vnd.mfer", param.getValueAsQueryToken(ourCtx));
assertEquals(":DocumentReference.contenttype", param.getQueryParameterQualifier());
}
@Test
public void testWithResourceType() {
ReferenceParam rp = new ReferenceParam();
rp.setValueAsQueryToken(ourCtx, null, null, "Location/123");
assertEquals("Location", rp.getResourceType());
assertEquals("123", rp.getIdPart());
assertEquals("Location/123", rp.getValue());
assertEquals(null, rp.getQueryParameterQualifier());
}
@Test
public void testWithResourceType_AbsoluteUrl() {
ReferenceParam rp = new ReferenceParam();
rp.setValueAsQueryToken(ourCtx, null, null, "http://a.b/c/d/e");
assertEquals("d", rp.getResourceType());
assertEquals("e", rp.getIdPart());
assertEquals("http://a.b/c/d/e", rp.getValue());
assertEquals(null, rp.getQueryParameterQualifier());
}
@Test
@ -74,24 +95,26 @@ public class ReferenceParamTest {
assertEquals("name", rp.getChain());
}
@Test
public void testWithResourceTypeAsQualifier() {
ReferenceParam rp = new ReferenceParam();
rp.setValueAsQueryToken(ourCtx, null, ":Location", "123");
assertEquals("Location", rp.getResourceType());
assertEquals("123", rp.getIdPart());
assertEquals("Location/123", rp.getValue());
assertEquals("123", rp.getValue());
assertEquals(null, rp.getQueryParameterQualifier());
}
// TODO: verify this behavior is correct. If the type is explicitly specified (e.g. :Location), should it be
// an error if it gets overridden by the resource type in the URL?
/**
* TODO: is this an error?
*/
@Test
public void testWithResourceTypeAsQualifier_RelativeUrl() {
@Ignore
public void testMismatchedTypeAndValueType() {
ReferenceParam rp = new ReferenceParam();
rp.setValueAsQueryToken(ourCtx, null, ":Location", "Patient/123");
assertEquals("Patient", rp.getResourceType());
@ -101,14 +124,26 @@ public class ReferenceParamTest {
}
@Test
public void testDuplicatedTypeAndValueType() {
ReferenceParam rp = new ReferenceParam();
rp.setValueAsQueryToken(ourCtx, null, ":Patient", "Patient/123");
assertEquals("Patient", rp.getResourceType());
assertEquals("123", rp.getIdPart());
assertEquals("Patient/123", rp.getValue());
assertEquals(null, rp.getQueryParameterQualifier());
}
// TODO: verify this behavior is correct. Same case as testWithResourceTypeAsQualifier_RelativeUrl()
@Test
public void testWithResourceTypeAsQualifier_AbsoluteUrl() {
ReferenceParam rp = new ReferenceParam();
rp.setValueAsQueryToken(ourCtx, null, ":Location", "http://a.b/c/d/e");
assertEquals("d", rp.getResourceType());
assertEquals("e", rp.getIdPart());
assertEquals("Location", rp.getResourceType());
assertEquals("http://a.b/c/d/e", rp.getIdPart());
assertEquals("http://a.b/c/d/e", rp.getValue());
assertEquals(null, rp.getQueryParameterQualifier());
@ -122,7 +157,7 @@ public class ReferenceParamTest {
rp.setValueAsQueryToken(ourCtx, null, ":Location.name", "FOO");
assertEquals("Location", rp.getResourceType());
assertEquals("FOO", rp.getIdPart());
assertEquals("Location/FOO", rp.getValue());
assertEquals("FOO", rp.getValue());
assertEquals(":Location.name", rp.getQueryParameterQualifier());
assertEquals("name", rp.getChain());
@ -135,7 +170,7 @@ public class ReferenceParamTest {
rp.setValueAsQueryToken(ourCtx, null, ":Patient.identifier", "http://hey.there/a/b|123");
assertEquals("Patient", rp.getResourceType());
assertEquals("http://hey.there/a/b|123", rp.getIdPart());
assertEquals("Patient/http://hey.there/a/b|123", rp.getValue());
assertEquals("http://hey.there/a/b|123", rp.getValue());
assertEquals(":Patient.identifier", rp.getQueryParameterQualifier());
assertEquals("identifier", rp.getChain());
@ -147,8 +182,8 @@ public class ReferenceParamTest {
ReferenceParam rp = new ReferenceParam();
rp.setValueAsQueryToken(ourCtx, null, ":Patient.identifier", "http://hey.there/a/b|");
assertEquals("Patient", rp.getResourceType());
assertEquals("http://hey.there/a/b|", rp.getValue());
assertEquals("http://hey.there/a/b|", rp.getIdPart());
assertEquals("Patient/http://hey.there/a/b|", rp.getValue());
assertEquals(":Patient.identifier", rp.getQueryParameterQualifier());
assertEquals("identifier", rp.getChain());
@ -161,7 +196,7 @@ public class ReferenceParamTest {
rp.setValueAsQueryToken(ourCtx, null, ":Patient.identifier", "|abc");
assertEquals("Patient", rp.getResourceType());
assertEquals("|abc", rp.getIdPart());
assertEquals("Patient/|abc", rp.getValue());
assertEquals("|abc", rp.getValue());
assertEquals(":Patient.identifier", rp.getQueryParameterQualifier());
assertEquals("identifier", rp.getChain());

pom.xml

@ -45,14 +45,7 @@
</scm>
<repositories>
<repository>
<snapshots>
<enabled>false</enabled>
</snapshots>
<id>bintray-dnault-maven</id>
<name>bintray</name>
<url>https://dl.bintray.com/dnault/maven</url>
</repository>
<!--
<repository>
<id>jitpack.io</id>
<url>https://jitpack.io</url>
@ -60,12 +53,16 @@
<enabled>false</enabled>
</snapshots>
</repository>
-->
<repository>
<id>oss-snapshot</id>
<url>https://oss.sonatype.org/content/repositories/snapshots/</url>
<snapshots>
<enabled>true</enabled>
</snapshots>
<releases>
<enabled>false</enabled>
</releases>
</repository>
</repositories>


@ -193,10 +193,25 @@
resulted in some ValueSets with duplicate codes. This has been corrected by specifying a path with each
filename.
</action>
<action>
A corner-case bug in the JPA server has been fixed: when performing a search that contained chained reference searches
where the value contained slashes (e.g.
<![CDATA[<code>Observation?derived-from:DocumentReference.contenttype=application/vnd.mfer</code>]]>),
the server could fail to load later pages of the search results.
</action>
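A client-side sketch of the kind of search described in this entry (the base URL is a placeholder; the query mirrors the testSearchWithSlashes case added in ResourceProviderR4Test in this change):

```java
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import org.hl7.fhir.r4.model.Bundle;

public class ChainedSearchWithSlashesExample {
   public static void main(String[] args) {
      // Placeholder server base URL.
      IGenericClient client = FhirContext.forR4().newRestfulGenericClient("http://localhost:8000/fhir");
      Bundle response = client
         .search()
         .byUrl("Observation?derived-from:DocumentReference.contenttype=application/vnd.mfer&_count=2")
         .returnBundle(Bundle.class)
         .execute();
      System.out.println("Matches in first page: " + response.getEntry().size());
   }
}
```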
<action type="add">
A new flag has been added to the JPA migrator tool that prevents the migrator from attempting to reduce the
length of existing columns in the schema.
</action>
<action type="fix" issue="1483">
Some resource IDs and URLs for LOINC ValueSets and ConceptMaps were inconsistently populated by the
terminology uploader. This has been corrected.
</action>
<action type="fix">
When a resource was updated with a meta.source containing a request ID, the new request ID was appended to the
existing meta.source, resulting in an ever-growing value. E.g. after the first update it would look
like "#9f0a901387128111#5f37835ee38a89e2" when it should only be "#5f37835ee38a89e2". This has been corrected.
</action>
</release>
<release version="4.0.3" date="2019-09-03" description="Igloo (Point Release)">
<action type="fix">