diff --git a/README.md b/README.md
index d753df33492..4d31ea76496 100644
--- a/README.md
+++ b/README.md
@@ -6,12 +6,10 @@ HAPI FHIR - Java API for HL7 FHIR Clients and Servers
[![Coverage Status](https://coveralls.io/repos/jamesagnew/hapi-fhir/badge.svg?branch=master&service=github)](https://coveralls.io/github/jamesagnew/hapi-fhir?branch=master)
[![Maven Central](https://maven-badges.herokuapp.com/maven-central/ca.uhn.hapi.fhir/hapi-fhir-base/badge.svg)](http://search.maven.org/#search|ga|1|ca.uhn.hapi.fhir)
[![License](https://img.shields.io/badge/license-apache%202.0-60C060.svg)](http://jamesagnew.github.io/hapi-fhir/license.html)
-
-* Linux Build: [![Build Status](https://travis-ci.org/jamesagnew/hapi-fhir.svg?branch=master)](https://travis-ci.org/jamesagnew/hapi-fhir)
-* Windows Build:
+[![Build Status](https://dev.azure.com/jamesagnew214/jamesagnew214/_apis/build/status/jamesagnew.hapi-fhir?branchName=master)](https://dev.azure.com/jamesagnew214/jamesagnew214/_build/latest?definitionId=1&branchName=master)
Complete project documentation is available here:
-http://jamesagnew.github.io/hapi-fhir/
+http://hapifhir.io
A demonstration of this project is available here:
http://hapi.fhir.org/
@@ -19,3 +17,5 @@ http://hapi.fhir.org/
This project is Open Source, licensed under the Apache Software License 2.0.
Please see [this wiki page](https://github.com/jamesagnew/hapi-fhir/wiki/Getting-Help) for information on where to get help with HAPI FHIR. Please see [Smile CDR](https://smilecdr.com) for information on commercial support.
+
+---
diff --git a/appveyor.yml b/appveyor.yml
deleted file mode 100644
index 8a1b3c96a0b..00000000000
--- a/appveyor.yml
+++ /dev/null
@@ -1,9 +0,0 @@
-version: 1.0.{build}
-image: Visual Studio 2017
-cache:
- - C:\maven\
- - C:\Users\appveyor\.m2\repository
-build_script:
- - SET JAVA_HOME=C:\Program Files\Java\jdk10
- - SET PATH=C:\Program Files\Java\jdk10\bin;%PATH%
- - cmd: mvn -P MINPARALLEL,ALLMODULES,REDUCED_JPA_TESTS clean install
diff --git a/azure-pipelines.yml b/azure-pipelines.yml
new file mode 100644
index 00000000000..6685d4e41c5
--- /dev/null
+++ b/azure-pipelines.yml
@@ -0,0 +1,56 @@
+# Starter pipeline
+# Start with a minimal pipeline that you can customize to build and deploy your code.
+# Add steps that build, run tests, deploy, and more:
+# https://aka.ms/yaml
+
+variables:
+ MAVEN_CACHE_FOLDER: $(Pipeline.Workspace)/.m2/repository
+ MAVEN_OPTS: '-Dmaven.repo.local=$(MAVEN_CACHE_FOLDER)'
+
+trigger:
+- master
+
+pool:
+ vmImage: 'ubuntu-latest'
+
+
+jobs:
+ - job: Build
+ timeoutInMinutes: 360
+ steps:
+ - task: CacheBeta@0
+ inputs:
+ key: maven
+ path: $(MAVEN_CACHE_FOLDER)
+ displayName: Cache Maven local repo
+
+ - task: Maven@3
+ inputs:
+ #mavenPomFile: 'pom.xml'
+ goals: 'clean install' # Optional
+ options: ''
+ #publishJUnitResults: true
+ #testResultsFiles: '**/surefire-reports/TEST-*.xml' # Required when publishJUnitResults == True
+ #testRunTitle: # Optional
+ #codeCoverageToolOption: 'None' # Optional. Options: none, cobertura, jaCoCo. Enabling code coverage inserts the `clean` goal into the Maven goals list when Maven runs.
+ #codeCoverageClassFilter: # Optional. Comma-separated list of filters to include or exclude classes from collecting code coverage. For example: +:com.*,+:org.*,-:my.app*.*
+ #codeCoverageClassFilesDirectories: # Optional
+ #codeCoverageSourceDirectories: # Optional
+ #codeCoverageFailIfEmpty: false # Optional
+ #javaHomeOption: 'JDKVersion' # Options: jDKVersion, path
+ #jdkVersionOption: 'default' # Optional. Options: default, 1.11, 1.10, 1.9, 1.8, 1.7, 1.6
+ #jdkDirectory: # Required when javaHomeOption == Path
+ #jdkArchitectureOption: 'x64' # Optional. Options: x86, x64
+ #mavenVersionOption: 'Default' # Options: default, path
+ #mavenDirectory: # Required when mavenVersionOption == Path
+ #mavenSetM2Home: false # Required when mavenVersionOption == Path
+ mavenOptions: '-Xmx2048m $(MAVEN_OPTS)' # Optional
+ #mavenAuthenticateFeed: false
+ #effectivePomSkip: false
+ #sonarQubeRunAnalysis: false
+ #sqMavenPluginVersionChoice: 'latest' # Required when sonarQubeRunAnalysis == True# Options: latest, pom
+ #checkStyleRunAnalysis: false # Optional
+ #pmdRunAnalysis: false # Optional
+ #findBugsRunAnalysis: false # Optional
+
+
diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/ReferenceParam.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/ReferenceParam.java
index b934f99dd77..e018fb94cff 100644
--- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/ReferenceParam.java
+++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/ReferenceParam.java
@@ -19,25 +19,28 @@ package ca.uhn.fhir.rest.param;
* limitations under the License.
* #L%
*/
-import static ca.uhn.fhir.model.primitive.IdDt.isValidLong;
-import static org.apache.commons.lang3.StringUtils.isBlank;
-import static org.apache.commons.lang3.StringUtils.isNotBlank;
-
-import java.math.BigDecimal;
+import ca.uhn.fhir.context.FhirContext;
+import ca.uhn.fhir.model.primitive.IdDt;
+import ca.uhn.fhir.parser.DataFormatException;
+import ca.uhn.fhir.util.CoverageIgnore;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import org.hl7.fhir.instance.model.api.IBaseResource;
-import ca.uhn.fhir.context.FhirContext;
-import ca.uhn.fhir.model.primitive.IdDt;
-import ca.uhn.fhir.util.CoverageIgnore;
+import java.math.BigDecimal;
+
+import static ca.uhn.fhir.model.primitive.IdDt.isValidLong;
+import static org.apache.commons.lang3.StringUtils.*;
public class ReferenceParam extends BaseParam /*implements IQueryParameterType*/ {
private String myChain;
+ private String myResourceType;
+ private String myBaseUrl;
+ private String myValue;
+ private String myIdPart;
- private final IdDt myId = new IdDt();
/**
* Constructor
*/
@@ -64,12 +67,15 @@ public class ReferenceParam extends BaseParam /*implements IQueryParameterType*/
* Constructor
*/
public ReferenceParam(String theResourceType, String theChain, String theValue) {
+ String qualifier = "";
if (isNotBlank(theResourceType)) {
- setValue(theResourceType + "/" + theValue);
- } else {
- setValue(theValue);
+ qualifier = ":" + theResourceType;
}
- setChain(theChain);
+ if (isNotBlank(theChain)) {
+ qualifier = qualifier + "." + theChain;
+ }
+
+ setValueAsQueryToken(null, null, qualifier, theValue);
}
@Override
@@ -91,55 +97,61 @@ public class ReferenceParam extends BaseParam /*implements IQueryParameterType*/
@Override
String doGetValueAsQueryToken(FhirContext theContext) {
- if (isBlank(myId.getResourceType())) {
- return myId.getValue(); // e.g. urn:asdjd or 123 or cid:wieiuru or #1
+ if (isBlank(getResourceType())) {
+ return myValue; // e.g. urn:asdjd or 123 or cid:wieiuru or #1
} else {
- if (isBlank(getChain())) {
- return getResourceType() + "/" + myId.getIdPart();
+ if (isBlank(getChain()) && isNotBlank(getResourceType())) {
+ return getResourceType() + "/" + getIdPart();
}
- return myId.getIdPart();
+ return myValue;
}
}
@Override
void doSetValueAsQueryToken(FhirContext theContext, String theParamName, String theQualifier, String theValue) {
String q = theQualifier;
- String resourceType = null;
- boolean skipSetValue = false;
if (isNotBlank(q)) {
if (q.startsWith(":")) {
int nextIdx = q.indexOf('.');
if (nextIdx != -1) {
- resourceType = q.substring(1, nextIdx);
myChain = q.substring(nextIdx + 1);
- // type is explicitly defined so use it
- myId.setParts(null, resourceType, theValue, null);
- skipSetValue = true;
+ myResourceType = q.substring(1, nextIdx);
} else {
- resourceType = q.substring(1);
+ myChain = null;
+ myResourceType = q.substring(1);
}
+
+ myValue = theValue;
+ myIdPart = theValue;
+
+ IdDt id = new IdDt(theValue);
+ if (!id.hasBaseUrl() && id.hasIdPart() && id.hasResourceType()) {
+ if (id.getResourceType().equals(myResourceType)) {
+ myIdPart = id.getIdPart();
+ }
+ }
+
} else if (q.startsWith(".")) {
myChain = q.substring(1);
- // type not defined but this is a chain, so treat value as opaque
- myId.setParts(null, null, theValue, null);
- skipSetValue = true;
+ myResourceType = null;
+ myValue = theValue;
+ myIdPart = theValue;
}
+ } else {
+ myChain = null;
+ myValue = theValue;
+ IdDt id = new IdDt(theValue);
+ myResourceType = id.getResourceType();
+ myIdPart = id.getIdPart();
+ myBaseUrl = id.getBaseUrl();
}
- if (!skipSetValue) {
- setValue(theValue);
-
- if (isNotBlank(resourceType) && isBlank(getResourceType())) {
- setValue(resourceType + '/' + theValue);
- }
- }
}
-
@CoverageIgnore
public String getBaseUrl() {
- return myId.getBaseUrl();
+ return myBaseUrl;
}
@@ -147,24 +159,34 @@ public class ReferenceParam extends BaseParam /*implements IQueryParameterType*/
return myChain;
}
+ public ReferenceParam setChain(String theChain) {
+ myChain = theChain;
+ return this;
+ }
@CoverageIgnore
public String getIdPart() {
- return myId.getIdPart();
+ return myIdPart;
}
@CoverageIgnore
public BigDecimal getIdPartAsBigDecimal() {
- return myId.getIdPartAsBigDecimal();
+ return new IdDt(myValue).getIdPartAsBigDecimal();
}
-
+
@CoverageIgnore
public Long getIdPartAsLong() {
- return myId.getIdPartAsLong();
+ return new IdDt(myValue).getIdPartAsLong();
}
public String getResourceType() {
- return myId.getResourceType();
+ if (isNotBlank(myResourceType)) {
+ return myResourceType;
+ }
+ if (isBlank(myChain)) {
+ return new IdDt(myValue).getResourceType();
+ }
+ return null;
}
public Class<? extends IBaseResource> getResourceType(FhirContext theCtx) {
@@ -175,11 +197,21 @@ public class ReferenceParam extends BaseParam /*implements IQueryParameterType*/
}
public String getValue() {
- return myId.getValue();
+ return myValue;
+ }
+
+ public ReferenceParam setValue(String theValue) {
+ IdDt id = new IdDt(theValue);
+ String qualifier= null;
+ if (id.hasResourceType()) {
+ qualifier = ":" + id.getResourceType();
+ }
+ setValueAsQueryToken(null, null, qualifier, id.getIdPart());
+ return this;
}
public boolean hasResourceType() {
- return myId.hasResourceType();
+ return isNotBlank(myResourceType);
}
@Override
@@ -187,16 +219,6 @@ public class ReferenceParam extends BaseParam /*implements IQueryParameterType*/
return true;
}
- public ReferenceParam setChain(String theChain) {
- myChain = theChain;
- return this;
- }
-
- public ReferenceParam setValue(String theValue) {
- myId.setValue(theValue);
- return this;
- }
-
/**
* Returns a new param containing the same value as this param, but with the type converted
* to {@link DateParam}. This is useful if you are using reference parameters and want to handle
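
The reworked ReferenceParam stores the type and chain in the qualifier and keeps the value itself opaque, which is what lets chained values containing slashes survive a round trip. A minimal sketch of the new behaviour, mirroring the serialization test added later in this patch (it assumes the DSTU3 structures are on the classpath):

```java
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.param.ReferenceParam;

public class ReferenceParamSketch {
	public static void main(String[] args) {
		FhirContext ctx = FhirContext.forDstu3();

		ReferenceParam param = new ReferenceParam();
		// The resource type and chain arrive via the qualifier; the value stays untouched
		param.setValueAsQueryToken(ctx, "derived-from", ":DocumentReference.contenttype", "application/vnd.mfer");

		System.out.println(param.getResourceType());            // DocumentReference
		System.out.println(param.getChain());                    // contenttype
		System.out.println(param.getValueAsQueryToken(ctx));     // application/vnd.mfer (slash preserved)
		System.out.println(param.getQueryParameterQualifier());  // :DocumentReference.contenttype
	}
}
```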
diff --git a/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/BaseMigrateDatabaseCommand.java b/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/BaseMigrateDatabaseCommand.java
index c4cf570b8f4..72759e380c1 100644
--- a/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/BaseMigrateDatabaseCommand.java
+++ b/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/BaseMigrateDatabaseCommand.java
@@ -78,6 +78,7 @@ public abstract class BaseMigrateDatabaseCommand extends BaseCom
addRequiredOption(retVal, "t", "to", "Version", "The database schema version to migrate TO");
addRequiredOption(retVal, "d", "driver", "Driver", "The database driver to use (Options are " + driverOptions() + ")");
addOptionalOption(retVal, "x", "flags", "Flags", "A comma-separated list of any specific migration flags (these flags are version specific, see migrator documentation for details)");
+ addOptionalOption(retVal, null, "no-column-shrink", false, "If this flag is set, the system will not attempt to reduce the length of columns. This is useful in environments with a lot of existing data, where shrinking a column can take a very long time.");
return retVal;
}
@@ -106,6 +107,7 @@ public abstract class BaseMigrateDatabaseCommand extends BaseCom
validateVersionSupported(to);
boolean dryRun = theCommandLine.hasOption("r");
+ boolean noColumnShrink = theCommandLine.hasOption("no-column-shrink");
String flags = theCommandLine.getOptionValue("x");
myFlags = Arrays.stream(defaultString(flags).split(","))
@@ -119,6 +121,7 @@ public abstract class BaseMigrateDatabaseCommand extends BaseCom
migrator.setUsername(username);
migrator.setPassword(password);
migrator.setDryRun(dryRun);
+ migrator.setNoColumnShrink(noColumnShrink);
addTasks(migrator, from, to);
migrator.migrate();
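
The new option simply flows through to the migrator. A rough sketch of the equivalent programmatic use, limited to the setters that appear in this patch (connection settings and the version-specific task list are omitted and would need to be configured first):

```java
import ca.uhn.fhir.jpa.migrate.Migrator;

public class MigrateSketch {
	public static void main(String[] args) {
		Migrator migrator = new Migrator();
		// ...driver, URL, username and password are configured here, as BaseMigrateDatabaseCommand does...
		migrator.setDryRun(false);
		migrator.setNoColumnShrink(true); // new: never shrink an existing column that is already longer
		// addTasks(migrator, from, to) would register the version-specific tasks before this call:
		migrator.migrate();
	}
}
```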
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/interceptors/interceptors.md b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/interceptors/interceptors.md
index b31ee889750..d8cc8c82bef 100644
--- a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/interceptors/interceptors.md
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/interceptors/interceptors.md
@@ -7,3 +7,4 @@ Interceptor classes may "hook into" various points in the processing chain in bo
```java
{{snippet:classpath:/ca/uhn/hapi/fhir/docs/MyInterceptor.java|sampleClass}}
```
+
diff --git a/hapi-fhir-jaxrsserver-base/pom.xml b/hapi-fhir-jaxrsserver-base/pom.xml
index 44f7d694601..9f2aa4accea 100644
--- a/hapi-fhir-jaxrsserver-base/pom.xml
+++ b/hapi-fhir-jaxrsserver-base/pom.xml
@@ -50,31 +50,26 @@
		<dependency>
			<groupId>ca.uhn.hapi.fhir</groupId>
			<artifactId>hapi-fhir-structures-dstu2</artifactId>
			<version>${project.version}</version>
-			<optional>true</optional>
		</dependency>
		<dependency>
			<groupId>ca.uhn.hapi.fhir</groupId>
			<artifactId>hapi-fhir-structures-hl7org-dstu2</artifactId>
			<version>${project.version}</version>
-			<optional>true</optional>
		</dependency>
		<dependency>
			<groupId>ca.uhn.hapi.fhir</groupId>
			<artifactId>hapi-fhir-structures-dstu2.1</artifactId>
			<version>${project.version}</version>
-			<optional>true</optional>
		</dependency>
		<dependency>
			<groupId>ca.uhn.hapi.fhir</groupId>
			<artifactId>hapi-fhir-structures-dstu3</artifactId>
			<version>${project.version}</version>
-			<optional>true</optional>
		</dependency>
		<dependency>
			<groupId>ca.uhn.hapi.fhir</groupId>
			<artifactId>hapi-fhir-structures-r4</artifactId>
			<version>${project.version}</version>
-			<optional>true</optional>
		</dependency>
diff --git a/hapi-fhir-jpaserver-base/pom.xml b/hapi-fhir-jpaserver-base/pom.xml
index 11ab97ac90f..dad80f5523a 100644
--- a/hapi-fhir-jpaserver-base/pom.xml
+++ b/hapi-fhir-jpaserver-base/pom.xml
@@ -212,7 +212,6 @@
		<dependency>
			<groupId>com.github.dnault</groupId>
			<artifactId>xml-patch</artifactId>
-			<version>0.3.0</version>
		</dependency>
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java
index 0a3c7eff1a4..255275ff131 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java
@@ -47,12 +47,14 @@ import ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
-import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor.ActionRequestDetails;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
-import ca.uhn.fhir.util.*;
+import ca.uhn.fhir.util.CoverageIgnore;
+import ca.uhn.fhir.util.MetaUtil;
+import ca.uhn.fhir.util.StopWatch;
+import ca.uhn.fhir.util.XmlUtil;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Charsets;
import com.google.common.collect.Sets;
@@ -86,7 +88,6 @@ import javax.xml.stream.events.XMLEvent;
import java.util.*;
import java.util.Map.Entry;
-import static ca.uhn.fhir.jpa.model.util.JpaConstants.EXT_EXTERNALIZED_BINARY_ID;
import static org.apache.commons.lang3.StringUtils.*;
/*
@@ -974,7 +975,7 @@ public abstract class BaseHapiFhirDao implements IDao,
// 6. Handle source (provenance)
if (isNotBlank(provenanceRequestId) || isNotBlank(provenanceSourceUri)) {
- String sourceString = defaultString(provenanceSourceUri)
+ String sourceString = cleanProvenanceSourceUri(provenanceSourceUri)
+ (isNotBlank(provenanceRequestId) ? "#" : "")
+ defaultString(provenanceRequestId);
@@ -992,6 +993,16 @@ public abstract class BaseHapiFhirDao implements IDao,
return retVal;
}
+ static String cleanProvenanceSourceUri(String theProvenanceSourceUri) {
+ if (isNotBlank(theProvenanceSourceUri)) {
+ int hashIndex = theProvenanceSourceUri.indexOf('#');
+ if (hashIndex != -1) {
+ theProvenanceSourceUri = theProvenanceSourceUri.substring(0, hashIndex);
+ }
+ }
+ return defaultString(theProvenanceSourceUri);
+ }
+
public String toResourceName(Class<? extends IBaseResource> theResourceType) {
return myContext.getResourceDefinition(theResourceType).getName();
}
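
The fix strips any previously appended request id (everything from the first '#') off the stored source URI before the new request id is attached. A standalone sketch of that logic with illustrative values; the helper below simply repeats the body of the new cleanProvenanceSourceUri():

```java
import static org.apache.commons.lang3.StringUtils.defaultString;
import static org.apache.commons.lang3.StringUtils.isNotBlank;

public class ProvenanceSourceSketch {

	// Same truncation as the new BaseHapiFhirDao.cleanProvenanceSourceUri()
	static String cleanProvenanceSourceUri(String theProvenanceSourceUri) {
		if (isNotBlank(theProvenanceSourceUri)) {
			int hashIndex = theProvenanceSourceUri.indexOf('#');
			if (hashIndex != -1) {
				theProvenanceSourceUri = theProvenanceSourceUri.substring(0, hashIndex);
			}
		}
		return defaultString(theProvenanceSourceUri);
	}

	public static void main(String[] args) {
		// Illustrative values: a stored meta.source already carrying a request id, plus a new request id
		String provenanceSourceUri = "urn:source:0#9f0a901387128111";
		String provenanceRequestId = "5f37835ee38a89e2";

		String sourceString = cleanProvenanceSourceUri(provenanceSourceUri)
			+ (isNotBlank(provenanceRequestId) ? "#" : "")
			+ defaultString(provenanceRequestId);

		System.out.println(sourceString); // urn:source:0#5f37835ee38a89e2 - request ids no longer accumulate
	}
}
```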
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/SearchBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/SearchBuilder.java
index d6fef210449..31e988a78d6 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/SearchBuilder.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/SearchBuilder.java
@@ -573,7 +573,7 @@ public class SearchBuilder implements ISearchBuilder {
private Predicate addPredicateReferenceWithChain(String theResourceName, String theParamName, List<? extends IQueryParameterType> theList, Join theJoin, List theCodePredicates, ReferenceParam theRef, RequestDetails theRequest) {
final List<Class<? extends IBaseResource>> resourceTypes;
String resourceId;
- if (!theRef.getValue().matches("[a-zA-Z]+/.*")) {
+ if (!theRef.hasResourceType()) {
RuntimeSearchParam param = mySearchParamRegistry.getActiveSearchParam(theResourceName, theParamName);
resourceTypes = new ArrayList<>();
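
The old guard used a regular expression, so any chained value that merely contained a slash (a MIME type, for example) looked like a typed reference. A tiny sketch of why that check misfired and what the replacement asks instead:

```java
public class SlashValueSketch {
	public static void main(String[] args) {
		// A chained search value that happens to contain a slash, taken from the test added in this patch
		String value = "application/vnd.mfer";

		// Old check: anything shaped like "SomeType/rest" was treated as a typed reference
		System.out.println(value.matches("[a-zA-Z]+/.*")); // true - a false positive

		// New check: the ReferenceParam itself is asked via theRef.hasResourceType(), which only reports
		// a type when one was supplied through the ":Type" qualifier, so this value stays opaque.
	}
}
```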
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermCodeSystemVersionDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermCodeSystemVersionDao.java
index 4d9d76065f8..bf90aad1df3 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermCodeSystemVersionDao.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermCodeSystemVersionDao.java
@@ -35,10 +35,10 @@ public interface ITermCodeSystemVersionDao extends JpaRepository<TermCodeSystemVersion, Long> {
- List<TermCodeSystemVersion> findByCodeSystemPid(@Param("resource_id") Long theCodeSystemResourcePid);
+ @Query("SELECT cs FROM TermCodeSystemVersion cs WHERE cs.myCodeSystemPid = :codesystem_pid")
+ List<TermCodeSystemVersion> findByCodeSystemPid(@Param("codesystem_pid") Long theCodeSystemPid);
- @Query("SELECT cs FROM TermCodeSystemVersion cs WHERE cs.myResource.myId = :resource_id")
+ @Query("SELECT cs FROM TermCodeSystemVersion cs WHERE cs.myResourcePid = :resource_id")
List<TermCodeSystemVersion> findByCodeSystemResourcePid(@Param("resource_id") Long theCodeSystemResourcePid);
@Query("SELECT cs FROM TermCodeSystemVersion cs WHERE cs.myCodeSystemHavingThisVersionAsCurrentVersionIfAny.myResource.myId = :resource_id")
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermCodeSystemVersion.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermCodeSystemVersion.java
index b305eb68f21..85874bac10c 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermCodeSystemVersion.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermCodeSystemVersion.java
@@ -58,6 +58,7 @@ public class TermCodeSystemVersion implements Serializable {
@Column(name = "CS_VERSION_ID", nullable = true, updatable = false, length = MAX_VERSION_LENGTH)
private String myCodeSystemVersionId;
+
/**
* This was added in HAPI FHIR 3.3.0 and is nullable just to avoid migration
* issues. It should be made non-nullable at some point.
@@ -65,8 +66,11 @@ public class TermCodeSystemVersion implements Serializable {
@ManyToOne
@JoinColumn(name = "CODESYSTEM_PID", referencedColumnName = "PID", nullable = true, foreignKey = @ForeignKey(name = "FK_CODESYSVER_CS_ID"))
private TermCodeSystem myCodeSystem;
- @SuppressWarnings("unused")
+ @Column(name = "CODESYSTEM_PID", insertable = false, updatable = false)
+ private Long myCodeSystemPid;
+
+ @SuppressWarnings("unused")
@OneToOne(mappedBy = "myCurrentVersion", optional = true)
private TermCodeSystem myCodeSystemHavingThisVersionAsCurrentVersionIfAny;
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDaoTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDaoTest.java
new file mode 100644
index 00000000000..787b3fa2ec8
--- /dev/null
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDaoTest.java
@@ -0,0 +1,16 @@
+package ca.uhn.fhir.jpa.dao;
+
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+
+public class BaseHapiFhirDaoTest {
+
+ @Test
+ public void cleanProvenanceSourceUri() {
+ assertEquals("", BaseHapiFhirDao.cleanProvenanceSourceUri(null));
+ assertEquals("abc", BaseHapiFhirDao.cleanProvenanceSourceUri("abc"));
+ assertEquals("abc", BaseHapiFhirDao.cleanProvenanceSourceUri("abc#def"));
+ assertEquals("abc", BaseHapiFhirDao.cleanProvenanceSourceUri("abc#def#ghi"));
+ }
+}
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchNoFtTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchNoFtTest.java
index 7c18e672068..46c25066285 100644
--- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchNoFtTest.java
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchNoFtTest.java
@@ -1368,11 +1368,14 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
map.setLastUpdated(new DateRangeParam().setUpperBound(new DateParam(ParamPrefixEnum.LESSTHAN, "2022-01-01")));
IBundleProvider found = myPatientDao.search(map);
Set<String> dates = new HashSet<>();
+ String searchId = found.getUuid();
for (int i = 0; i < 9; i++) {
List<IBaseResource> resources = found.getResources(i, i + 1);
- assertThat("Failed to load range " + i + " - " + (i + 1), resources, hasSize(1));
+ assertThat("Failed to load range " + i + " - " + (i + 1) + " - from provider of type: " + found.getClass(), resources, hasSize(1));
Patient nextResource = (Patient) resources.get(0);
dates.add(nextResource.getBirthDateElement().getValueAsString());
+
+ found = myPagingProvider.retrieveResultList(null, searchId);
}
assertThat(dates, hasItems(
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchOptimizedTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchOptimizedTest.java
index 6d0931d484d..79d3929a8e1 100644
--- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchOptimizedTest.java
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchOptimizedTest.java
@@ -120,7 +120,7 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
myDaoConfig.setSearchPreFetchThresholds(Arrays.asList(20, 50, 190));
- // Seach with count only
+ // Search with count only
SearchParameterMap params = new SearchParameterMap();
params.add(Patient.SP_NAME, new StringParam("FAM"));
params.setSummaryMode((SummaryEnum.COUNT));
@@ -142,7 +142,9 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
assertEquals(201, results.size().intValue());
ids = toUnqualifiedVersionlessIdValues(results, 0, 10, true);
assertThat(ids, hasSize(10));
- assertEquals(201, myDatabaseBackedPagingProvider.retrieveResultList(null, uuid).size().intValue());
+ IBundleProvider bundleProvider = myDatabaseBackedPagingProvider.retrieveResultList(null, uuid);
+ Integer bundleSize = bundleProvider.size();
+ assertEquals(201, bundleSize.intValue());
// Search with count only
params = new SearchParameterMap();
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/ResourceProviderDstu3Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/ResourceProviderDstu3Test.java
index 91d6fa55695..c44d8e47ce7 100644
--- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/ResourceProviderDstu3Test.java
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/ResourceProviderDstu3Test.java
@@ -3,6 +3,7 @@ package ca.uhn.fhir.jpa.provider.dstu3;
import ca.uhn.fhir.jpa.dao.DaoConfig;
import ca.uhn.fhir.jpa.dao.data.ISearchDao;
import ca.uhn.fhir.jpa.entity.Search;
+import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.jpa.provider.r4.ResourceProviderR4Test;
import ca.uhn.fhir.jpa.search.SearchCoordinatorSvcImpl;
import ca.uhn.fhir.model.api.TemporalPrecisionEnum;
@@ -3913,6 +3914,31 @@ public class ResourceProviderDstu3Test extends BaseResourceProviderDstu3Test {
}
+ @Test
+ public void testUpdateWithSource() {
+ Patient patient = new Patient();
+ patient.setActive(false);
+ IIdType patientid = ourClient.create().resource(patient).execute().getId().toUnqualifiedVersionless();
+
+ {
+ Patient readPatient = (Patient) ourClient.read().resource("Patient").withId(patientid).execute();
+ assertThat(readPatient.getMeta().getExtensionString(JpaConstants.EXT_META_SOURCE), matchesPattern("#[a-f0-9]+"));
+ }
+
+ patient.setId(patientid);
+ patient.setActive(true);
+ ourClient.update().resource(patient).execute();
+ {
+ Patient readPatient = (Patient) ourClient.read().resource("Patient").withId(patientid).execute();
+ assertThat(readPatient.getMeta().getExtensionString(JpaConstants.EXT_META_SOURCE), matchesPattern("#[a-f0-9]+"));
+
+ readPatient.addName().setFamily("testUpdateWithSource");
+ ourClient.update().resource(readPatient).execute();
+ readPatient = (Patient) ourClient.read().resource("Patient").withId(patientid).execute();
+ assertThat(readPatient.getMeta().getExtensionString(JpaConstants.EXT_META_SOURCE), matchesPattern("#[a-f0-9]+"));
+ }
+ }
+
@Test
public void testUpdateWithETag() throws Exception {
String methodName = "testUpdateWithETag";
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4Test.java
index 2c25bc2c640..2dddbe66a89 100644
--- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4Test.java
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4Test.java
@@ -167,6 +167,54 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
}
+ @Test
+ public void testSearchWithSlashes() {
+ myDaoConfig.setSearchPreFetchThresholds(Lists.newArrayList(10, 50, 10000));
+
+ Procedure procedure = new Procedure();
+ procedure.setStatus(Procedure.ProcedureStatus.COMPLETED);
+ String procedureId = ourClient.create().resource(procedure).execute().getId().toUnqualifiedVersionless().getValue();
+
+ DocumentReference dr = new DocumentReference();
+ dr.addContent().getAttachment().setContentType("application/vnd.mfer");
+ String drId = ourClient.create().resource(dr).execute().getId().toUnqualifiedVersionless().getValue();
+
+ for (int i = 0; i < 60; i++) {
+ Observation obs = new Observation();
+ obs.addPartOf().setReference(procedureId);
+ obs.addDerivedFrom().setReference(drId);
+ ourClient.create().resource(obs).execute();
+ }
+
+ ourLog.info("Starting search");
+
+ Bundle response = ourClient
+ .search()
+ .byUrl("Observation?part-of=" + procedureId + "&derived-from:DocumentReference.contenttype=application/vnd.mfer&_total=accurate&_count=2")
+ .returnBundle(Bundle.class)
+ .execute();
+
+ int obsCount = 0;
+ int pageCount = 0;
+ while (response != null) {
+ obsCount += response.getEntry().size();
+ pageCount++;
+ if (response.getLink("next") != null) {
+ response = ourClient.loadPage().next(response).execute();
+ } else {
+ response = null;
+ }
+
+
+ ourLog.info("Have loaded {} pages and {} resources", pageCount, obsCount);
+ }
+
+ assertEquals(60, obsCount);
+ assertEquals(30, pageCount);
+
+ }
+
+
@Test
public void testManualPagingLinkOffsetDoesntReturnBeyondEnd() {
myDaoConfig.setSearchPreFetchThresholds(Lists.newArrayList(10, 1000));
@@ -5081,6 +5129,31 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
}
+ @Test
+ public void testUpdateWithSource() {
+ Patient patient = new Patient();
+ patient.setActive(false);
+ IIdType patientid = ourClient.create().resource(patient).execute().getId().toUnqualifiedVersionless();
+
+ {
+ Patient readPatient = (Patient) ourClient.read().resource("Patient").withId(patientid).execute();
+ assertThat(readPatient.getMeta().getSource(), matchesPattern("#[a-f0-9]+"));
+ }
+
+ patient.setId(patientid);
+ patient.setActive(true);
+ ourClient.update().resource(patient).execute();
+ {
+ Patient readPatient = (Patient) ourClient.read().resource("Patient").withId(patientid).execute();
+ assertThat(readPatient.getMeta().getSource(), matchesPattern("#[a-f0-9]+"));
+
+ readPatient.addName().setFamily("testUpdateWithSource");
+ ourClient.update().resource(readPatient).execute();
+ readPatient = (Patient) ourClient.read().resource("Patient").withId(patientid).execute();
+ assertThat(readPatient.getMeta().getSource(), matchesPattern("#[a-f0-9]+"));
+ }
+ }
+
@Test
public void testUpdateWithETag() throws Exception {
String methodName = "testUpdateWithETag";
diff --git a/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/JdbcUtils.java b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/JdbcUtils.java
index 412dd261895..f1c889442be 100644
--- a/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/JdbcUtils.java
+++ b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/JdbcUtils.java
@@ -22,6 +22,9 @@ package ca.uhn.fhir.jpa.migrate;
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTableColumnTypeTask;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
+import org.apache.commons.lang3.builder.EqualsBuilder;
+import org.apache.commons.lang3.builder.HashCodeBuilder;
+import org.apache.commons.lang3.builder.ToStringBuilder;
import org.hibernate.boot.model.naming.Identifier;
import org.hibernate.dialect.Dialect;
import org.hibernate.engine.jdbc.dialect.internal.StandardDialectResolver;
@@ -48,6 +51,73 @@ import static org.thymeleaf.util.StringUtils.toUpperCase;
public class JdbcUtils {
private static final Logger ourLog = LoggerFactory.getLogger(JdbcUtils.class);
+ public static class ColumnType {
+ private final BaseTableColumnTypeTask.ColumnTypeEnum myColumnTypeEnum;
+ private final Long myLength;
+
+ public ColumnType(BaseTableColumnTypeTask.ColumnTypeEnum theColumnType, Long theLength) {
+ myColumnTypeEnum = theColumnType;
+ myLength = theLength;
+ }
+
+ public ColumnType(BaseTableColumnTypeTask.ColumnTypeEnum theColumnType, int theLength) {
+ this(theColumnType, (long) theLength);
+ }
+
+ public ColumnType(BaseTableColumnTypeTask.ColumnTypeEnum theColumnType) {
+ this(theColumnType, null);
+ }
+
+ @Override
+ public boolean equals(Object theO) {
+ if (this == theO) {
+ return true;
+ }
+
+ if (theO == null || getClass() != theO.getClass()) {
+ return false;
+ }
+
+ ColumnType that = (ColumnType) theO;
+
+ return new EqualsBuilder()
+ .append(myColumnTypeEnum, that.myColumnTypeEnum)
+ .append(myLength, that.myLength)
+ .isEquals();
+ }
+
+ @Override
+ public int hashCode() {
+ return new HashCodeBuilder(17, 37)
+ .append(myColumnTypeEnum)
+ .append(myLength)
+ .toHashCode();
+ }
+
+ @Override
+ public String toString() {
+ ToStringBuilder b = new ToStringBuilder(this);
+ b.append("type", myColumnTypeEnum);
+ if (myLength != null) {
+ b.append("length", myLength);
+ }
+ return b.toString();
+ }
+
+ public BaseTableColumnTypeTask.ColumnTypeEnum getColumnTypeEnum() {
+ return myColumnTypeEnum;
+ }
+
+ public Long getLength() {
+ return myLength;
+ }
+
+ public boolean equals(BaseTableColumnTypeTask.ColumnTypeEnum theColumnType, Long theColumnLength) {
+ return myColumnTypeEnum == theColumnType && (myLength == null || myLength.equals(theColumnLength));
+ }
+
+ }
+
/**
* Retrieve all index names
*/
@@ -127,7 +197,7 @@ public class JdbcUtils {
/**
* Retrieve all index names
*/
- public static String getColumnType(DriverTypeEnum.ConnectionProperties theConnectionProperties, String theTableName, String theColumnName) throws SQLException {
+ public static ColumnType getColumnType(DriverTypeEnum.ConnectionProperties theConnectionProperties, String theTableName, String theColumnName) throws SQLException {
DataSource dataSource = Objects.requireNonNull(theConnectionProperties.getDataSource());
try (Connection connection = dataSource.getConnection()) {
return theConnectionProperties.getTxTemplate().execute(t -> {
@@ -153,18 +223,18 @@ public class JdbcUtils {
Long length = indexes.getLong("COLUMN_SIZE");
switch (dataType) {
case Types.VARCHAR:
- return BaseTableColumnTypeTask.ColumnTypeEnum.STRING.getDescriptor(length);
+ return new ColumnType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, length);
case Types.NUMERIC:
case Types.BIGINT:
case Types.DECIMAL:
- return BaseTableColumnTypeTask.ColumnTypeEnum.LONG.getDescriptor(null);
+ return new ColumnType(BaseTableColumnTypeTask.ColumnTypeEnum.LONG, length);
case Types.INTEGER:
- return BaseTableColumnTypeTask.ColumnTypeEnum.INT.getDescriptor(null);
+ return new ColumnType(BaseTableColumnTypeTask.ColumnTypeEnum.INT, length);
case Types.TIMESTAMP:
case Types.TIMESTAMP_WITH_TIMEZONE:
- return BaseTableColumnTypeTask.ColumnTypeEnum.DATE_TIMESTAMP.getDescriptor(null);
+ return new ColumnType(BaseTableColumnTypeTask.ColumnTypeEnum.DATE_TIMESTAMP, length);
case Types.BLOB:
- return BaseTableColumnTypeTask.ColumnTypeEnum.BLOB.getDescriptor(null);
+ return new ColumnType(BaseTableColumnTypeTask.ColumnTypeEnum.BLOB, length);
default:
throw new IllegalArgumentException("Don't know how to handle datatype " + dataType + " for column " + theColumnName + " on table " + theTableName);
}
diff --git a/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/Migrator.java b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/Migrator.java
index 1278f1a1dbf..9cadd2feb20 100644
--- a/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/Migrator.java
+++ b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/Migrator.java
@@ -44,6 +44,7 @@ public class Migrator {
private int myChangesCount;
private boolean myDryRun;
private List myExecutedStatements = new ArrayList<>();
+ private boolean myNoColumnShrink;
public int getChangesCount() {
return myChangesCount;
@@ -82,6 +83,7 @@ public class Migrator {
next.setDriverType(myDriverType);
next.setConnectionProperties(myConnectionProperties);
next.setDryRun(myDryRun);
+ next.setNoColumnShrink(myNoColumnShrink);
try {
next.execute();
} catch (SQLException e) {
@@ -126,4 +128,8 @@ public class Migrator {
public void addTasks(List<BaseTask<?>> theTasks) {
theTasks.forEach(this::addTask);
}
+
+ public void setNoColumnShrink(boolean theNoColumnShrink) {
+ myNoColumnShrink = theNoColumnShrink;
+ }
}
diff --git a/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/ArbitrarySqlTask.java b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/ArbitrarySqlTask.java
index d7acd47efb6..de64bda2543 100644
--- a/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/ArbitrarySqlTask.java
+++ b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/ArbitrarySqlTask.java
@@ -71,7 +71,7 @@ public class ArbitrarySqlTask extends BaseTask {
}
for (TableAndColumn next : myConditionalOnExistenceOf) {
- String columnType = JdbcUtils.getColumnType(getConnectionProperties(), next.getTable(), next.getColumn());
+ JdbcUtils.ColumnType columnType = JdbcUtils.getColumnType(getConnectionProperties(), next.getTable(), next.getColumn());
if (columnType == null) {
ourLog.info("Table {} does not have column {} - No action performed", next.getTable(), next.getColumn());
return;
diff --git a/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/BaseTableColumnTypeTask.java b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/BaseTableColumnTypeTask.java
index c5cb793b61d..d16d1f5aaef 100644
--- a/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/BaseTableColumnTypeTask.java
+++ b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/BaseTableColumnTypeTask.java
@@ -164,74 +164,23 @@ public abstract class BaseTableColumnTypeTask extends B
return myColumnLength;
}
- public BaseTableColumnTypeTask setColumnLength(int theColumnLength) {
- myColumnLength = (long) theColumnLength;
+ public BaseTableColumnTypeTask setColumnLength(long theColumnLength) {
+ myColumnLength = theColumnLength;
return this;
}
public enum ColumnTypeEnum {
- LONG {
- @Override
- public String getDescriptor(Long theColumnLength) {
- Assert.isTrue(theColumnLength == null, "Must not supply a column length");
- return "bigint";
- }
- },
- STRING {
- @Override
- public String getDescriptor(Long theColumnLength) {
- Assert.isTrue(theColumnLength != null, "Must supply a column length");
- return "varchar(" + theColumnLength + ")";
- }
- },
- DATE_TIMESTAMP {
- @Override
- public String getDescriptor(Long theColumnLength) {
- Assert.isTrue(theColumnLength == null, "Must not supply a column length");
- return "timestamp";
- }
- },
- BOOLEAN {
- @Override
- public String getDescriptor(Long theColumnLength) {
- Assert.isTrue(theColumnLength == null, "Must not supply a column length");
- return "boolean";
- }
- },
- FLOAT {
- @Override
- public String getDescriptor(Long theColumnLength) {
- Assert.isTrue(theColumnLength == null, "Must not supply a column length");
- return "float";
- }
- },
- INT {
- @Override
- public String getDescriptor(Long theColumnLength) {
- Assert.isTrue(theColumnLength == null, "Must not supply a column length");
- return "int";
- }
- },
-
- BLOB {
- @Override
- public String getDescriptor(Long theColumnLength) {
- Assert.isTrue(theColumnLength == null, "Must not supply a column length");
- return "blob";
- }
- },
-
- CLOB {
- @Override
- public String getDescriptor(Long theColumnLength) {
- Assert.isTrue(theColumnLength == null, "Must not supply a column length");
- return "clob";
- }
- };
-
- public abstract String getDescriptor(Long theColumnLength);
+ LONG,
+ STRING,
+ DATE_TIMESTAMP,
+ BOOLEAN,
+ FLOAT,
+ INT,
+ BLOB,
+ CLOB
+ ;
}
diff --git a/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/BaseTask.java b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/BaseTask.java
index b4c03979b2b..d8986ce325b 100644
--- a/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/BaseTask.java
+++ b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/BaseTask.java
@@ -42,6 +42,15 @@ public abstract class BaseTask {
private int myChangesCount;
private boolean myDryRun;
private List myExecutedStatements = new ArrayList<>();
+ private boolean myNoColumnShrink;
+
+ public boolean isNoColumnShrink() {
+ return myNoColumnShrink;
+ }
+
+ public void setNoColumnShrink(boolean theNoColumnShrink) {
+ myNoColumnShrink = theNoColumnShrink;
+ }
public boolean isDryRun() {
return myDryRun;
diff --git a/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/ModifyColumnTask.java b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/ModifyColumnTask.java
index 6442a4a27c9..7e222905ce1 100644
--- a/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/ModifyColumnTask.java
+++ b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/ModifyColumnTask.java
@@ -36,7 +36,7 @@ public class ModifyColumnTask extends BaseTableColumnTypeTask
@Override
public void execute() throws SQLException {
- String existingType;
+ JdbcUtils.ColumnType existingType;
boolean nullable;
Set<String> columnNames = JdbcUtils.getColumnNames(getConnectionProperties(), getTableName());
@@ -52,11 +52,17 @@ public class ModifyColumnTask extends BaseTableColumnTypeTask
throw new InternalErrorException(e);
}
- String wantedType = getColumnType().getDescriptor(getColumnLength());
- boolean alreadyOfCorrectType = existingType.equals(wantedType);
+ if (isNoColumnShrink()) {
+ long existingLength = existingType.getLength() != null ? existingType.getLength() : 0;
+ if (existingLength > getColumnLength()) {
+ setColumnLength(existingLength);
+ }
+ }
+
+ boolean alreadyOfCorrectType = existingType.equals(getColumnType(), getColumnLength());
boolean alreadyCorrectNullable = isNullable() == nullable;
if (alreadyOfCorrectType && alreadyCorrectNullable) {
- ourLog.info("Column {} on table {} is already of type {} and has nullable {} - No action performed", getColumnName(), getTableName(), wantedType, nullable);
+ ourLog.info("Column {} on table {} is already of type {} and has nullable {} - No action performed", getColumnName(), getTableName(), existingType, nullable);
return;
}
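
With getColumnType() now returning a structured ColumnType, the task compares type and length directly instead of comparing rendered DDL strings, and no-column-shrink simply raises the requested length to the existing one before that comparison. A short sketch of the comparison (illustrative values, assuming the migrate module is on the classpath):

```java
import ca.uhn.fhir.jpa.migrate.JdbcUtils;
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTableColumnTypeTask;

public class ColumnTypeSketch {
	public static void main(String[] args) {
		// What JdbcUtils.getColumnType() might report for an existing varchar(255) column
		JdbcUtils.ColumnType existingType =
			new JdbcUtils.ColumnType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 255);

		// A task asking for varchar(200) is not "already of the correct type", so it would normally run
		System.out.println(existingType.equals(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 200L)); // false

		// With no-column-shrink the wanted length is first raised to the existing 255,
		// after which the comparison succeeds and the task becomes a no-op
		System.out.println(existingType.equals(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 255L)); // true
	}
}
```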
diff --git a/hapi-fhir-jpaserver-migrate/src/test/java/ca/uhn/fhir/jpa/migrate/taskdef/AddColumnTest.java b/hapi-fhir-jpaserver-migrate/src/test/java/ca/uhn/fhir/jpa/migrate/taskdef/AddColumnTest.java
index cc0ba181df7..c809412e6f0 100644
--- a/hapi-fhir-jpaserver-migrate/src/test/java/ca/uhn/fhir/jpa/migrate/taskdef/AddColumnTest.java
+++ b/hapi-fhir-jpaserver-migrate/src/test/java/ca/uhn/fhir/jpa/migrate/taskdef/AddColumnTest.java
@@ -40,8 +40,8 @@ public class AddColumnTest extends BaseTest {
getMigrator().migrate();
- String type = JdbcUtils.getColumnType(getConnectionProperties(), "SOMETABLE", "newcolint");
- assertEquals(BaseTableColumnTypeTask.ColumnTypeEnum.INT.getDescriptor(null), type);
+ JdbcUtils.ColumnType type = JdbcUtils.getColumnType(getConnectionProperties(), "SOMETABLE", "newcolint");
+ assertEquals(BaseTableColumnTypeTask.ColumnTypeEnum.INT, type.getColumnTypeEnum());
}
@Test
diff --git a/hapi-fhir-jpaserver-migrate/src/test/java/ca/uhn/fhir/jpa/migrate/taskdef/ModifyColumnTest.java b/hapi-fhir-jpaserver-migrate/src/test/java/ca/uhn/fhir/jpa/migrate/taskdef/ModifyColumnTest.java
index cf17003797f..6a2cb7c63d3 100644
--- a/hapi-fhir-jpaserver-migrate/src/test/java/ca/uhn/fhir/jpa/migrate/taskdef/ModifyColumnTest.java
+++ b/hapi-fhir-jpaserver-migrate/src/test/java/ca/uhn/fhir/jpa/migrate/taskdef/ModifyColumnTest.java
@@ -25,7 +25,32 @@ public class ModifyColumnTest extends BaseTest {
getMigrator().migrate();
- assertEquals("varchar(300)", JdbcUtils.getColumnType(getConnectionProperties(), "SOMETABLE", "TEXTCOL"));
+ assertEquals(new JdbcUtils.ColumnType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 300), JdbcUtils.getColumnType(getConnectionProperties(), "SOMETABLE", "TEXTCOL"));
+ assertEquals(1, task.getExecutedStatements().size());
+
+ // Make sure additional migrations don't crash
+ getMigrator().migrate();
+ getMigrator().migrate();
+
+ }
+
+ @Test
+ public void testNoShrink_SameNullable() throws SQLException {
+ executeSql("create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255), newcol bigint)");
+
+ ModifyColumnTask task = new ModifyColumnTask();
+ task.setTableName("SOMETABLE");
+ task.setColumnName("TEXTCOL");
+ task.setColumnType(AddColumnTask.ColumnTypeEnum.STRING);
+ task.setNullable(true);
+ task.setColumnLength(200);
+
+ getMigrator().setNoColumnShrink(true);
+ getMigrator().addTask(task);
+ getMigrator().migrate();
+
+ assertEquals(0, task.getExecutedStatements().size());
+ assertEquals(new JdbcUtils.ColumnType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 255), JdbcUtils.getColumnType(getConnectionProperties(), "SOMETABLE", "TEXTCOL"));
// Make sure additional migrations don't crash
getMigrator().migrate();
@@ -38,8 +63,8 @@ public class ModifyColumnTest extends BaseTest {
executeSql("create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255) not null)");
assertFalse(JdbcUtils.isColumnNullable(getConnectionProperties(), "SOMETABLE", "PID"));
assertFalse(JdbcUtils.isColumnNullable(getConnectionProperties(), "SOMETABLE", "TEXTCOL"));
- assertEquals("bigint", JdbcUtils.getColumnType(getConnectionProperties(), "SOMETABLE", "PID"));
- assertEquals("varchar(255)", JdbcUtils.getColumnType(getConnectionProperties(), "SOMETABLE", "TEXTCOL"));
+ assertEquals(new JdbcUtils.ColumnType(BaseTableColumnTypeTask.ColumnTypeEnum.LONG, 19), JdbcUtils.getColumnType(getConnectionProperties(), "SOMETABLE", "PID"));
+ assertEquals(new JdbcUtils.ColumnType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 255), JdbcUtils.getColumnType(getConnectionProperties(), "SOMETABLE", "TEXTCOL"));
// PID
ModifyColumnTask task = new ModifyColumnTask();
@@ -63,8 +88,8 @@ public class ModifyColumnTest extends BaseTest {
assertTrue(JdbcUtils.isColumnNullable(getConnectionProperties(), "SOMETABLE", "PID"));
assertTrue(JdbcUtils.isColumnNullable(getConnectionProperties(), "SOMETABLE", "TEXTCOL"));
- assertEquals("bigint", JdbcUtils.getColumnType(getConnectionProperties(), "SOMETABLE", "PID"));
- assertEquals("varchar(255)", JdbcUtils.getColumnType(getConnectionProperties(), "SOMETABLE", "TEXTCOL"));
+ assertEquals(new JdbcUtils.ColumnType(BaseTableColumnTypeTask.ColumnTypeEnum.LONG, 19), JdbcUtils.getColumnType(getConnectionProperties(), "SOMETABLE", "PID"));
+ assertEquals(new JdbcUtils.ColumnType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 255), JdbcUtils.getColumnType(getConnectionProperties(), "SOMETABLE", "TEXTCOL"));
// Make sure additional migrations don't crash
getMigrator().migrate();
@@ -78,8 +103,8 @@ public class ModifyColumnTest extends BaseTest {
executeSql("create table SOMETABLE (PID bigint, TEXTCOL varchar(255))");
assertTrue(JdbcUtils.isColumnNullable(getConnectionProperties(), "SOMETABLE", "PID"));
assertTrue(JdbcUtils.isColumnNullable(getConnectionProperties(), "SOMETABLE", "TEXTCOL"));
- assertEquals("bigint", JdbcUtils.getColumnType(getConnectionProperties(), "SOMETABLE", "PID"));
- assertEquals("varchar(255)", JdbcUtils.getColumnType(getConnectionProperties(), "SOMETABLE", "TEXTCOL"));
+ assertEquals(new JdbcUtils.ColumnType(BaseTableColumnTypeTask.ColumnTypeEnum.LONG, 19), JdbcUtils.getColumnType(getConnectionProperties(), "SOMETABLE", "PID"));
+ assertEquals(new JdbcUtils.ColumnType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 255), JdbcUtils.getColumnType(getConnectionProperties(), "SOMETABLE", "TEXTCOL"));
// PID
ModifyColumnTask task = new ModifyColumnTask();
@@ -103,8 +128,8 @@ public class ModifyColumnTest extends BaseTest {
assertFalse(JdbcUtils.isColumnNullable(getConnectionProperties(), "SOMETABLE", "PID"));
assertFalse(JdbcUtils.isColumnNullable(getConnectionProperties(), "SOMETABLE", "TEXTCOL"));
- assertEquals("bigint", JdbcUtils.getColumnType(getConnectionProperties(), "SOMETABLE", "PID"));
- assertEquals("varchar(255)", JdbcUtils.getColumnType(getConnectionProperties(), "SOMETABLE", "TEXTCOL"));
+ assertEquals(new JdbcUtils.ColumnType(BaseTableColumnTypeTask.ColumnTypeEnum.LONG, 19), JdbcUtils.getColumnType(getConnectionProperties(), "SOMETABLE", "PID"));
+ assertEquals(new JdbcUtils.ColumnType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 255), JdbcUtils.getColumnType(getConnectionProperties(), "SOMETABLE", "TEXTCOL"));
// Make sure additional migrations don't crash
getMigrator().migrate();
diff --git a/hapi-fhir-structures-dstu3/src/test/java/ca/uhn/fhir/rest/param/ReferenceParamTest.java b/hapi-fhir-structures-dstu3/src/test/java/ca/uhn/fhir/rest/param/ReferenceParamTest.java
index a61aa706ac2..f727d380bf5 100644
--- a/hapi-fhir-structures-dstu3/src/test/java/ca/uhn/fhir/rest/param/ReferenceParamTest.java
+++ b/hapi-fhir-structures-dstu3/src/test/java/ca/uhn/fhir/rest/param/ReferenceParamTest.java
@@ -1,39 +1,60 @@
package ca.uhn.fhir.rest.param;
-import static org.junit.Assert.*;
-
-import org.junit.AfterClass;
-import org.junit.Test;
-
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.util.TestUtil;
+import com.google.common.base.Charsets;
+import org.apache.commons.lang3.SerializationUtils;
+import org.junit.AfterClass;
+import org.junit.Ignore;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import static org.junit.Assert.assertEquals;
public class ReferenceParamTest {
+ private static final Logger ourLog = LoggerFactory.getLogger(ReferenceParamTest.class);
private FhirContext ourCtx = FhirContext.forDstu3();
+ @Test
+ public void testValueWithSlashPersistsAcrossSerialization() {
+ ReferenceParam param = new ReferenceParam();
+ param.setValueAsQueryToken(ourCtx, "derived-from", ":DocumentReference.contenttype", "application/vnd.mfer");
+
+ assertEquals("application/vnd.mfer", param.getValueAsQueryToken(ourCtx));
+ assertEquals(":DocumentReference.contenttype", param.getQueryParameterQualifier());
+
+ byte[] serialized = SerializationUtils.serialize(param);
+ ourLog.info("Serialized: {}", new String(serialized, Charsets.US_ASCII));
+ param = SerializationUtils.deserialize(serialized);
+
+ assertEquals("application/vnd.mfer", param.getValueAsQueryToken(ourCtx));
+ assertEquals(":DocumentReference.contenttype", param.getQueryParameterQualifier());
+ }
+
@Test
public void testWithResourceType() {
-
+
ReferenceParam rp = new ReferenceParam();
rp.setValueAsQueryToken(ourCtx, null, null, "Location/123");
assertEquals("Location", rp.getResourceType());
assertEquals("123", rp.getIdPart());
assertEquals("Location/123", rp.getValue());
assertEquals(null, rp.getQueryParameterQualifier());
-
+
}
@Test
public void testWithResourceType_AbsoluteUrl() {
-
+
ReferenceParam rp = new ReferenceParam();
rp.setValueAsQueryToken(ourCtx, null, null, "http://a.b/c/d/e");
assertEquals("d", rp.getResourceType());
assertEquals("e", rp.getIdPart());
assertEquals("http://a.b/c/d/e", rp.getValue());
assertEquals(null, rp.getQueryParameterQualifier());
-
+
}
@Test
@@ -74,24 +95,26 @@ public class ReferenceParamTest {
assertEquals("name", rp.getChain());
}
-
+
@Test
public void testWithResourceTypeAsQualifier() {
-
+
ReferenceParam rp = new ReferenceParam();
rp.setValueAsQueryToken(ourCtx, null, ":Location", "123");
assertEquals("Location", rp.getResourceType());
assertEquals("123", rp.getIdPart());
- assertEquals("Location/123", rp.getValue());
+ assertEquals("123", rp.getValue());
assertEquals(null, rp.getQueryParameterQualifier());
}
- // TODO: verify this behavior is correct. If type is explicitly specified (i.e. :Location), should it be
- // an error if it gets overriden by the resourceType in the url?
+ /**
+ * TODO: is this an error?
+ */
@Test
- public void testWithResourceTypeAsQualifier_RelativeUrl() {
-
+ @Ignore
+ public void testMismatchedTypeAndValueType() {
+
ReferenceParam rp = new ReferenceParam();
rp.setValueAsQueryToken(ourCtx, null, ":Location", "Patient/123");
assertEquals("Patient", rp.getResourceType());
@@ -101,14 +124,26 @@ public class ReferenceParamTest {
}
+ @Test
+ public void testDuplicatedTypeAndValueType() {
+
+ ReferenceParam rp = new ReferenceParam();
+ rp.setValueAsQueryToken(ourCtx, null, ":Patient", "Patient/123");
+ assertEquals("Patient", rp.getResourceType());
+ assertEquals("123", rp.getIdPart());
+ assertEquals("Patient/123", rp.getValue());
+ assertEquals(null, rp.getQueryParameterQualifier());
+
+ }
+
// TODO: verify this behavior is correct. Same case as testWithResourceTypeAsQualifier_RelativeUrl()
@Test
public void testWithResourceTypeAsQualifier_AbsoluteUrl() {
-
+
ReferenceParam rp = new ReferenceParam();
rp.setValueAsQueryToken(ourCtx, null, ":Location", "http://a.b/c/d/e");
- assertEquals("d", rp.getResourceType());
- assertEquals("e", rp.getIdPart());
+ assertEquals("Location", rp.getResourceType());
+ assertEquals("http://a.b/c/d/e", rp.getIdPart());
assertEquals("http://a.b/c/d/e", rp.getValue());
assertEquals(null, rp.getQueryParameterQualifier());
@@ -122,7 +157,7 @@ public class ReferenceParamTest {
rp.setValueAsQueryToken(ourCtx, null, ":Location.name", "FOO");
assertEquals("Location", rp.getResourceType());
assertEquals("FOO", rp.getIdPart());
- assertEquals("Location/FOO", rp.getValue());
+ assertEquals("FOO", rp.getValue());
assertEquals(":Location.name", rp.getQueryParameterQualifier());
assertEquals("name", rp.getChain());
@@ -135,7 +170,7 @@ public class ReferenceParamTest {
rp.setValueAsQueryToken(ourCtx, null, ":Patient.identifier", "http://hey.there/a/b|123");
assertEquals("Patient", rp.getResourceType());
assertEquals("http://hey.there/a/b|123", rp.getIdPart());
- assertEquals("Patient/http://hey.there/a/b|123", rp.getValue());
+ assertEquals("http://hey.there/a/b|123", rp.getValue());
assertEquals(":Patient.identifier", rp.getQueryParameterQualifier());
assertEquals("identifier", rp.getChain());
@@ -147,8 +182,8 @@ public class ReferenceParamTest {
ReferenceParam rp = new ReferenceParam();
rp.setValueAsQueryToken(ourCtx, null, ":Patient.identifier", "http://hey.there/a/b|");
assertEquals("Patient", rp.getResourceType());
+ assertEquals("http://hey.there/a/b|", rp.getValue());
assertEquals("http://hey.there/a/b|", rp.getIdPart());
- assertEquals("Patient/http://hey.there/a/b|", rp.getValue());
assertEquals(":Patient.identifier", rp.getQueryParameterQualifier());
assertEquals("identifier", rp.getChain());
@@ -161,7 +196,7 @@ public class ReferenceParamTest {
rp.setValueAsQueryToken(ourCtx, null, ":Patient.identifier", "|abc");
assertEquals("Patient", rp.getResourceType());
assertEquals("|abc", rp.getIdPart());
- assertEquals("Patient/|abc", rp.getValue());
+ assertEquals("|abc", rp.getValue());
assertEquals(":Patient.identifier", rp.getQueryParameterQualifier());
assertEquals("identifier", rp.getChain());
diff --git a/pom.xml b/pom.xml
index 7093027e2d9..ebb542bc664 100755
--- a/pom.xml
+++ b/pom.xml
@@ -45,14 +45,7 @@
-		<repository>
-			<snapshots>
-				<enabled>false</enabled>
-			</snapshots>
-			<id>bintray-dnault-maven</id>
-			<name>bintray</name>
-			<url>https://dl.bintray.com/dnault/maven</url>
-		</repository>
+		<repository>
			<id>oss-snapshot</id>
			<url>https://oss.sonatype.org/content/repositories/snapshots/</url>
			<snapshots>
				<enabled>true</enabled>
			</snapshots>
+			<releases>
+				<enabled>false</enabled>
+			</releases>
diff --git a/src/changes/changes.xml b/src/changes/changes.xml
index f754c34e94c..293b3260a9a 100644
--- a/src/changes/changes.xml
+++ b/src/changes/changes.xml
@@ -193,10 +193,25 @@
resulted in some ValueSets with duplicate codes. This has been corrected by specifying a path with each
filename.
+		<action type="fix">
+			A corner-case bug in the JPA server has been fixed: when performing a search that contained chained reference searches
+			where the value contained slashes (e.g.
+			<![CDATA[<code>Observation?derived-from:DocumentReference.contenttype=application/vnd.mfer</code>]]>)
+			the server could fail to load later pages in the search results.
+		</action>
+		<action type="add">
+			A new flag has been added to the JPA migrator tool that causes the migrator to not try to reduce the length
+			of existing columns in the schema.
+		</action>
Some resource IDs and URLs for LOINC ValueSets and ConceptMaps were inconsistently populated by the
terminology uploader. This has been corrected.
+		<action type="fix">
+			When a resource was updated with a meta.source containing a request id, the meta.source was getting appended
+			with the new request id, resulting in an ever-growing meta.source value. E.g. after the first update it looked
+			like "#9f0a901387128111#5f37835ee38a89e2" when it should only be "#5f37835ee38a89e2". This has been corrected.
+		</action>