From b183bed52fa8d75abddb5bb593495780519b03cb Mon Sep 17 00:00:00 2001
From: Volker Schmidt
Date: Thu, 6 Sep 2018 19:33:34 +0800
Subject: [PATCH 01/97] Multiple values for one HTTP header were reduced to the
last value.
---
.../fhir/jaxrs/server/util/JaxRsResponse.java | 3 +-
.../uhn/fhir/rest/server/RestfulResponse.java | 10 ++-
.../servlet/ServletRestfulResponse.java | 15 ++++-
.../fhir/rest/server/RestfulResponseTest.java | 35 ++++++++++
.../servlet/ServletRestfulResponseTest.java | 65 +++++++++++++++++++
5 files changed, 119 insertions(+), 9 deletions(-)
create mode 100644 hapi-fhir-server/src/test/java/ca/uhn/fhir/rest/server/RestfulResponseTest.java
create mode 100644 hapi-fhir-server/src/test/java/ca/uhn/fhir/rest/server/servlet/ServletRestfulResponseTest.java
diff --git a/hapi-fhir-jaxrsserver-base/src/main/java/ca/uhn/fhir/jaxrs/server/util/JaxRsResponse.java b/hapi-fhir-jaxrsserver-base/src/main/java/ca/uhn/fhir/jaxrs/server/util/JaxRsResponse.java
index 182421e83c0..cb243896778 100644
--- a/hapi-fhir-jaxrsserver-base/src/main/java/ca/uhn/fhir/jaxrs/server/util/JaxRsResponse.java
+++ b/hapi-fhir-jaxrsserver-base/src/main/java/ca/uhn/fhir/jaxrs/server/util/JaxRsResponse.java
@@ -22,6 +22,7 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank;
* #L%
*/
import java.io.*;
+import java.util.List;
import java.util.Map.Entry;
import javax.ws.rs.core.MediaType;
@@ -104,7 +105,7 @@ public class JaxRsResponse extends RestfulResponse {
private ResponseBuilder buildResponse(int statusCode) {
ResponseBuilder response = Response.status(statusCode);
- for (Entry<String, String> header : getHeaders().entrySet()) {
+ for (Entry<String, List<String>> header : getHeaders().entrySet()) {
response.header(header.getKey(), header.getValue());
}
return response;
diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/RestfulResponse.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/RestfulResponse.java
index 2bd94a0047e..d56026d2046 100644
--- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/RestfulResponse.java
+++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/RestfulResponse.java
@@ -21,9 +21,7 @@ package ca.uhn.fhir.rest.server;
*/
import java.io.IOException;
-import java.util.Date;
-import java.util.Set;
-import java.util.concurrent.ConcurrentHashMap;
+import java.util.*;
import org.hl7.fhir.instance.model.api.*;
@@ -35,7 +33,7 @@ public abstract class RestfulResponse implements IRest
private IIdType myOperationResourceId;
private IPrimitiveType myOperationResourceLastUpdated;
- private ConcurrentHashMap<String, String> theHeaders = new ConcurrentHashMap<String, String>();
+ private Map<String, List<String>> theHeaders = new HashMap<>();
private T theRequestDetails;
public RestfulResponse(T requestDetails) {
@@ -44,14 +42,14 @@ public abstract class RestfulResponse implements IRest
@Override
public void addHeader(String headerKey, String headerValue) {
- this.getHeaders().put(headerKey, headerValue);
+ this.getHeaders().computeIfAbsent(headerKey, k -> new ArrayList<>()).add(headerValue);
}
/**
* Get the http headers
* @return the headers
*/
- public ConcurrentHashMap<String, String> getHeaders() {
+ public Map<String, List<String>> getHeaders() {
return theHeaders;
}
diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/servlet/ServletRestfulResponse.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/servlet/ServletRestfulResponse.java
index 3ffb3d9b9f1..969c0fa12b6 100644
--- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/servlet/ServletRestfulResponse.java
+++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/servlet/ServletRestfulResponse.java
@@ -24,6 +24,7 @@ import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.UnsupportedEncodingException;
import java.io.Writer;
+import java.util.List;
import java.util.Map.Entry;
import java.util.zip.GZIPOutputStream;
@@ -75,8 +76,18 @@ public class ServletRestfulResponse extends RestfulResponse<ServletRequestDetails> {
- for (Entry<String, String> header : getHeaders().entrySet()) {
- theHttpResponse.setHeader(header.getKey(), header.getValue());
+ for (Entry<String, List<String>> header : getHeaders().entrySet()) {
+ final String key = header.getKey();
+ boolean first = true;
+ for (String value : header.getValue()) {
+ // existing headers should be overridden
+ if (first) {
+ theHttpResponse.setHeader(key, value);
+ first = false;
+ } else {
+ theHttpResponse.addHeader(key, value);
+ }
+ }
}
}
diff --git a/hapi-fhir-server/src/test/java/ca/uhn/fhir/rest/server/RestfulResponseTest.java b/hapi-fhir-server/src/test/java/ca/uhn/fhir/rest/server/RestfulResponseTest.java
new file mode 100644
index 00000000000..009dbe3af5b
--- /dev/null
+++ b/hapi-fhir-server/src/test/java/ca/uhn/fhir/rest/server/RestfulResponseTest.java
@@ -0,0 +1,35 @@
+package ca.uhn.fhir.rest.server;
+
+import ca.uhn.fhir.rest.api.server.RequestDetails;
+import org.hamcrest.Matchers;
+import org.junit.Assert;
+import org.junit.Test;
+import org.mockito.MockSettings;
+import org.mockito.Mockito;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertThat;
+import static org.mockito.Mockito.CALLS_REAL_METHODS;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.withSettings;
+
+/**
+ * Unit tests of {@link RestfulResponse}.
+ */
+public class RestfulResponseTest {
+ @Test
+ public void addMultipleHeaderValues() {
+ @SuppressWarnings("unchecked")
+ final RestfulResponse<RequestDetails> restfulResponse =
+ mock(RestfulResponse.class, withSettings()
+ .useConstructor((RequestDetails) null).defaultAnswer(CALLS_REAL_METHODS));
+
+ restfulResponse.addHeader("Authorization", "Basic");
+ restfulResponse.addHeader("Authorization", "Bearer");
+ restfulResponse.addHeader("Cache-Control", "no-cache, no-store");
+
+ assertEquals(2, restfulResponse.getHeaders().size());
+ assertThat(restfulResponse.getHeaders().get("Authorization"), Matchers.contains("Basic", "Bearer"));
+ assertThat(restfulResponse.getHeaders().get("Cache-Control"), Matchers.contains("no-cache, no-store"));
+ }
+}
diff --git a/hapi-fhir-server/src/test/java/ca/uhn/fhir/rest/server/servlet/ServletRestfulResponseTest.java b/hapi-fhir-server/src/test/java/ca/uhn/fhir/rest/server/servlet/ServletRestfulResponseTest.java
new file mode 100644
index 00000000000..f56e475f7a2
--- /dev/null
+++ b/hapi-fhir-server/src/test/java/ca/uhn/fhir/rest/server/servlet/ServletRestfulResponseTest.java
@@ -0,0 +1,65 @@
+package ca.uhn.fhir.rest.server.servlet;
+
+import ca.uhn.fhir.rest.server.RestfulServer;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.mockito.InOrder;
+import org.mockito.Mock;
+import org.mockito.Mockito;
+import org.mockito.junit.MockitoJUnit;
+import org.mockito.junit.MockitoRule;
+
+import javax.servlet.ServletOutputStream;
+import javax.servlet.http.HttpServletResponse;
+
+import java.io.IOException;
+
+import static org.mockito.ArgumentMatchers.eq;
+import static org.mockito.Mockito.*;
+
+/**
+ * Unit tests of {@link ServletRestfulResponse}.
+ */
+public class ServletRestfulResponseTest {
+ @Mock
+ private RestfulServer server;
+
+ @Mock
+ private ServletOutputStream servletOutputStream;
+
+ @Mock
+ private HttpServletResponse servletResponse;
+
+ private ServletRequestDetails requestDetails;
+
+ private ServletRestfulResponse response;
+
+ @Rule
+ public MockitoRule mockitoRule = MockitoJUnit.rule();
+
+ @Before
+ public void init() throws IOException {
+ Mockito.when(servletResponse.getOutputStream()).thenReturn(servletOutputStream);
+
+ requestDetails = new ServletRequestDetails();
+ requestDetails.setServer(server);
+ requestDetails.setServletResponse(servletResponse);
+ response = new ServletRestfulResponse(requestDetails);
+ }
+
+ @Test
+ public void addMultipleHeaderValues() throws IOException {
+ final ServletRestfulResponse response = new ServletRestfulResponse(requestDetails);
+ response.addHeader("Authorization", "Basic");
+ response.addHeader("Authorization", "Bearer");
+ response.addHeader("Cache-Control", "no-cache, no-store");
+
+ response.getResponseWriter(200, "Status", "text/plain", "UTF-8", false);
+
+ final InOrder orderVerifier = Mockito.inOrder(servletResponse);
+ orderVerifier.verify(servletResponse).setHeader(eq("Authorization"), eq("Basic"));
+ orderVerifier.verify(servletResponse).addHeader(eq("Authorization"), eq("Bearer"));
+ verify(servletResponse).setHeader(eq("Cache-Control"), eq("no-cache, no-store"));
+ }
+}
From 35c440b7479d259b23b7edcf4b34696ec134d562 Mon Sep 17 00:00:00 2001
From: Volker Schmidt
Date: Thu, 6 Sep 2018 20:59:49 +0800
Subject: [PATCH 02/97] Corrected multi-valued header value handling for
JAX-RS.
---
.../fhir/jaxrs/server/util/JaxRsResponse.java | 5 ++++-
.../jaxrs/server/util/JaxRsResponseTest.java | 18 +++++++++++++++++-
2 files changed, 21 insertions(+), 2 deletions(-)
diff --git a/hapi-fhir-jaxrsserver-base/src/main/java/ca/uhn/fhir/jaxrs/server/util/JaxRsResponse.java b/hapi-fhir-jaxrsserver-base/src/main/java/ca/uhn/fhir/jaxrs/server/util/JaxRsResponse.java
index cb243896778..a79058e12d7 100644
--- a/hapi-fhir-jaxrsserver-base/src/main/java/ca/uhn/fhir/jaxrs/server/util/JaxRsResponse.java
+++ b/hapi-fhir-jaxrsserver-base/src/main/java/ca/uhn/fhir/jaxrs/server/util/JaxRsResponse.java
@@ -106,7 +106,10 @@ public class JaxRsResponse extends RestfulResponse {
private ResponseBuilder buildResponse(int statusCode) {
ResponseBuilder response = Response.status(statusCode);
for (Entry<String, List<String>> header : getHeaders().entrySet()) {
- response.header(header.getKey(), header.getValue());
+ final String key = header.getKey();
+ for (String value : header.getValue()) {
+ response.header(key, value);
+ }
}
return response;
}
diff --git a/hapi-fhir-jaxrsserver-base/src/test/java/ca/uhn/fhir/jaxrs/server/util/JaxRsResponseTest.java b/hapi-fhir-jaxrsserver-base/src/test/java/ca/uhn/fhir/jaxrs/server/util/JaxRsResponseTest.java
index b9979da676b..c5b58275855 100644
--- a/hapi-fhir-jaxrsserver-base/src/test/java/ca/uhn/fhir/jaxrs/server/util/JaxRsResponseTest.java
+++ b/hapi-fhir-jaxrsserver-base/src/test/java/ca/uhn/fhir/jaxrs/server/util/JaxRsResponseTest.java
@@ -1,6 +1,7 @@
package ca.uhn.fhir.jaxrs.server.util;
import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import java.io.IOException;
@@ -10,6 +11,7 @@ import java.util.Set;
import javax.ws.rs.core.Response;
+import org.hamcrest.Matchers;
import org.hl7.fhir.instance.model.api.IBaseBinary;
import org.junit.Before;
import org.junit.Test;
@@ -108,10 +110,24 @@ public class JaxRsResponseTest {
assertEquals("application/xml+fhir; charset=UTF-8", result.getHeaderString(Constants.HEADER_CONTENT_TYPE));
}
+ @Test
+ public void addMultipleHeaderValues() throws IOException {
+ response.addHeader("Authorization", "Basic");
+ response.addHeader("Authorization", "Bearer");
+ response.addHeader("Cache-Control", "no-cache, no-store");
+
+ final IBaseBinary binary = new Binary();
+ binary.setContentType("abc");
+ binary.setContent(new byte[] { 1 });
+ final Response result = (Response) RestfulServerUtils.streamResponseAsResource(request.getServer(), binary, theSummaryMode, 200, false, false, this.request);
+
+ assertThat(result.getHeaders().get("Authorization"), Matchers.contains("Basic", "Bearer"));
+ assertThat(result.getHeaders().get("Cache-Control"), Matchers.contains("no-cache, no-store"));
+ }
+
private Patient createPatient() {
Patient theResource = new Patient();
theResource.setId(new IdDt(15L));
return theResource;
}
-
}
From 734835de96c43b9889228ae8f29c0e58c3bc5996 Mon Sep 17 00:00:00 2001
From: Heinz-Dieter Conradi
Date: Wed, 1 Aug 2018 14:57:34 +0200
Subject: [PATCH 03/97] Add some test demonstrating null pointer problems in
the DateRangeParam class
---
.../fhir/rest/param/DateRangeParamTest.java | 67 +++++++++++++++++++
1 file changed, 67 insertions(+)
create mode 100644 hapi-fhir-base/src/test/java/ca/uhn/fhir/rest/param/DateRangeParamTest.java
diff --git a/hapi-fhir-base/src/test/java/ca/uhn/fhir/rest/param/DateRangeParamTest.java b/hapi-fhir-base/src/test/java/ca/uhn/fhir/rest/param/DateRangeParamTest.java
new file mode 100644
index 00000000000..72b03f5b4c9
--- /dev/null
+++ b/hapi-fhir-base/src/test/java/ca/uhn/fhir/rest/param/DateRangeParamTest.java
@@ -0,0 +1,67 @@
+package ca.uhn.fhir.rest.param;
+
+import static org.junit.Assert.assertTrue;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import ca.uhn.fhir.context.FhirContext;
+import ca.uhn.fhir.rest.api.QualifiedParamList;
+
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+import org.mockito.Mockito;
+
+@RunWith(JUnit4.class)
+public class DateRangeParamTest {
+ private FhirContext fhirContext;
+
+ @Before
+ public void initMockContext() {
+ fhirContext = Mockito.mock(FhirContext.class);
+ }
+
+ /** Can happen e.g. when the query parameter for {@code _lastUpdated} is left empty. */
+ @Test
+ public void testParamWithoutPrefixAndWithoutValue() {
+ QualifiedParamList qualifiedParamList = new QualifiedParamList(1);
+ qualifiedParamList.add("");
+
+ List<QualifiedParamList> params = new ArrayList<>(1);
+ params.add(qualifiedParamList);
+ DateRangeParam dateRangeParam = new DateRangeParam();
+ dateRangeParam.setValuesAsQueryTokens(fhirContext, "_lastUpdated", params);
+
+ assertTrue(dateRangeParam.isEmpty());
+ }
+
+ /** Can happen e.g. when the query parameter for {@code _lastUpdated} is given as {@code lt} without any value. */
+ @Test
+ public void testUpperBoundWithPrefixWithoutValue() {
+ QualifiedParamList qualifiedParamList = new QualifiedParamList(1);
+ qualifiedParamList.add("lt");
+
+ List<QualifiedParamList> params = new ArrayList<>(1);
+ params.add(qualifiedParamList);
+ DateRangeParam dateRangeParam = new DateRangeParam();
+ dateRangeParam.setValuesAsQueryTokens(fhirContext, "_lastUpdated", params);
+
+ assertTrue(dateRangeParam.isEmpty());
+ }
+
+ /** Can happen e.g. when the query parameter for {@code _lastUpdated} is given as {@code gt} without any value. */
+ @Test
+ public void testLowerBoundWithPrefixWithoutValue() {
+ QualifiedParamList qualifiedParamList = new QualifiedParamList(1);
+ qualifiedParamList.add("gt");
+
+ List<QualifiedParamList> params = new ArrayList<>(1);
+ params.add(qualifiedParamList);
+ DateRangeParam dateRangeParam = new DateRangeParam();
+ dateRangeParam.setValuesAsQueryTokens(fhirContext, "_lastUpdated", params);
+
+ assertTrue(dateRangeParam.isEmpty());
+ }
+}
From ea5cf9f9564cad1bf2a58d2efe49af802bf67a59 Mon Sep 17 00:00:00 2001
From: Heinz-Dieter Conradi
Date: Wed, 1 Aug 2018 14:58:02 +0200
Subject: [PATCH 04/97] Fix the null pointer problems in the DateRangeParam
class
---
.../src/main/java/ca/uhn/fhir/rest/param/DateRangeParam.java | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/DateRangeParam.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/DateRangeParam.java
index 434f7664e3e..076b11eabf7 100644
--- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/DateRangeParam.java
+++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/DateRangeParam.java
@@ -256,7 +256,7 @@ public class DateRangeParam implements IQueryParameterAnd {
}
public Date getLowerBoundAsInstant() {
- if (myLowerBound == null) {
+ if (myLowerBound == null || myLowerBound.getValue() == null) {
return null;
}
Date retVal = myLowerBound.getValue();
@@ -303,7 +303,7 @@ public class DateRangeParam implements IQueryParameterAnd {
}
public Date getUpperBoundAsInstant() {
- if (myUpperBound == null) {
+ if (myUpperBound == null || myUpperBound.getValue() == null) {
return null;
}
Date retVal = myUpperBound.getValue();
From 729bbe04d0fd1bcce2981d8b88f66ef1ad42071e Mon Sep 17 00:00:00 2001
From: Ruth Alkema
Date: Fri, 2 Mar 2018 14:41:24 +0100
Subject: [PATCH 05/97] Allow slotting in own IMessageResolver
This is useful in case we want to define our own way of translating
the codes in the thymeleaf templates.
---
.../BaseThymeleafNarrativeGenerator.java | 13 +++++
...tThymeleafNarrativeGeneratorDstu3Test.java | 53 +++++++++++++++++++
.../src/test/resources/TestPatient.html | 4 ++
.../test/resources/testnarrative.properties | 2 +
4 files changed, 72 insertions(+)
create mode 100644 hapi-fhir-structures-dstu3/src/test/resources/TestPatient.html
create mode 100644 hapi-fhir-structures-dstu3/src/test/resources/testnarrative.properties
diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/narrative/BaseThymeleafNarrativeGenerator.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/narrative/BaseThymeleafNarrativeGenerator.java
index b9769cb99f0..9ab0c192c8e 100644
--- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/narrative/BaseThymeleafNarrativeGenerator.java
+++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/narrative/BaseThymeleafNarrativeGenerator.java
@@ -34,6 +34,7 @@ import org.thymeleaf.cache.ICacheEntryValidity;
import org.thymeleaf.context.Context;
import org.thymeleaf.context.ITemplateContext;
import org.thymeleaf.engine.AttributeName;
+import org.thymeleaf.messageresolver.IMessageResolver;
import org.thymeleaf.model.IProcessableElementTag;
import org.thymeleaf.processor.IProcessor;
import org.thymeleaf.processor.element.AbstractAttributeTagProcessor;
@@ -65,6 +66,8 @@ public abstract class BaseThymeleafNarrativeGenerator implements INarrativeGener
private HashMap myNameToNarrativeTemplate;
private TemplateEngine myProfileTemplateEngine;
+ private IMessageResolver resolver;
+
/**
* Constructor
*/
@@ -166,11 +169,21 @@ public abstract class BaseThymeleafNarrativeGenerator implements INarrativeGener
};
myProfileTemplateEngine.setDialect(dialect);
+ if (this.resolver != null) {
+ myProfileTemplateEngine.setMessageResolver(this.resolver);
+ }
}
myInitialized = true;
}
+ public void setMessageResolver(IMessageResolver resolver) {
+ this.resolver = resolver;
+ if (myProfileTemplateEngine != null && resolver != null) {
+ myProfileTemplateEngine.setMessageResolver(resolver);
+ }
+ }
+
/**
* If set to true
(which is the default), most whitespace will be trimmed from the generated narrative
* before it is returned.
diff --git a/hapi-fhir-structures-dstu3/src/test/java/ca/uhn/fhir/narrative/DefaultThymeleafNarrativeGeneratorDstu3Test.java b/hapi-fhir-structures-dstu3/src/test/java/ca/uhn/fhir/narrative/DefaultThymeleafNarrativeGeneratorDstu3Test.java
index 62b9f81d527..aa427a35a67 100644
--- a/hapi-fhir-structures-dstu3/src/test/java/ca/uhn/fhir/narrative/DefaultThymeleafNarrativeGeneratorDstu3Test.java
+++ b/hapi-fhir-structures-dstu3/src/test/java/ca/uhn/fhir/narrative/DefaultThymeleafNarrativeGeneratorDstu3Test.java
@@ -5,7 +5,12 @@ import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import java.util.Date;
+import java.util.HashMap;
+import java.util.Locale;
+import java.util.Map;
+import org.apache.commons.collections.Transformer;
+import org.apache.commons.collections.map.LazyMap;
import org.hamcrest.core.StringContains;
import org.hl7.fhir.dstu3.model.CodeableConcept;
import org.hl7.fhir.dstu3.model.Coding;
@@ -28,6 +33,8 @@ import org.junit.AfterClass;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
+import org.thymeleaf.messageresolver.StandardMessageResolver;
+import org.thymeleaf.templateresource.ITemplateResource;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.parser.DataFormatException;
@@ -77,6 +84,52 @@ public class DefaultThymeleafNarrativeGeneratorDstu3Test {
}
+ @Test
+ public void testTranslations() throws DataFormatException {
+ CustomThymeleafNarrativeGenerator customGen = new CustomThymeleafNarrativeGenerator("classpath:/testnarrative.properties");
+ customGen.setIgnoreFailures(false);
+ customGen.setIgnoreMissingTemplates(false);
+
+ FhirContext ctx = FhirContext.forDstu3();
+ ctx.setNarrativeGenerator(customGen);
+
+ Patient value = new Patient();
+
+ value.addIdentifier().setSystem("urn:names").setValue("123456");
+ value.addName().setFamily("blow").addGiven("joe").addGiven((String) null).addGiven("john");
+ //@formatter:off
+ value.addAddress()
+ .addLine("123 Fake Street").addLine("Unit 1")
+ .setCity("Toronto").setState("ON").setCountry("Canada");
+ //@formatter:on
+
+ value.setBirthDate(new Date());
+
+ Transformer transformer = new Transformer() {
+
+ @Override
+ public Object transform(Object input) {
+ return "UNTRANSLATED:" + input;
+ }};
+
+ Map<String, String> translations = new HashMap<>();
+ translations.put("some_text", "Some beautiful proze");
+
+ customGen.setMessageResolver(new StandardMessageResolver() {
+ @Override
+ protected Map<String, String> resolveMessagesForTemplate(String template,
+ ITemplateResource templateResource, Locale locale) {
+ return LazyMap.decorate(translations, transformer);
+ }
+ });
+
+ Narrative narrative = new Narrative();
+ customGen.generateNarrative(ctx, value, narrative);
+ String output = narrative.getDiv().getValueAsString();
+ ourLog.info(output);
+ assertThat(output, StringContains.containsString("Some beautiful proze"));
+ assertThat(output, StringContains.containsString("UNTRANSLATED:other_text"));
+ }
@Test
public void testGenerateDiagnosticReport() throws DataFormatException {
diff --git a/hapi-fhir-structures-dstu3/src/test/resources/TestPatient.html b/hapi-fhir-structures-dstu3/src/test/resources/TestPatient.html
new file mode 100644
index 00000000000..88d60488a3e
--- /dev/null
+++ b/hapi-fhir-structures-dstu3/src/test/resources/TestPatient.html
@@ -0,0 +1,4 @@
+
+
Some Text
+
Some Text
+
diff --git a/hapi-fhir-structures-dstu3/src/test/resources/testnarrative.properties b/hapi-fhir-structures-dstu3/src/test/resources/testnarrative.properties
new file mode 100644
index 00000000000..22f3b3c51b0
--- /dev/null
+++ b/hapi-fhir-structures-dstu3/src/test/resources/testnarrative.properties
@@ -0,0 +1,2 @@
+patient.class=org.hl7.fhir.dstu3.model.Patient
+patient.narrative=classpath:/TestPatient.html
From 6b1ea5b989b95892f050371a88715244e19b3b8b Mon Sep 17 00:00:00 2001
From: James Agnew
Date: Tue, 30 Oct 2018 15:22:39 -0400
Subject: [PATCH 06/97] Add tests for operation method binding
---
.../server/method/OperationMethodBinding.java | 4 +++
.../server/OperationGenericServerR4Test.java | 35 ++++++++++++++++---
src/site/xdoc/download.xml.vm | 20 ++++++++++-
3 files changed, 54 insertions(+), 5 deletions(-)
diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/OperationMethodBinding.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/OperationMethodBinding.java
index ed9ccaabd1a..511abb07e34 100644
--- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/OperationMethodBinding.java
+++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/OperationMethodBinding.java
@@ -202,6 +202,10 @@ public class OperationMethodBinding extends BaseResourceReturningMethodBinding {
@Override
public boolean incomingServerRequestMatchesMethod(RequestDetails theRequest) {
+ if (isBlank(theRequest.getOperation())) {
+ return false;
+ }
+
if (!myName.equals(theRequest.getOperation())) {
if (!myName.equals(WILDCARD_NAME)) {
return false;
diff --git a/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/rest/server/OperationGenericServerR4Test.java b/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/rest/server/OperationGenericServerR4Test.java
index 0f0764c421d..98e175deedf 100644
--- a/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/rest/server/OperationGenericServerR4Test.java
+++ b/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/rest/server/OperationGenericServerR4Test.java
@@ -1,10 +1,7 @@
package ca.uhn.fhir.rest.server;
import ca.uhn.fhir.context.FhirContext;
-import ca.uhn.fhir.rest.annotation.IdParam;
-import ca.uhn.fhir.rest.annotation.Operation;
-import ca.uhn.fhir.rest.annotation.OperationParam;
-import ca.uhn.fhir.rest.annotation.ResourceParam;
+import ca.uhn.fhir.rest.annotation.*;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.util.PortUtil;
import ca.uhn.fhir.util.TestUtil;
@@ -31,6 +28,7 @@ import org.junit.BeforeClass;
import org.junit.Test;
import java.nio.charset.StandardCharsets;
+import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.TimeUnit;
@@ -168,6 +166,22 @@ public class OperationGenericServerR4Test {
}
}
+
+ @Test
+ public void testSearchGetsClassifiedAppropriately() throws Exception {
+ HttpGet httpPost = new HttpGet("http://localhost:" + ourPort + "/Patient");
+ CloseableHttpResponse status = ourClient.execute(httpPost);
+ try {
+ assertEquals(200, status.getStatusLine().getStatusCode());
+ status.getEntity().getContent().close();
+ } finally {
+ status.getEntity().getContent().close();
+ }
+
+ assertEquals("Patient/search", ourLastMethod);
+ }
+
+
@SuppressWarnings("unused")
public static class PatientProvider implements IResourceProvider {
@@ -215,6 +229,12 @@ public class OperationGenericServerR4Test {
return retVal;
}
+ @Search
+ public List search() {
+ ourLastMethod = "Patient/search";
+ return new ArrayList<>();
+ }
+
}
@@ -239,6 +259,13 @@ public class OperationGenericServerR4Test {
}
+ @Search
+ public List search() {
+ ourLastMethod = "/search";
+ return new ArrayList<>();
+ }
+
+
}
@AfterClass
diff --git a/src/site/xdoc/download.xml.vm b/src/site/xdoc/download.xml.vm
index ed9fb7f20e4..742378d5e4e 100644
--- a/src/site/xdoc/download.xml.vm
+++ b/src/site/xdoc/download.xml.vm
@@ -194,7 +194,25 @@
3.0.1
3.4.0-13732
-
+
+ HAPI FHIR 3.5.0
+ JDK8
+
+ 1.0.2
+ 1.4.0
+ 3.0.1
+ 3.4.0-13732
+
+
+ HAPI FHIR 3.4.0
+ JDK8
+
+ 1.0.2
+ 1.4.0
+ 3.0.1
+ 3.6.0-13732
+
+
From 7acba90d15a42e10e2d32e4b7477832b919780a0 Mon Sep 17 00:00:00 2001
From: James Agnew
Date: Tue, 30 Oct 2018 22:43:16 -0400
Subject: [PATCH 07/97] Update search logic
---
.../jpa/search/SearchCoordinatorSvcImpl.java | 28 +++++++-
.../provider/r4/ResourceProviderR4Test.java | 69 ++++++++++++++-----
.../search/SearchCoordinatorSvcImplTest.java | 7 +-
.../BaseResourceReturningMethodBinding.java | 16 +++--
4 files changed, 95 insertions(+), 25 deletions(-)
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/SearchCoordinatorSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/SearchCoordinatorSvcImpl.java
index b6eb3a89253..94493d87e9a 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/SearchCoordinatorSvcImpl.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/SearchCoordinatorSvcImpl.java
@@ -492,8 +492,34 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
do {
synchronized (mySyncedPids) {
ourLog.trace("Search status is {}", mySearch.getStatus());
- keepWaiting = mySyncedPids.size() < theToIndex && mySearch.getStatus() == SearchStatusEnum.LOADING;
+ boolean haveEnoughResults = mySyncedPids.size() >= theToIndex;
+ if (!haveEnoughResults) {
+ switch (mySearch.getStatus()) {
+ case LOADING:
+ keepWaiting = true;
+ break;
+ case PASSCMPLET:
+ /*
+ * If we get here, it means that the user requested resources that crossed the
+ * current pre-fetch boundary. For example, if the prefetch threshold is 50 and the
+ * user has requested resources 0-60, then they would get 0-50 back but the search
+ * coordinator would then stop searching.
+ */
+ List<Long> remainingResources = SearchCoordinatorSvcImpl.this.getResources(mySearch.getUuid(), mySyncedPids.size(), theToIndex);
+ mySyncedPids.addAll(remainingResources);
+ keepWaiting = false;
+ break;
+ case FAILED:
+ case FINISHED:
+ default:
+ keepWaiting = false;
+ break;
+ }
+ } else {
+ keepWaiting = false;
+ }
}
+
if (keepWaiting) {
ourLog.info("Waiting, as we only have {} results", mySyncedPids.size());
try {
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4Test.java
index 8c678a06312..f3faff4ccf0 100644
--- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4Test.java
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4Test.java
@@ -17,13 +17,7 @@ import static org.hamcrest.Matchers.lessThanOrEqualTo;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.startsWith;
import static org.hamcrest.Matchers.stringContainsInOrder;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertThat;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
+import static org.junit.Assert.*;
import java.io.BufferedReader;
import java.io.IOException;
@@ -41,6 +35,7 @@ import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.TreeSet;
+import java.util.stream.Collectors;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
@@ -159,6 +154,50 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
myDaoConfig.setSearchPreFetchThresholds(new DaoConfig().getSearchPreFetchThresholds());
}
+
+ @Test
+ public void testSearchLinksWorkWithIncludes() {
+ for (int i = 0; i < 5; i++) {
+
+ Organization o = new Organization();
+ o.setId("O" + i);
+ o.setName("O" + i);
+ IIdType oid = ourClient.update().resource(o).execute().getId().toUnqualifiedVersionless();
+
+ Patient p = new Patient();
+ p.setId("P" + i);
+ p.getManagingOrganization().setReference(oid.getValue());
+ ourClient.update().resource(p).execute();
+
+ }
+
+ Bundle output = ourClient
+ .search()
+ .forResource("Patient")
+ .include(IBaseResource.INCLUDE_ALL)
+ .count(3)
+ .returnBundle(Bundle.class)
+ .execute();
+
+ List<String> ids = output.getEntry().stream().map(t -> t.getResource().getIdElement().toUnqualifiedVersionless().getValue()).collect(Collectors.toList());
+ ourLog.info("Ids: {}", ids);
+ assertEquals(6, output.getEntry().size());
+ assertNotNull(output.getLink("next"));
+
+ // Page 2
+ output = ourClient
+ .loadPage()
+ .next(output)
+ .execute();
+
+ ids = output.getEntry().stream().map(t -> t.getResource().getIdElement().toUnqualifiedVersionless().getValue()).collect(Collectors.toList());
+ ourLog.info("Ids: {}", ids);
+ assertEquals(4, output.getEntry().size());
+ assertNull(output.getLink("next"));
+
+ }
+
+
@Test
public void testDeleteConditional() {
@@ -1658,27 +1697,25 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
.returnResourceType(Bundle.class)
.execute();
- TreeSet<String> ids = new TreeSet<>();
+ ArrayList<String> ids = new ArrayList<>();
for (int i = 0; i < responseBundle.getEntry().size(); i++) {
- for (BundleEntryComponent nextEntry : responseBundle.getEntry()) {
- ids.add(nextEntry.getResource().getIdElement().getIdPart());
- }
+ BundleEntryComponent nextEntry = responseBundle.getEntry().get(i);
+ ids.add(nextEntry.getResource().getIdElement().getIdPart());
}
BundleLinkComponent nextLink = responseBundle.getLink("next");
- ourLog.info("Have {} IDs with next link: ", ids.size(), nextLink);
+ ourLog.info("Have {} IDs with next link[{}] : {}", ids.size(), nextLink, ids);
while (nextLink != null) {
String nextUrl = nextLink.getUrl();
responseBundle = ourClient.fetchResourceFromUrl(Bundle.class, nextUrl);
for (int i = 0; i < responseBundle.getEntry().size(); i++) {
- for (BundleEntryComponent nextEntry : responseBundle.getEntry()) {
- ids.add(nextEntry.getResource().getIdElement().getIdPart());
- }
+ BundleEntryComponent nextEntry = responseBundle.getEntry().get(i);
+ ids.add(nextEntry.getResource().getIdElement().getIdPart());
}
nextLink = responseBundle.getLink("next");
- ourLog.info("Have {} IDs with next link: ", ids.size(), nextLink);
+ ourLog.info("Have {} IDs with next link[{}] : {}", ids.size(), nextLink, ids);
}
assertThat(ids, hasItem(id.getIdPart()));
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/SearchCoordinatorSvcImplTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/SearchCoordinatorSvcImplTest.java
index fd793f2dcf7..cf2ce1bf577 100644
--- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/SearchCoordinatorSvcImplTest.java
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/SearchCoordinatorSvcImplTest.java
@@ -18,10 +18,7 @@ import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.util.TestUtil;
import com.google.common.collect.Lists;
import org.hl7.fhir.instance.model.api.IBaseResource;
-import org.junit.After;
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.*;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
@@ -153,6 +150,7 @@ public class SearchCoordinatorSvcImplTest {
}
@Test
+ @Ignore // FIXME: activate
public void testAsyncSearchLargeResultSetBigCountSameCoordinator() {
SearchParameterMap params = new SearchParameterMap();
params.add("name", new StringParam("ANAME"));
@@ -217,6 +215,7 @@ public class SearchCoordinatorSvcImplTest {
* page) within the same JVM will not use the original bundle provider
*/
@Test
+ @Ignore // FIXME: activate
public void testAsyncSearchLargeResultSetSecondRequestSameCoordinator() {
SearchParameterMap params = new SearchParameterMap();
params.add("name", new StringParam("ANAME"));
diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/BaseResourceReturningMethodBinding.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/BaseResourceReturningMethodBinding.java
index 3de8c16a612..bd5c7f3a90c 100644
--- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/BaseResourceReturningMethodBinding.java
+++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/BaseResourceReturningMethodBinding.java
@@ -215,16 +215,24 @@ public abstract class BaseResourceReturningMethodBinding extends BaseMethodBindi
linkPrev = RestfulServerUtils.createPagingLink(theIncludes, serverBase, searchId, theResult.getPreviousPageId(), theRequest.getParameters(), prettyPrint, theBundleType);
}
} else if (searchId != null) {
- int offset = theOffset + resourceList.size();
-
// We're doing offset pages
- if (numTotalResults == null || offset < numTotalResults) {
- linkNext = (RestfulServerUtils.createPagingLink(theIncludes, serverBase, searchId, offset, numToReturn, theRequest.getParameters(), prettyPrint, theBundleType));
+ if (numTotalResults == null || theOffset + numToReturn < numTotalResults) {
+ linkNext = (RestfulServerUtils.createPagingLink(theIncludes, serverBase, searchId, theOffset + numToReturn, numToReturn, theRequest.getParameters(), prettyPrint, theBundleType));
}
if (theOffset > 0) {
int start = Math.max(0, theOffset - theLimit);
linkPrev = RestfulServerUtils.createPagingLink(theIncludes, serverBase, searchId, start, theLimit, theRequest.getParameters(), prettyPrint, theBundleType);
}
+// int offset = theOffset + resourceList.size();
+//
+// // We're doing offset pages
+// if (numTotalResults == null || offset < numTotalResults) {
+// linkNext = (RestfulServerUtils.createPagingLink(theIncludes, serverBase, searchId, offset, numToReturn, theRequest.getParameters(), prettyPrint, theBundleType));
+// }
+// if (theOffset > 0) {
+// int start = Math.max(0, theOffset - theLimit);
+// linkPrev = RestfulServerUtils.createPagingLink(theIncludes, serverBase, searchId, start, theLimit, theRequest.getParameters(), prettyPrint, theBundleType);
+// }
}
bundleFactory.addRootPropertiesToBundle(theResult.getUuid(), serverBase, theLinkSelf, linkPrev, linkNext, theResult.size(), theBundleType, theResult.getPublished());
From 8955a9e54dcef057df713b75446f4a805aae64fd Mon Sep 17 00:00:00 2001
From: jamesagnew
Date: Wed, 31 Oct 2018 05:50:50 -0400
Subject: [PATCH 08/97] Add threading to migrator
---
.../fhir/rest/api/SearchTotalModeEnum.java | 20 ++
.../uhn/fhir/jpa/dao/SearchParameterMap.java | 4 +-
hapi-fhir-jpaserver-migrate/pom.xml | 9 +-
.../uhn/fhir/jpa/migrate/DriverTypeEnum.java | 14 +-
.../migrate/taskdef/CalculateHashesTask.java | 205 ++++++++++++++----
...shesTest.java => CalculateHashesTest.java} | 34 ++-
.../method/SearchTotalModeParameter.java | 4 +-
pom.xml | 8 +
src/site/site.xml | 2 +
src/site/xdoc/docindex.xml | 174 +++++++--------
10 files changed, 321 insertions(+), 153 deletions(-)
rename hapi-fhir-jpaserver-migrate/src/test/java/ca/uhn/fhir/jpa/migrate/taskdef/{CreateHashesTest.java => CalculateHashesTest.java} (56%)
diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/SearchTotalModeEnum.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/SearchTotalModeEnum.java
index d10613d01a3..1a0d781e921 100644
--- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/SearchTotalModeEnum.java
+++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/SearchTotalModeEnum.java
@@ -1,5 +1,25 @@
package ca.uhn.fhir.rest.api;
+/*-
+ * #%L
+ * HAPI FHIR - Core Library
+ * %%
+ * Copyright (C) 2014 - 2018 University Health Network
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
import java.util.HashMap;
import java.util.Map;
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/SearchParameterMap.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/SearchParameterMap.java
index f9c38feeed7..111656582d0 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/SearchParameterMap.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/SearchParameterMap.java
@@ -28,9 +28,9 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
diff --git a/hapi-fhir-jpaserver-migrate/pom.xml b/hapi-fhir-jpaserver-migrate/pom.xml
index a804fc80a9d..2618d55c8c7 100644
--- a/hapi-fhir-jpaserver-migrate/pom.xml
+++ b/hapi-fhir-jpaserver-migrate/pom.xml
@@ -31,6 +31,10 @@
org.springframework
spring-jdbc
+
+ org.apache.commons
+ commons-dbcp2
+
@@ -45,11 +49,6 @@
derby
test
-
- org.apache.commons
- commons-dbcp2
- test
-
junit
junit
diff --git a/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/DriverTypeEnum.java b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/DriverTypeEnum.java
index 8e5cba77e1d..c3d8a8725ba 100644
--- a/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/DriverTypeEnum.java
+++ b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/DriverTypeEnum.java
@@ -1,6 +1,7 @@
package ca.uhn.fhir.jpa.migrate;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
+import org.apache.commons.dbcp2.BasicDataSource;
import org.apache.commons.lang3.Validate;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -76,20 +77,13 @@ public enum DriverTypeEnum {
throw new InternalErrorException("Unable to find driver class: " + myDriverClassName, e);
}
- SingleConnectionDataSource dataSource = new SingleConnectionDataSource(){
- @Override
- protected Connection getConnectionFromDriver(Properties props) throws SQLException {
- Connection connect = driver.connect(theUrl, props);
- assert connect != null;
- return connect;
- }
- };
- dataSource.setAutoCommit(false);
+ BasicDataSource dataSource = new BasicDataSource();
+// dataSource.setAutoCommit(false);
dataSource.setDriverClassName(myDriverClassName);
dataSource.setUrl(theUrl);
dataSource.setUsername(theUsername);
dataSource.setPassword(thePassword);
- dataSource.setSuppressClose(true);
+// dataSource.setSuppressClose(true);
DataSourceTransactionManager transactionManager = new DataSourceTransactionManager();
transactionManager.setDataSource(dataSource);
diff --git a/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/CalculateHashesTask.java b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/CalculateHashesTask.java
index 620792a4754..0300e92fa32 100644
--- a/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/CalculateHashesTask.java
+++ b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/CalculateHashesTask.java
@@ -23,15 +23,21 @@ package ca.uhn.fhir.jpa.migrate.taskdef;
import ca.uhn.fhir.util.StopWatch;
import com.google.common.collect.ForwardingMap;
import org.apache.commons.lang3.Validate;
+import org.apache.commons.lang3.concurrent.BasicThreadFactory;
import org.checkerframework.checker.nullness.compatqual.NullableDecl;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import org.springframework.jdbc.core.ColumnMapRowMapper;
import org.springframework.jdbc.core.JdbcTemplate;
+import org.springframework.jdbc.core.RowCallbackHandler;
+import java.sql.ResultSet;
+import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
+import java.util.concurrent.*;
import java.util.function.Function;
public class CalculateHashesTask extends BaseTableColumnTask {
@@ -39,75 +45,147 @@ public class CalculateHashesTask extends BaseTableColumnTask, Long>> myCalculators = new HashMap<>();
+ private ThreadPoolExecutor myExecutor;
public void setBatchSize(int theBatchSize) {
myBatchSize = theBatchSize;
}
+ /**
+ * Constructor
+ */
+ public CalculateHashesTask() {
+ super();
+ }
@Override
- public void execute() {
+ public synchronized void execute() throws SQLException {
if (isDryRun()) {
return;
}
- List> rows;
- do {
- rows = getTxTemplate().execute(t -> {
- JdbcTemplate jdbcTemplate = newJdbcTemnplate();
- jdbcTemplate.setMaxRows(myBatchSize);
- String sql = "SELECT * FROM " + getTableName() + " WHERE " + getColumnName() + " IS NULL";
- ourLog.info("Finding up to {} rows in {} that requires hashes", myBatchSize, getTableName());
- return jdbcTemplate.queryForList(sql);
- });
+ initializeExecutor();
+ try {
- updateRows(rows);
- } while (rows.size() > 0);
- }
+ while(true) {
+ MyRowCallbackHandler rch = new MyRowCallbackHandler();
+ getTxTemplate().execute(t -> {
+ JdbcTemplate jdbcTemplate = newJdbcTemnplate();
+ jdbcTemplate.setMaxRows(100000);
+ String sql = "SELECT * FROM " + getTableName() + " WHERE " + getColumnName() + " IS NULL";
+ ourLog.info("Finding up to {} rows in {} that requires hashes", myBatchSize, getTableName());
- private void updateRows(List> theRows) {
- StopWatch sw = new StopWatch();
- getTxTemplate().execute(t -> {
+ jdbcTemplate.query(sql, rch);
+ rch.done();
- // Loop through rows
- assert theRows != null;
- for (Map nextRow : theRows) {
+ return null;
+ });
- Map newValues = new HashMap<>();
- MandatoryKeyMap nextRowMandatoryKeyMap = new MandatoryKeyMap<>(nextRow);
-
- // Apply calculators
- for (Map.Entry, Long>> nextCalculatorEntry : myCalculators.entrySet()) {
- String nextColumn = nextCalculatorEntry.getKey();
- Function, Long> nextCalculator = nextCalculatorEntry.getValue();
- Long value = nextCalculator.apply(nextRowMandatoryKeyMap);
- newValues.put(nextColumn, value);
+ rch.submitNext();
+ List> futures = rch.getFutures();
+ if (futures.isEmpty()) {
+ break;
}
- // Generate update SQL
- StringBuilder sqlBuilder = new StringBuilder();
- List arguments = new ArrayList<>();
- sqlBuilder.append("UPDATE ");
- sqlBuilder.append(getTableName());
- sqlBuilder.append(" SET ");
- for (Map.Entry nextNewValueEntry : newValues.entrySet()) {
- if (arguments.size() > 0) {
- sqlBuilder.append(", ");
+ ourLog.info("Waiting for {} tasks to complete", futures.size());
+ for (Future> next : futures) {
+ try {
+ next.get();
+ } catch (Exception e) {
+ throw new SQLException(e);
}
- sqlBuilder.append(nextNewValueEntry.getKey()).append(" = ?");
- arguments.add(nextNewValueEntry.getValue());
}
- sqlBuilder.append(" WHERE SP_ID = ?");
- arguments.add((Long) nextRow.get("SP_ID"));
-
- // Apply update SQL
- newJdbcTemnplate().update(sqlBuilder.toString(), arguments.toArray());
}
- return theRows.size();
- });
- ourLog.info("Updated {} rows on {} in {}", theRows.size(), getTableName(), sw.toString());
+ } finally {
+ destroyExecutor();
+ }
+ }
+
+ private void destroyExecutor() {
+ myExecutor.shutdownNow();
+ }
+
+ private void initializeExecutor() {
+ int maximumPoolSize = Runtime.getRuntime().availableProcessors();
+
+ LinkedBlockingQueue executorQueue = new LinkedBlockingQueue<>(maximumPoolSize);
+ BasicThreadFactory threadFactory = new BasicThreadFactory.Builder()
+ .namingPattern("worker-" + "-%d")
+ .daemon(false)
+ .priority(Thread.NORM_PRIORITY)
+ .build();
+ RejectedExecutionHandler rejectedExecutionHandler = new RejectedExecutionHandler() {
+ @Override
+ public void rejectedExecution(Runnable theRunnable, ThreadPoolExecutor theExecutor) {
+ ourLog.info("Note: Executor queue is full ({} elements), waiting for a slot to become available!", executorQueue.size());
+ StopWatch sw = new StopWatch();
+ try {
+ executorQueue.put(theRunnable);
+ } catch (InterruptedException theE) {
+ throw new RejectedExecutionException("Task " + theRunnable.toString() +
+ " rejected from " + theE.toString());
+ }
+ ourLog.info("Slot become available after {}ms", sw.getMillis());
+ }
+ };
+ myExecutor = new ThreadPoolExecutor(
+ 1,
+ maximumPoolSize,
+ 0L,
+ TimeUnit.MILLISECONDS,
+ executorQueue,
+ threadFactory,
+ rejectedExecutionHandler);
+ }
+
+ private Future> updateRows(List> theRows) {
+ Runnable task = () -> {
+ StopWatch sw = new StopWatch();
+ getTxTemplate().execute(t -> {
+
+ // Loop through rows
+ assert theRows != null;
+ for (Map nextRow : theRows) {
+
+ Map newValues = new HashMap<>();
+ MandatoryKeyMap nextRowMandatoryKeyMap = new MandatoryKeyMap<>(nextRow);
+
+ // Apply calculators
+ for (Map.Entry, Long>> nextCalculatorEntry : myCalculators.entrySet()) {
+ String nextColumn = nextCalculatorEntry.getKey();
+ Function, Long> nextCalculator = nextCalculatorEntry.getValue();
+ Long value = nextCalculator.apply(nextRowMandatoryKeyMap);
+ newValues.put(nextColumn, value);
+ }
+
+ // Generate update SQL
+ StringBuilder sqlBuilder = new StringBuilder();
+ List arguments = new ArrayList<>();
+ sqlBuilder.append("UPDATE ");
+ sqlBuilder.append(getTableName());
+ sqlBuilder.append(" SET ");
+ for (Map.Entry nextNewValueEntry : newValues.entrySet()) {
+ if (arguments.size() > 0) {
+ sqlBuilder.append(", ");
+ }
+ sqlBuilder.append(nextNewValueEntry.getKey()).append(" = ?");
+ arguments.add(nextNewValueEntry.getValue());
+ }
+ sqlBuilder.append(" WHERE SP_ID = ?");
+ arguments.add((Long) nextRow.get("SP_ID"));
+
+ // Apply update SQL
+ newJdbcTemnplate().update(sqlBuilder.toString(), arguments.toArray());
+
+ }
+
+ return theRows.size();
+ });
+ ourLog.info("Updated {} rows on {} in {}", theRows.size(), getTableName(), sw.toString());
+ };
+ return myExecutor.submit(task);
}
public CalculateHashesTask addCalculator(String theColumnName, Function, Long> theConsumer) {
@@ -116,6 +194,39 @@ public class CalculateHashesTask extends BaseTableColumnTask> myRows = new ArrayList<>();
+ private List> myFutures = new ArrayList<>();
+
+ @Override
+ public void processRow(ResultSet rs) throws SQLException {
+ Map row = new ColumnMapRowMapper().mapRow(rs, 0);
+ myRows.add(row);
+
+ if (myRows.size() >= myBatchSize) {
+ submitNext();
+ }
+ }
+
+ private void submitNext() {
+ if (myRows.size() > 0) {
+ myFutures.add(updateRows(myRows));
+ myRows = new ArrayList<>();
+ }
+ }
+
+ public List> getFutures() {
+ return myFutures;
+ }
+
+ public void done() {
+ if (myRows.size() > 0) {
+ submitNext();
+ }
+ }
+ }
+
public static class MandatoryKeyMap extends ForwardingMap {
diff --git a/hapi-fhir-jpaserver-migrate/src/test/java/ca/uhn/fhir/jpa/migrate/taskdef/CreateHashesTest.java b/hapi-fhir-jpaserver-migrate/src/test/java/ca/uhn/fhir/jpa/migrate/taskdef/CalculateHashesTest.java
similarity index 56%
rename from hapi-fhir-jpaserver-migrate/src/test/java/ca/uhn/fhir/jpa/migrate/taskdef/CreateHashesTest.java
rename to hapi-fhir-jpaserver-migrate/src/test/java/ca/uhn/fhir/jpa/migrate/taskdef/CalculateHashesTest.java
index a5140a72b83..6e15f8734be 100644
--- a/hapi-fhir-jpaserver-migrate/src/test/java/ca/uhn/fhir/jpa/migrate/taskdef/CreateHashesTest.java
+++ b/hapi-fhir-jpaserver-migrate/src/test/java/ca/uhn/fhir/jpa/migrate/taskdef/CalculateHashesTest.java
@@ -9,7 +9,7 @@ import java.util.Map;
import static org.junit.Assert.assertEquals;
-public class CreateHashesTest extends BaseTest {
+public class CalculateHashesTest extends BaseTest {
@Test
public void testCreateHashes() {
@@ -50,4 +50,36 @@ public class CreateHashesTest extends BaseTest {
});
}
+ @Test
+ public void testCreateHashesLargeNumber() {
+ executeSql("create table HFJ_SPIDX_TOKEN (SP_ID bigint not null, SP_MISSING boolean, SP_NAME varchar(100) not null, RES_ID bigint, RES_TYPE varchar(255) not null, SP_UPDATED timestamp, HASH_IDENTITY bigint, HASH_SYS bigint, HASH_SYS_AND_VALUE bigint, HASH_VALUE bigint, SP_SYSTEM varchar(200), SP_VALUE varchar(200), primary key (SP_ID))");
+
+ for (int i = 0; i < 777; i++) {
+ executeSql("insert into HFJ_SPIDX_TOKEN (SP_MISSING, SP_NAME, RES_ID, RES_TYPE, SP_UPDATED, SP_SYSTEM, SP_VALUE, SP_ID) values (false, 'identifier', 999, 'Patient', '2018-09-03 07:44:49.196', 'urn:oid:1.2.410.100110.10.41308301', '8888888" + i + "', " + i + ")");
+ }
+
+ Long count = getConnectionProperties().getTxTemplate().execute(t -> {
+ JdbcTemplate jdbcTemplate = getConnectionProperties().newJdbcTemplate();
+ return jdbcTemplate.queryForObject("SELECT count(*) FROM HFJ_SPIDX_TOKEN WHERE HASH_VALUE IS NULL", Long.class);
+ });
+ assertEquals(777L, count.longValue());
+
+ CalculateHashesTask task = new CalculateHashesTask();
+ task.setTableName("HFJ_SPIDX_TOKEN");
+ task.setColumnName("HASH_IDENTITY");
+ task.addCalculator("HASH_IDENTITY", t -> BaseResourceIndexedSearchParam.calculateHashIdentity(t.getResourceType(), t.getString("SP_NAME")));
+ task.addCalculator("HASH_SYS", t -> ResourceIndexedSearchParamToken.calculateHashSystem(t.getResourceType(), t.getParamName(), t.getString("SP_SYSTEM")));
+ task.addCalculator("HASH_SYS_AND_VALUE", t -> ResourceIndexedSearchParamToken.calculateHashSystemAndValue(t.getResourceType(), t.getParamName(), t.getString("SP_SYSTEM"), t.getString("SP_VALUE")));
+ task.addCalculator("HASH_VALUE", t -> ResourceIndexedSearchParamToken.calculateHashValue(t.getResourceType(), t.getParamName(), t.getString("SP_VALUE")));
+ task.setBatchSize(3);
+ getMigrator().addTask(task);
+
+ getMigrator().migrate();
+
+ count = getConnectionProperties().getTxTemplate().execute(t -> {
+ JdbcTemplate jdbcTemplate = getConnectionProperties().newJdbcTemplate();
+ return jdbcTemplate.queryForObject("SELECT count(*) FROM HFJ_SPIDX_TOKEN WHERE HASH_VALUE IS NULL", Long.class);
+ });
+ assertEquals(0L, count.longValue());
+ }
}
diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/SearchTotalModeParameter.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/SearchTotalModeParameter.java
index 5f9f0e809c0..2bb73bee73d 100644
--- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/SearchTotalModeParameter.java
+++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/SearchTotalModeParameter.java
@@ -18,9 +18,9 @@ import java.util.Collection;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
diff --git a/pom.xml b/pom.xml
index 50cc673a3af..d5ff60b05fb 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1727,6 +1727,12 @@
+
+
+
+
+
+
@@ -2131,6 +2137,8 @@
hapi-fhir-structures-dstu2
hapi-fhir-structures-dstu3
hapi-fhir-structures-r4
+ hapi-fhir-client
+ hapi-fhir-server
hapi-fhir-jpaserver-base
hapi-fhir-jaxrsserver-base
diff --git a/src/site/site.xml b/src/site/site.xml
index 2c5ca40def0..d0e75165cc7 100644
--- a/src/site/site.xml
+++ b/src/site/site.xml
@@ -115,6 +115,8 @@
+
+
diff --git a/src/site/xdoc/docindex.xml b/src/site/xdoc/docindex.xml
index 49cefb6481c..226d7e8b457 100644
--- a/src/site/xdoc/docindex.xml
+++ b/src/site/xdoc/docindex.xml
@@ -1,86 +1,88 @@
-
-
-
-
- Documentation
- James Agnew
-
-
-
-
-
-
-
- Welcome to HAPI FHIR! We hope that the documentation here will be
- helpful to you.
-
-
-
-
- The Data Model
-
-
- RESTful Client
-
-
- RESTful Server
-
-
- Other Features
-
-
-
- JavaDocs
-
-
- Source Cross Reference
-
-
-
-
-
-
-
+
+
+
+
+ Documentation
+ James Agnew
+
+
+
+
+
+
+
+ Welcome to HAPI FHIR! We hope that the documentation here will be
+ helpful to you.
+
+
+
+
+ The Data Model
+
+
+ RESTful Client
+
+
+ RESTful Server
+
+
+ Other Features
+
+
+
+ JavaDocs
+
+
+ Source Cross Reference
+
+
+
+
+
+
+
From 5849960a14907326ea3d2dc8eb8f22c01c6a3f14 Mon Sep 17 00:00:00 2001
From: jamesagnew
Date: Wed, 31 Oct 2018 05:52:15 -0400
Subject: [PATCH 09/97] Add changelog
---
src/changes/changes.xml | 4 ++++
1 file changed, 4 insertions(+)
diff --git a/src/changes/changes.xml b/src/changes/changes.xml
index e06eb146ba0..f71138cf5b9 100644
--- a/src/changes/changes.xml
+++ b/src/changes/changes.xml
@@ -100,6 +100,10 @@
permission is granted. This has been corrected so that transaction() allows both
batch and transaction requests to proceed.
+
+ The JPA server version migrator tool now runs in a multithreaded way, allowing it to
+ upgrade the database faster when migration tasks require data updates.
+
From 041a4c4018dbb4761999ca196c66a845933aad30 Mon Sep 17 00:00:00 2001
From: James Agnew
Date: Wed, 31 Oct 2018 12:36:27 -0400
Subject: [PATCH 10/97] Fix SearchCoordinator tests
---
.../jpa/search/SearchCoordinatorSvcImpl.java | 23 ++++--
.../search/SearchCoordinatorSvcImplTest.java | 78 ++++++++++++++-----
2 files changed, 76 insertions(+), 25 deletions(-)
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/SearchCoordinatorSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/SearchCoordinatorSvcImpl.java
index 94493d87e9a..44ac751ae71 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/SearchCoordinatorSvcImpl.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/SearchCoordinatorSvcImpl.java
@@ -168,7 +168,7 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
verifySearchHasntFailedOrThrowInternalErrorException(search);
if (search.getStatus() == SearchStatusEnum.FINISHED) {
- ourLog.info("Search entity marked as finished");
+ ourLog.info("Search entity marked as finished with {} results", search.getNumFound());
break;
}
if (search.getNumFound() >= theTo) {
@@ -189,7 +189,8 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
search = newSearch.get();
String resourceType = search.getResourceType();
SearchParameterMap params = search.getSearchParameterMap();
- SearchContinuationTask task = new SearchContinuationTask(search, myDaoRegistry.getResourceDao(resourceType), params, resourceType);
+ IFhirResourceDao> resourceDao = myDaoRegistry.getResourceDao(resourceType);
+ SearchContinuationTask task = new SearchContinuationTask(search, resourceDao, params, resourceType);
myIdToSearchTask.put(search.getUuid(), task);
myExecutor.submit(task);
}
@@ -228,10 +229,9 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
txTemplate.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);
txTemplate.afterPropertiesSet();
return txTemplate.execute(t -> {
- Optional searchOpt = mySearchDao.findById(theSearch.getId());
- Search search = searchOpt.orElseThrow(IllegalStateException::new);
- if (search.getStatus() != SearchStatusEnum.PASSCMPLET) {
- throw new IllegalStateException("Can't change to LOADING because state is " + search.getStatus());
+ myEntityManager.refresh(theSearch);
+ if (theSearch.getStatus() != SearchStatusEnum.PASSCMPLET) {
+ throw new IllegalStateException("Can't change to LOADING because state is " + theSearch.getStatus());
}
theSearch.setStatus(SearchStatusEnum.LOADING);
Search newSearch = mySearchDao.save(theSearch);
@@ -239,6 +239,7 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
});
} catch (Exception e) {
ourLog.warn("Failed to activate search: {}", e.toString());
+ ourLog.trace("Failed to activate search", e);
return Optional.empty();
}
}
@@ -438,6 +439,11 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
myManagedTxManager = theTxManager;
}
+ @VisibleForTesting
+ public void setDaoRegistryForUnitTest(DaoRegistry theDaoRegistry) {
+ myDaoRegistry = theDaoRegistry;
+ }
+
public abstract class BaseTask implements Callable {
private final SearchParameterMap myParams;
private final IDao myCallingDao;
@@ -486,7 +492,7 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
}
public List getResourcePids(int theFromIndex, int theToIndex) {
- ourLog.info("Requesting search PIDs from {}-{}", theFromIndex, theToIndex);
+ ourLog.debug("Requesting search PIDs from {}-{}", theFromIndex, theToIndex);
boolean keepWaiting;
do {
@@ -506,6 +512,7 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
* coordinator would then stop searching.SearchCoordinatorSvcImplTest
*/
List remainingResources = SearchCoordinatorSvcImpl.this.getResources(mySearch.getUuid(), mySyncedPids.size(), theToIndex);
+ ourLog.debug("Adding {} resources to the existing {} synced resource IDs", remainingResources.size(), mySyncedPids.size());
mySyncedPids.addAll(remainingResources);
keepWaiting = false;
break;
@@ -834,6 +841,7 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
* Construct the SQL query we'll be sending to the database
*/
IResultIterator theResultIterator = sb.createQuery(myParams, mySearch.getUuid());
+ assert (theResultIterator != null);
/*
* The following loop actually loads the PIDs of the resources
@@ -895,6 +903,7 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
txTemplate.afterPropertiesSet();
txTemplate.execute(t -> {
List previouslyAddedResourcePids = mySearchResultDao.findWithSearchUuid(getSearch());
+ ourLog.debug("Have {} previously added IDs in search: {}", previouslyAddedResourcePids.size(), getSearch().getUuid());
setPreviouslyAddedResourcePids(previouslyAddedResourcePids);
return null;
});
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/SearchCoordinatorSvcImplTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/SearchCoordinatorSvcImplTest.java
index cf2ce1bf577..bdfa14b4ee2 100644
--- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/SearchCoordinatorSvcImplTest.java
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/SearchCoordinatorSvcImplTest.java
@@ -18,7 +18,10 @@ import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.util.TestUtil;
import com.google.common.collect.Lists;
import org.hl7.fhir.instance.model.api.IBaseResource;
-import org.junit.*;
+import org.junit.After;
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
@@ -27,8 +30,11 @@ import org.mockito.Mockito;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.junit.MockitoJUnitRunner;
import org.mockito.stubbing.Answer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageImpl;
+import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.transaction.PlatformTransactionManager;
@@ -50,7 +56,7 @@ public class SearchCoordinatorSvcImplTest {
@Captor
ArgumentCaptor> mySearchResultIterCaptor;
@Mock
- private IDao myCallingDao;
+ private IFhirResourceDao> myCallingDao;
@Mock
private EntityManager myEntityManager;
private int myExpectedNumberOfSearchBuildersCreated = 2;
@@ -67,6 +73,9 @@ public class SearchCoordinatorSvcImplTest {
@Mock
private PlatformTransactionManager myTxManager;
private DaoConfig myDaoConfig;
+ private Search myCurrentSearch;
+ @Mock
+ private DaoRegistry myDaoRegistry;
@After
public void after() {
@@ -75,6 +84,7 @@ public class SearchCoordinatorSvcImplTest {
@Before
public void before() {
+ myCurrentSearch = null;
mySvc = new SearchCoordinatorSvcImpl();
mySvc.setEntityManagerForUnitTest(myEntityManager);
@@ -83,6 +93,7 @@ public class SearchCoordinatorSvcImplTest {
mySvc.setSearchDaoForUnitTest(mySearchDao);
mySvc.setSearchDaoIncludeForUnitTest(mySearchIncludeDao);
mySvc.setSearchDaoResultForUnitTest(mySearchResultDao);
+ mySvc.setDaoRegistryForUnitTest(myDaoRegistry);
myDaoConfig = new DaoConfig();
mySvc.setDaoConfigForUnitTest(myDaoConfig);
@@ -148,25 +159,43 @@ public class SearchCoordinatorSvcImplTest {
}
}
-
+private static final Logger ourLog = LoggerFactory.getLogger(SearchCoordinatorSvcImplTest.class);
@Test
- @Ignore // FIXME: activate
public void testAsyncSearchLargeResultSetBigCountSameCoordinator() {
SearchParameterMap params = new SearchParameterMap();
params.add("name", new StringParam("ANAME"));
List pids = createPidSequence(10, 800);
- IResultIterator iter = new SlowIterator(pids.iterator(), 1);
- when(mySearchBuider.createQuery(Mockito.same(params), any(String.class))).thenReturn(iter);
-
+ SlowIterator iter = new SlowIterator(pids.iterator(), 1);
+ when(mySearchBuider.createQuery(any(), any(String.class))).thenReturn(iter);
doAnswer(loadPids()).when(mySearchBuider).loadResourcesByPid(any(List.class), any(List.class), any(Set.class), anyBoolean(), any(EntityManager.class), any(FhirContext.class), same(myCallingDao));
+ when(mySearchResultDao.findWithSearchUuid(any(), any())).thenAnswer(t -> {
+ List returnedValues = iter.getReturnedValues();
+ Pageable page = (Pageable) t.getArguments()[1];
+ int offset = (int) page.getOffset();
+ int end = (int)(page.getOffset() + page.getPageSize());
+ end = Math.min(end, returnedValues.size());
+ offset = Math.min(offset, returnedValues.size());
+ ourLog.info("findWithSearchUuid {} - {} out of {} values", offset, end, returnedValues.size());
+ return new PageImpl<>(returnedValues.subList(offset, end));
+ });
+
IBundleProvider result = mySvc.registerSearch(myCallingDao, params, "Patient", new CacheControlDirective());
assertNotNull(result.getUuid());
assertEquals(null, result.size());
List resources;
+ when(mySearchDao.save(any())).thenAnswer(t -> {
+ Search search = (Search) t.getArguments()[0];
+ myCurrentSearch = search;
+ return search;
+ });
+ when(mySearchDao.findByUuid(any())).thenAnswer(t -> myCurrentSearch);
+ IFhirResourceDao dao = myCallingDao;
+ when(myDaoRegistry.getResourceDao(any())).thenReturn(dao);
+
resources = result.getResources(0, 100000);
assertEquals(790, resources.size());
assertEquals("10", resources.get(0).getIdElement().getValueAsString());
@@ -176,7 +205,7 @@ public class SearchCoordinatorSvcImplTest {
verify(mySearchDao, atLeastOnce()).save(searchCaptor.capture());
verify(mySearchResultDao, atLeastOnce()).saveAll(mySearchResultIterCaptor.capture());
- List allResults = new ArrayList();
+ List allResults = new ArrayList<>();
for (Iterable next : mySearchResultIterCaptor.getAllValues()) {
allResults.addAll(Lists.newArrayList(next));
}
@@ -184,6 +213,8 @@ public class SearchCoordinatorSvcImplTest {
assertEquals(790, allResults.size());
assertEquals(10, allResults.get(0).getResourcePid().longValue());
assertEquals(799, allResults.get(789).getResourcePid().longValue());
+
+ myExpectedNumberOfSearchBuildersCreated = 3;
}
@Test
@@ -215,7 +246,6 @@ public class SearchCoordinatorSvcImplTest {
* page) within the same JVM will not use the original bundle provider
*/
@Test
- @Ignore // FIXME: activate
public void testAsyncSearchLargeResultSetSecondRequestSameCoordinator() {
SearchParameterMap params = new SearchParameterMap();
params.add("name", new StringParam("ANAME"));
@@ -223,7 +253,7 @@ public class SearchCoordinatorSvcImplTest {
List pids = createPidSequence(10, 800);
IResultIterator iter = new SlowIterator(pids.iterator(), 2);
when(mySearchBuider.createQuery(Mockito.same(params), any(String.class))).thenReturn(iter);
-
+ when(mySearchDao.save(any())).thenAnswer(t -> t.getArguments()[0]);
doAnswer(loadPids()).when(mySearchBuider).loadResourcesByPid(any(List.class), any(List.class), any(Set.class), anyBoolean(), any(EntityManager.class), any(FhirContext.class), same(myCallingDao));
IBundleProvider result = mySvc.registerSearch(myCallingDao, params, "Patient", new CacheControlDirective());
@@ -256,12 +286,6 @@ public class SearchCoordinatorSvcImplTest {
assertEquals("20", resources.get(0).getIdElement().getValueAsString());
assertEquals("29", resources.get(9).getIdElement().getValueAsString());
- provider = new PersistedJpaBundleProvider(result.getUuid(), myCallingDao);
- resources = provider.getResources(20, 99999);
- assertEquals(770, resources.size());
- assertEquals("30", resources.get(0).getIdElement().getValueAsString());
- assertEquals("799", resources.get(769).getIdElement().getValueAsString());
-
myExpectedNumberOfSearchBuildersCreated = 4;
}
@@ -451,11 +475,19 @@ public class SearchCoordinatorSvcImplTest {
}
}
+ /**
+ * THIS CLASS IS FOR UNIT TESTS ONLY - It is deliberately inefficient
+ * and keeps things in memory.
+ *
+ * Don't use it in real code!
+ */
public static class SlowIterator extends BaseIterator implements IResultIterator {
+ private static final Logger ourLog = LoggerFactory.getLogger(SlowIterator.class);
private final IResultIterator myResultIteratorWrap;
private int myDelay;
private Iterator myWrap;
+ private List myReturnedValues = new ArrayList<>();
public SlowIterator(Iterator theWrap, int theDelay) {
myWrap = theWrap;
@@ -469,9 +501,17 @@ public class SearchCoordinatorSvcImplTest {
myDelay = theDelay;
}
+ public List getReturnedValues() {
+ return myReturnedValues;
+ }
+
@Override
public boolean hasNext() {
- return myWrap.hasNext();
+ boolean retVal = myWrap.hasNext();
+ if (!retVal) {
+ ourLog.info("No more results remaining");
+ }
+ return retVal;
}
@Override
@@ -481,7 +521,9 @@ public class SearchCoordinatorSvcImplTest {
} catch (InterruptedException e) {
// ignore
}
- return myWrap.next();
+ Long retVal = myWrap.next();
+ myReturnedValues.add(retVal);
+ return retVal;
}
@Override
From 721c1cd405579110ff9ac15213eff1a98b611639 Mon Sep 17 00:00:00 2001
From: James Agnew
Date: Wed, 31 Oct 2018 16:47:40 -0400
Subject: [PATCH 11/97] Drop column that should have been dropped in 3.4 to 3.5
migration
---
.../ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java | 1 +
1 file changed, 1 insertion(+)
diff --git a/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java
index acaf3be1c6a..c02246a3a14 100644
--- a/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java
+++ b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java
@@ -277,6 +277,7 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks {
Builder.BuilderWithTableName spp = version.onTable("HFJ_RES_PARAM_PRESENT");
version.startSectionWithMessage("Starting work on table: " + spp.getTableName());
spp.dropIndex("IDX_RESPARMPRESENT_SPID_RESID");
+ spp.dropColumn("SP_ID");
spp
.addColumn("HASH_PRESENCE")
.nullable()
From bbce2c69cf686ff5aafb26d92958a594030713a6 Mon Sep 17 00:00:00 2001
From: jamesagnew
Date: Thu, 1 Nov 2018 05:51:26 -0400
Subject: [PATCH 12/97] Fix typo
---
.../src/main/java/ca/uhn/fhir/rest/annotation/Operation.java | 4 ++--
hapi-fhir-base/src/main/java/ca/uhn/fhir/util/FhirTerser.java | 4 ++--
.../ca/uhn/fhir/rest/server/OperationGenericServerR4Test.java | 2 +-
3 files changed, 5 insertions(+), 5 deletions(-)
diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/Operation.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/Operation.java
index 7fdbdefa417..d17b376b08a 100644
--- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/Operation.java
+++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/Operation.java
@@ -9,9 +9,9 @@ package ca.uhn.fhir.rest.annotation;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/FhirTerser.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/FhirTerser.java
index 354d52d6201..281b6d94077 100644
--- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/FhirTerser.java
+++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/FhirTerser.java
@@ -28,9 +28,9 @@ import static org.apache.commons.lang3.StringUtils.*;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
diff --git a/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/rest/server/OperationGenericServerR4Test.java b/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/rest/server/OperationGenericServerR4Test.java
index 98e175deedf..f77882c6f22 100644
--- a/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/rest/server/OperationGenericServerR4Test.java
+++ b/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/rest/server/OperationGenericServerR4Test.java
@@ -167,7 +167,7 @@ public class OperationGenericServerR4Test {
}
- SmartScopeTranslationSvcImplTest @Test
+ @Test
public void testSearchGetsClassifiedAppropriately() throws Exception {
HttpGet httpPost = new HttpGet("http://localhost:" + ourPort + "/Patient");
CloseableHttpResponse status = ourClient.execute(httpPost);
From 3bfdc61866e5c33786cbe5b05d126675a1fadf59 Mon Sep 17 00:00:00 2001
From: James Agnew
Date: Thu, 1 Nov 2018 09:15:03 -0400
Subject: [PATCH 13/97] Fix a couple of test failures
---
...istedJpaSearchFirstPageBundleProvider.java | 9 +++
.../jpa/search/SearchCoordinatorSvcImpl.java | 19 +++---
.../fhir/jpa/dao/r4/FhirSystemDaoR4Test.java | 64 +++++++++++++++++++
.../search/SearchCoordinatorSvcImplTest.java | 9 ++-
.../BaseResourceReturningMethodBinding.java | 10 ---
.../ca/uhn/fhir/util/FhirTerserDstu3Test.java | 29 +++++++++
6 files changed, 117 insertions(+), 23 deletions(-)
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/PersistedJpaSearchFirstPageBundleProvider.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/PersistedJpaSearchFirstPageBundleProvider.java
index bc8b61a00c4..09338aed558 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/PersistedJpaSearchFirstPageBundleProvider.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/PersistedJpaSearchFirstPageBundleProvider.java
@@ -65,6 +65,15 @@ public class PersistedJpaSearchFirstPageBundleProvider extends PersistedJpaBundl
txTemplate.setPropagationBehavior(TransactionTemplate.PROPAGATION_REQUIRED);
List retVal = txTemplate.execute(theStatus -> toResourceList(mySearchBuilder, pids));
+ int totalCountWanted = theToIndex - theFromIndex;
+ if (retVal.size() < totalCountWanted) {
+ if (mySearch.getStatus() == SearchStatusEnum.PASSCMPLET) {
+ int remainingWanted = totalCountWanted - retVal.size();
+ int fromIndex = theToIndex - remainingWanted;
+ List remaining = super.getResources(fromIndex, theToIndex);
+ retVal.addAll(remaining);
+ }
+ }
ourLog.trace("Loaded resources to return");
return retVal;
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/SearchCoordinatorSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/SearchCoordinatorSvcImpl.java
index 44ac751ae71..d546ecc9677 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/SearchCoordinatorSvcImpl.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/SearchCoordinatorSvcImpl.java
@@ -229,17 +229,19 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
txTemplate.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);
txTemplate.afterPropertiesSet();
return txTemplate.execute(t -> {
- myEntityManager.refresh(theSearch);
- if (theSearch.getStatus() != SearchStatusEnum.PASSCMPLET) {
+ Search search = mySearchDao.findById(theSearch.getId()).orElse(theSearch);
+
+ if (search.getStatus() != SearchStatusEnum.PASSCMPLET) {
throw new IllegalStateException("Can't change to LOADING because state is " + theSearch.getStatus());
}
- theSearch.setStatus(SearchStatusEnum.LOADING);
- Search newSearch = mySearchDao.save(theSearch);
+ search.setStatus(SearchStatusEnum.LOADING);
+ Search newSearch = mySearchDao.save(search);
return Optional.of(newSearch);
});
} catch (Exception e) {
ourLog.warn("Failed to activate search: {}", e.toString());
- ourLog.trace("Failed to activate search", e);
+ // FIXME: aaaaa
+ ourLog.info("Failed to activate search", e);
return Optional.empty();
}
}
@@ -511,9 +513,10 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
* user has requested resources 0-60, then they would get 0-50 back but the search
* coordinator would then stop searching.SearchCoordinatorSvcImplTest
*/
- List remainingResources = SearchCoordinatorSvcImpl.this.getResources(mySearch.getUuid(), mySyncedPids.size(), theToIndex);
- ourLog.debug("Adding {} resources to the existing {} synced resource IDs", remainingResources.size(), mySyncedPids.size());
- mySyncedPids.addAll(remainingResources);
+ // FIXME: aaaaaaaa
+// List remainingResources = SearchCoordinatorSvcImpl.this.getResources(mySearch.getUuid(), mySyncedPids.size(), theToIndex);
+// ourLog.debug("Adding {} resources to the existing {} synced resource IDs", remainingResources.size(), mySyncedPids.size());
+// mySyncedPids.addAll(remainingResources);
keepWaiting = false;
break;
case FAILED:
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirSystemDaoR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirSystemDaoR4Test.java
index 6e106dcfea2..5f9770f98b1 100644
--- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirSystemDaoR4Test.java
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirSystemDaoR4Test.java
@@ -175,6 +175,70 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest {
fail();
return null;
}
+
+ @Test
+ public void testTransactionReSavesPreviouslyDeletedResources() {
+
+ {
+ Bundle input = new Bundle();
+ input.setType(BundleType.TRANSACTION);
+
+ Patient pt = new Patient();
+ pt.setId("pt");
+ pt.setActive(true);
+ input
+ .addEntry()
+ .setResource(pt)
+ .getRequest()
+ .setUrl("Patient/pt")
+ .setMethod(HTTPVerb.PUT);
+
+ Observation obs = new Observation();
+ obs.setId("obs");
+ obs.getSubject().setReference("Patient/pt");
+ input
+ .addEntry()
+ .setResource(obs)
+ .getRequest()
+ .setUrl("Observation/obs")
+ .setMethod(HTTPVerb.PUT);
+
+ mySystemDao.transaction(null, input);
+ }
+
+ myObservationDao.delete(new IdType("Observation/obs"));
+ myPatientDao.delete(new IdType("Patient/pt"));
+
+ {
+ Bundle input = new Bundle();
+ input.setType(BundleType.TRANSACTION);
+
+ Patient pt = new Patient();
+ pt.setId("pt");
+ pt.setActive(true);
+ input
+ .addEntry()
+ .setResource(pt)
+ .getRequest()
+ .setUrl("Patient/pt")
+ .setMethod(HTTPVerb.PUT);
+
+ Observation obs = new Observation();
+ obs.setId("obs");
+ obs.getSubject().setReference("Patient/pt");
+ input
+ .addEntry()
+ .setResource(obs)
+ .getRequest()
+ .setUrl("Observation/obs")
+ .setMethod(HTTPVerb.PUT);
+
+ mySystemDao.transaction(null, input);
+ }
+
+ myPatientDao.read(new IdType("Patient/pt"));
+ }
+
@Test
public void testResourceCounts() {
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/SearchCoordinatorSvcImplTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/SearchCoordinatorSvcImplTest.java
index bdfa14b4ee2..91d277b53e1 100644
--- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/SearchCoordinatorSvcImplTest.java
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/SearchCoordinatorSvcImplTest.java
@@ -34,7 +34,6 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageImpl;
-import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.transaction.PlatformTransactionManager;
@@ -52,6 +51,7 @@ import static org.mockito.Mockito.*;
@RunWith(MockitoJUnitRunner.class)
public class SearchCoordinatorSvcImplTest {
+ private static final Logger ourLog = LoggerFactory.getLogger(SearchCoordinatorSvcImplTest.class);
private static FhirContext ourCtx = FhirContext.forDstu3();
@Captor
ArgumentCaptor> mySearchResultIterCaptor;
@@ -69,7 +69,6 @@ public class SearchCoordinatorSvcImplTest {
@Mock
private ISearchResultDao mySearchResultDao;
private SearchCoordinatorSvcImpl mySvc;
-
@Mock
private PlatformTransactionManager myTxManager;
private DaoConfig myDaoConfig;
@@ -159,7 +158,7 @@ public class SearchCoordinatorSvcImplTest {
}
}
-private static final Logger ourLog = LoggerFactory.getLogger(SearchCoordinatorSvcImplTest.class);
+
@Test
public void testAsyncSearchLargeResultSetBigCountSameCoordinator() {
SearchParameterMap params = new SearchParameterMap();
@@ -174,7 +173,7 @@ private static final Logger ourLog = LoggerFactory.getLogger(SearchCoordinatorSv
List returnedValues = iter.getReturnedValues();
Pageable page = (Pageable) t.getArguments()[1];
int offset = (int) page.getOffset();
- int end = (int)(page.getOffset() + page.getPageSize());
+ int end = (int) (page.getOffset() + page.getPageSize());
end = Math.min(end, returnedValues.size());
offset = Math.min(offset, returnedValues.size());
ourLog.info("findWithSearchUuid {} - {} out of {} values", offset, end, returnedValues.size());
@@ -214,7 +213,7 @@ private static final Logger ourLog = LoggerFactory.getLogger(SearchCoordinatorSv
assertEquals(10, allResults.get(0).getResourcePid().longValue());
assertEquals(799, allResults.get(789).getResourcePid().longValue());
- myExpectedNumberOfSearchBuildersCreated = 3;
+ myExpectedNumberOfSearchBuildersCreated = 4;
}
@Test
diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/BaseResourceReturningMethodBinding.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/BaseResourceReturningMethodBinding.java
index bd5c7f3a90c..19b8a282812 100644
--- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/BaseResourceReturningMethodBinding.java
+++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/BaseResourceReturningMethodBinding.java
@@ -223,16 +223,6 @@ public abstract class BaseResourceReturningMethodBinding extends BaseMethodBindi
int start = Math.max(0, theOffset - theLimit);
linkPrev = RestfulServerUtils.createPagingLink(theIncludes, serverBase, searchId, start, theLimit, theRequest.getParameters(), prettyPrint, theBundleType);
}
-// int offset = theOffset + resourceList.size();
-//
-// // We're doing offset pages
-// if (numTotalResults == null || offset < numTotalResults) {
-// linkNext = (RestfulServerUtils.createPagingLink(theIncludes, serverBase, searchId, offset, numToReturn, theRequest.getParameters(), prettyPrint, theBundleType));
-// }
-// if (theOffset > 0) {
-// int start = Math.max(0, theOffset - theLimit);
-// linkPrev = RestfulServerUtils.createPagingLink(theIncludes, serverBase, searchId, start, theLimit, theRequest.getParameters(), prettyPrint, theBundleType);
-// }
}
bundleFactory.addRootPropertiesToBundle(theResult.getUuid(), serverBase, theLinkSelf, linkPrev, linkNext, theResult.size(), theBundleType, theResult.getPublished());
diff --git a/hapi-fhir-structures-dstu3/src/test/java/ca/uhn/fhir/util/FhirTerserDstu3Test.java b/hapi-fhir-structures-dstu3/src/test/java/ca/uhn/fhir/util/FhirTerserDstu3Test.java
index dadc87e9b62..aa85a74784c 100644
--- a/hapi-fhir-structures-dstu3/src/test/java/ca/uhn/fhir/util/FhirTerserDstu3Test.java
+++ b/hapi-fhir-structures-dstu3/src/test/java/ca/uhn/fhir/util/FhirTerserDstu3Test.java
@@ -41,6 +41,35 @@ public class FhirTerserDstu3Test {
private static FhirContext ourCtx = FhirContext.forDstu3();
+ @Test
+ public void testCloneIntoBundle() {
+ Bundle input = new Bundle();
+ input.setType(Bundle.BundleType.TRANSACTION);
+
+ Patient pt = new Patient();
+ pt.setId("pt");
+ pt.setActive(true);
+ input
+ .addEntry()
+ .setResource(pt)
+ .getRequest()
+ .setUrl("Patient/pt")
+ .setMethod(Bundle.HTTPVerb.PUT);
+
+ Observation obs = new Observation();
+ obs.setId("obs");
+ obs.getSubject().setReference("Patient/pt");
+ input
+ .addEntry()
+ .setResource(obs)
+ .getRequest()
+ .setUrl("Observation/obs")
+ .setMethod(Bundle.HTTPVerb.PUT);
+
+ Bundle inputClone = new Bundle();
+ ourCtx.newTerser().cloneInto(input, inputClone, false);
+ }
+
@Test
public void testCloneIntoComposite() {
Quantity source = new Quantity();
From 1b877ac03eed6504c6cf9202d9c8803b46abb3ba Mon Sep 17 00:00:00 2001
From: jamesagnew
Date: Thu, 1 Nov 2018 09:15:51 -0400
Subject: [PATCH 14/97] Add license headers
---
.../ca/uhn/fhir/jpa/dao/SearchBuilder.java | 4 ++--
.../fhir/jpa/dao/index/IndexingSupport.java | 20 +++++++++++++++++++
.../index/ResourceIndexedSearchParams.java | 20 +++++++++++++++++++
.../BaseResourceIndexedSearchParam.java | 4 ++--
.../matcher/ISubscriptionMatcher.java | 20 +++++++++++++++++++
.../matcher/SubscriptionMatcherDatabase.java | 20 +++++++++++++++++++
.../matcher/SubscriptionMatcherInMemory.java | 20 +++++++++++++++++++
7 files changed, 104 insertions(+), 4 deletions(-)
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/SearchBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/SearchBuilder.java
index 56d8809b00d..d798fcd3853 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/SearchBuilder.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/SearchBuilder.java
@@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.dao;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/index/IndexingSupport.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/index/IndexingSupport.java
index 8441deb4a6b..26a7163758a 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/index/IndexingSupport.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/index/IndexingSupport.java
@@ -1,5 +1,25 @@
package ca.uhn.fhir.jpa.dao.index;
+/*-
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2018 University Health Network
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
import java.util.Map;
import java.util.Set;
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/index/ResourceIndexedSearchParams.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/index/ResourceIndexedSearchParams.java
index 9f4b5baf97e..b04467704a0 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/index/ResourceIndexedSearchParams.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/index/ResourceIndexedSearchParams.java
@@ -1,5 +1,25 @@
package ca.uhn.fhir.jpa.dao.index;
+/*-
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2018 University Health Network
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
import static org.apache.commons.lang3.StringUtils.compare;
import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BaseResourceIndexedSearchParam.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BaseResourceIndexedSearchParam.java
index a51d92819f4..5f54f36c521 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BaseResourceIndexedSearchParam.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BaseResourceIndexedSearchParam.java
@@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.entity;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/subscription/matcher/ISubscriptionMatcher.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/subscription/matcher/ISubscriptionMatcher.java
index 22e4943bdad..20abe14988e 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/subscription/matcher/ISubscriptionMatcher.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/subscription/matcher/ISubscriptionMatcher.java
@@ -1,5 +1,25 @@
package ca.uhn.fhir.jpa.subscription.matcher;
+/*-
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2018 University Health Network
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
import ca.uhn.fhir.jpa.subscription.ResourceModifiedMessage;
public interface ISubscriptionMatcher {
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/subscription/matcher/SubscriptionMatcherDatabase.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/subscription/matcher/SubscriptionMatcherDatabase.java
index ce1788c61c8..c4d58963a68 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/subscription/matcher/SubscriptionMatcherDatabase.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/subscription/matcher/SubscriptionMatcherDatabase.java
@@ -1,5 +1,25 @@
package ca.uhn.fhir.jpa.subscription.matcher;
+/*-
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2018 University Health Network
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.slf4j.Logger;
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/subscription/matcher/SubscriptionMatcherInMemory.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/subscription/matcher/SubscriptionMatcherInMemory.java
index 3355e415005..0d1dcaa8ae0 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/subscription/matcher/SubscriptionMatcherInMemory.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/subscription/matcher/SubscriptionMatcherInMemory.java
@@ -1,5 +1,25 @@
package ca.uhn.fhir.jpa.subscription.matcher;
+/*-
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2018 University Health Network
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
import ca.uhn.fhir.jpa.subscription.ResourceModifiedMessage;
public class SubscriptionMatcherInMemory implements ISubscriptionMatcher {
From bb59e2d73a46ff6ae2e28b7378f59e9e3956729a Mon Sep 17 00:00:00 2001
From: James Agnew
Date: Thu, 1 Nov 2018 10:25:13 -0400
Subject: [PATCH 15/97] Fix broken test
---
.../java/ca/uhn/fhir/util/FhirTerser.java | 393 +++++++++---------
1 file changed, 195 insertions(+), 198 deletions(-)
diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/FhirTerser.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/FhirTerser.java
index 281b6d94077..7fa863b524d 100644
--- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/FhirTerser.java
+++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/FhirTerser.java
@@ -28,9 +28,9 @@ import static org.apache.commons.lang3.StringUtils.*;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -96,34 +96,36 @@ public class FhirTerser {
/**
* Clones all values from a source object into the equivalent fields in a target object
- * @param theSource The source object (must not be null)
- * @param theTarget The target object to copy values into (must not be null)
+ *
+ * @param theSource The source object (must not be null)
+ * @param theTarget The target object to copy values into (must not be null)
* @param theIgnoreMissingFields The ignore fields in the target which do not exist (if false, an exception will be thrown if the target is unable to accept a value from the source)
+ * @return Returns the target (which will be the same object that was passed into theTarget) for easy chaining
*/
- public void cloneInto(IBase theSource, IBase theTarget, boolean theIgnoreMissingFields) {
+ public IBase cloneInto(IBase theSource, IBase theTarget, boolean theIgnoreMissingFields) {
Validate.notNull(theSource, "theSource must not be null");
Validate.notNull(theTarget, "theTarget must not be null");
-
+
if (theSource instanceof IPrimitiveType>) {
if (theTarget instanceof IPrimitiveType>) {
- ((IPrimitiveType>)theTarget).setValueAsString(((IPrimitiveType>)theSource).getValueAsString());
- return;
+ ((IPrimitiveType>) theTarget).setValueAsString(((IPrimitiveType>) theSource).getValueAsString());
+ return theSource;
}
if (theIgnoreMissingFields) {
- return;
+ return theSource;
}
throw new DataFormatException("Can not copy value from primitive of type " + theSource.getClass().getName() + " into type " + theTarget.getClass().getName());
}
-
- BaseRuntimeElementCompositeDefinition> sourceDef = (BaseRuntimeElementCompositeDefinition>) myContext.getElementDefinition(theSource.getClass());
+
+ BaseRuntimeElementCompositeDefinition> sourceDef = (BaseRuntimeElementCompositeDefinition>) myContext.getElementDefinition(theSource.getClass());
BaseRuntimeElementCompositeDefinition> targetDef = (BaseRuntimeElementCompositeDefinition>) myContext.getElementDefinition(theTarget.getClass());
-
+
List children = sourceDef.getChildren();
if (sourceDef instanceof RuntimeExtensionDtDefinition) {
- children = ((RuntimeExtensionDtDefinition)sourceDef).getChildrenIncludingUrl();
+ children = ((RuntimeExtensionDtDefinition) sourceDef).getChildrenIncludingUrl();
}
-
- for (BaseRuntimeChildDefinition nextChild : children) {
+
+ for (BaseRuntimeChildDefinition nextChild : children)
for (IBase nextValue : nextChild.getAccessor().getValues(theSource)) {
String elementName = nextChild.getChildNameByDatatype(nextValue.getClass());
BaseRuntimeChildDefinition targetChild = targetDef.getChildByName(elementName);
@@ -133,14 +135,15 @@ public class FhirTerser {
}
throw new DataFormatException("Type " + theTarget.getClass().getName() + " does not have a child with name " + elementName);
}
-
- BaseRuntimeElementDefinition> childDef = targetChild.getChildByName(elementName);
- IBase target = childDef.newInstance();
+
+ BaseRuntimeElementDefinition> element = myContext.getElementDefinition(nextValue.getClass());
+ IBase target = element.newInstance();
+
targetChild.getMutator().addValue(theTarget, target);
cloneInto(nextValue, target, theIgnoreMissingFields);
}
- }
-
+
+ return theTarget;
}
/**
@@ -153,11 +156,9 @@ public class FhirTerser {
* Note on scope: This method will descend into any contained resources ({@link IResource#getContained()}) as well, but will not descend into linked resources (e.g.
* {@link BaseResourceReferenceDt#getResource()}) or embedded resources (e.g. Bundle.entry.resource)
*
- *
- * @param theResource
- * The resource instance to search. Must not be null.
- * @param theType
- * The type to search for. Must not be null.
+ *
+ * @param theResource The resource instance to search. Must not be null.
+ * @param theType The type to search for. Must not be null.
* @return Returns a list of all matching elements
*/
public List getAllPopulatedChildElementsOfType(IBaseResource theResource, final Class theType) {
@@ -274,7 +275,7 @@ public class FhirTerser {
.collect(Collectors.toList());
if (theAddExtension
- && (!(theCurrentObj instanceof IBaseExtension) || (extensionDts.isEmpty() && theSubList.size() == 1))) {
+ && (!(theCurrentObj instanceof IBaseExtension) || (extensionDts.isEmpty() && theSubList.size() == 1))) {
extensionDts.add(createEmptyExtensionDt((ISupportsUndeclaredExtensions) theCurrentObj, extensionUrl));
}
@@ -286,7 +287,7 @@ public class FhirTerser {
extensionDts = ((IBaseExtension) theCurrentObj).getExtension();
if (theAddExtension
- && (extensionDts.isEmpty() && theSubList.size() == 1)) {
+ && (extensionDts.isEmpty() && theSubList.size() == 1)) {
extensionDts.add(createEmptyExtensionDt((IBaseExtension) theCurrentObj, extensionUrl));
}
@@ -311,7 +312,7 @@ public class FhirTerser {
.collect(Collectors.toList());
if (theAddExtension
- && (!(theCurrentObj instanceof IBaseExtension) || (extensions.isEmpty() && theSubList.size() == 1))) {
+ && (!(theCurrentObj instanceof IBaseExtension) || (extensions.isEmpty() && theSubList.size() == 1))) {
extensions.add(createEmptyExtension((IBaseHasExtensions) theCurrentObj, extensionUrl));
}
@@ -396,7 +397,7 @@ public class FhirTerser {
.collect(Collectors.toList());
if (theAddExtension
- && (!(theCurrentObj instanceof IBaseExtension) || (extensions.isEmpty() && theSubList.size() == 1))) {
+ && (!(theCurrentObj instanceof IBaseExtension) || (extensions.isEmpty() && theSubList.size() == 1))) {
extensions.add(createEmptyModifierExtension((IBaseHasModifierExtensions) theCurrentObj, extensionUrl));
}
@@ -478,7 +479,7 @@ public class FhirTerser {
* type {@link Object}.
*
* @param theResource The resource instance to be accessed. Must not be null.
- * @param thePath The path for the element to be accessed.
+ * @param thePath The path for the element to be accessed.
* @return A list of values of type {@link Object}.
*/
public List getValues(IBaseResource theResource, String thePath) {
@@ -492,8 +493,8 @@ public class FhirTerser {
* type {@link Object}.
*
* @param theResource The resource instance to be accessed. Must not be null.
- * @param thePath The path for the element to be accessed.
- * @param theCreate When set to true
, the terser will create a null-valued element where none exists.
+ * @param thePath The path for the element to be accessed.
+ * @param theCreate When set to true
, the terser will create a null-valued element where none exists.
* @return A list of values of type {@link Object}.
*/
public List getValues(IBaseResource theResource, String thePath, boolean theCreate) {
@@ -506,9 +507,9 @@ public class FhirTerser {
* Returns values stored in an element identified by its path. The list of values is of
* type {@link Object}.
*
- * @param theResource The resource instance to be accessed. Must not be null.
- * @param thePath The path for the element to be accessed.
- * @param theCreate When set to true
, the terser will create a null-valued element where none exists.
+ * @param theResource The resource instance to be accessed. Must not be null.
+ * @param thePath The path for the element to be accessed.
+ * @param theCreate When set to true
, the terser will create a null-valued element where none exists.
* @param theAddExtension When set to true
, the terser will add a null-valued extension where one or more such extensions already exist.
* @return A list of values of type {@link Object}.
*/
@@ -522,10 +523,10 @@ public class FhirTerser {
* Returns values stored in an element identified by its path. The list of values is of
* type theWantedClass
.
*
- * @param theResource The resource instance to be accessed. Must not be null.
- * @param thePath The path for the element to be accessed.
+ * @param theResource The resource instance to be accessed. Must not be null.
+ * @param thePath The path for the element to be accessed.
* @param theWantedClass The desired class to be returned in a list.
- * @param Type declared by theWantedClass
+ * @param Type declared by theWantedClass
* @return A list of values of type theWantedClass
.
*/
public List getValues(IBaseResource theResource, String thePath, Class theWantedClass) {
@@ -538,11 +539,11 @@ public class FhirTerser {
* Returns values stored in an element identified by its path. The list of values is of
* type theWantedClass
.
*
- * @param theResource The resource instance to be accessed. Must not be null.
- * @param thePath The path for the element to be accessed.
+ * @param theResource The resource instance to be accessed. Must not be null.
+ * @param thePath The path for the element to be accessed.
* @param theWantedClass The desired class to be returned in a list.
- * @param theCreate When set to true
, the terser will create a null-valued element where none exists.
- * @param Type declared by theWantedClass
+ * @param theCreate When set to true
, the terser will create a null-valued element where none exists.
+ * @param Type declared by theWantedClass
* @return A list of values of type theWantedClass
.
*/
public List getValues(IBaseResource theResource, String thePath, Class theWantedClass, boolean theCreate) {
@@ -555,12 +556,12 @@ public class FhirTerser {
* Returns values stored in an element identified by its path. The list of values is of
* type theWantedClass
.
*
- * @param theResource The resource instance to be accessed. Must not be null.
- * @param thePath The path for the element to be accessed.
- * @param theWantedClass The desired class to be returned in a list.
- * @param theCreate When set to true
, the terser will create a null-valued element where none exists.
+ * @param theResource The resource instance to be accessed. Must not be null.
+ * @param thePath The path for the element to be accessed.
+ * @param theWantedClass The desired class to be returned in a list.
+ * @param theCreate When set to true
, the terser will create a null-valued element where none exists.
* @param theAddExtension When set to true
, the terser will add a null-valued extension where one or more such extensions already exist.
- * @param Type declared by theWantedClass
+ * @param Type declared by theWantedClass
* @return A list of values of type theWantedClass
.
*/
public List getValues(IBaseResource theResource, String thePath, Class theWantedClass, boolean theCreate, boolean theAddExtension) {
@@ -605,10 +606,10 @@ public class FhirTerser {
/**
* Returns true
if theSource
is in the compartment named theCompartmentName
* belonging to resource theTarget
- *
+ *
* @param theCompartmentName The name of the compartment
- * @param theSource The potential member of the compartment
- * @param theTarget The owner of the compartment. Note that both the resource type and ID must be filled in on this IIdType or the method will throw an {@link IllegalArgumentException}
+ * @param theSource The potential member of the compartment
+ * @param theTarget The owner of the compartment. Note that both the resource type and ID must be filled in on this IIdType or the method will throw an {@link IllegalArgumentException}
* @return true
if theSource
is in the compartment
* @throws IllegalArgumentException If theTarget does not contain both a resource type and ID
*/
@@ -618,16 +619,16 @@ public class FhirTerser {
Validate.notNull(theTarget, "theTarget must not be null");
Validate.notBlank(defaultString(theTarget.getResourceType()), "theTarget must have a populated resource type (theTarget.getResourceType() does not return a value)");
Validate.notBlank(defaultString(theTarget.getIdPart()), "theTarget must have a populated ID (theTarget.getIdPart() does not return a value)");
-
+
String wantRef = theTarget.toUnqualifiedVersionless().getValue();
-
+
RuntimeResourceDefinition sourceDef = myContext.getResourceDefinition(theSource);
if (theSource.getIdElement().hasIdPart()) {
if (wantRef.equals(sourceDef.getName() + '/' + theSource.getIdElement().getIdPart())) {
return true;
}
}
-
+
List params = sourceDef.getSearchParamsForCompartmentName(theCompartmentName);
for (RuntimeSearchParam nextParam : params) {
for (String nextPath : nextParam.getPathsSplit()) {
@@ -679,12 +680,12 @@ public class FhirTerser {
}
}
}
-
+
return false;
}
private void visit(IBase theElement, BaseRuntimeChildDefinition theChildDefinition, BaseRuntimeElementDefinition> theDefinition, IModelVisitor2 theCallback, List theContainingElementPath,
- List theChildDefinitionPath, List> theElementDefinitionPath) {
+ List theChildDefinitionPath, List> theElementDefinitionPath) {
if (theChildDefinition != null) {
theChildDefinitionPath.add(theChildDefinition);
}
@@ -692,7 +693,7 @@ public class FhirTerser {
theElementDefinitionPath.add(theDefinition);
theCallback.acceptElement(theElement, Collections.unmodifiableList(theContainingElementPath), Collections.unmodifiableList(theChildDefinitionPath),
- Collections.unmodifiableList(theElementDefinitionPath));
+ Collections.unmodifiableList(theElementDefinitionPath));
/*
* Visit undeclared extensions
@@ -710,85 +711,85 @@ public class FhirTerser {
* Now visit the children of the given element
*/
switch (theDefinition.getChildType()) {
- case ID_DATATYPE:
- case PRIMITIVE_XHTML_HL7ORG:
- case PRIMITIVE_XHTML:
- case PRIMITIVE_DATATYPE:
- // These are primitive types, so we don't need to visit their children
- break;
- case RESOURCE:
- case RESOURCE_BLOCK:
- case COMPOSITE_DATATYPE: {
- BaseRuntimeElementCompositeDefinition> childDef = (BaseRuntimeElementCompositeDefinition>) theDefinition;
- for (BaseRuntimeChildDefinition nextChild : childDef.getChildrenAndExtension()) {
- List extends IBase> values = nextChild.getAccessor().getValues(theElement);
- if (values != null) {
- for (IBase nextValue : values) {
- if (nextValue == null) {
- continue;
- }
- if (nextValue.isEmpty()) {
- continue;
- }
- BaseRuntimeElementDefinition> childElementDef;
- childElementDef = nextChild.getChildElementDefinitionByDatatype(nextValue.getClass());
-
- if (childElementDef == null) {
- StringBuilder b = new StringBuilder();
- b.append("Found value of type[");
- b.append(nextValue.getClass().getSimpleName());
- b.append("] which is not valid for field[");
- b.append(nextChild.getElementName());
- b.append("] in ");
- b.append(childDef.getName());
- b.append(" - Valid types: ");
- for (Iterator iter = new TreeSet(nextChild.getValidChildNames()).iterator(); iter.hasNext();) {
- BaseRuntimeElementDefinition> childByName = nextChild.getChildByName(iter.next());
- b.append(childByName.getImplementingClass().getSimpleName());
- if (iter.hasNext()) {
- b.append(", ");
- }
+ case ID_DATATYPE:
+ case PRIMITIVE_XHTML_HL7ORG:
+ case PRIMITIVE_XHTML:
+ case PRIMITIVE_DATATYPE:
+ // These are primitive types, so we don't need to visit their children
+ break;
+ case RESOURCE:
+ case RESOURCE_BLOCK:
+ case COMPOSITE_DATATYPE: {
+ BaseRuntimeElementCompositeDefinition> childDef = (BaseRuntimeElementCompositeDefinition>) theDefinition;
+ for (BaseRuntimeChildDefinition nextChild : childDef.getChildrenAndExtension()) {
+ List extends IBase> values = nextChild.getAccessor().getValues(theElement);
+ if (values != null) {
+ for (IBase nextValue : values) {
+ if (nextValue == null) {
+ continue;
}
- throw new DataFormatException(b.toString());
- }
+ if (nextValue.isEmpty()) {
+ continue;
+ }
+ BaseRuntimeElementDefinition> childElementDef;
+ childElementDef = nextChild.getChildElementDefinitionByDatatype(nextValue.getClass());
- if (nextChild instanceof RuntimeChildDirectResource) {
- // Don't descend into embedded resources
- theContainingElementPath.add(nextValue);
- theChildDefinitionPath.add(nextChild);
- theElementDefinitionPath.add(myContext.getElementDefinition(nextValue.getClass()));
- theCallback.acceptElement(nextValue, Collections.unmodifiableList(theContainingElementPath), Collections.unmodifiableList(theChildDefinitionPath),
+ if (childElementDef == null) {
+ StringBuilder b = new StringBuilder();
+ b.append("Found value of type[");
+ b.append(nextValue.getClass().getSimpleName());
+ b.append("] which is not valid for field[");
+ b.append(nextChild.getElementName());
+ b.append("] in ");
+ b.append(childDef.getName());
+ b.append(" - Valid types: ");
+ for (Iterator iter = new TreeSet(nextChild.getValidChildNames()).iterator(); iter.hasNext(); ) {
+ BaseRuntimeElementDefinition> childByName = nextChild.getChildByName(iter.next());
+ b.append(childByName.getImplementingClass().getSimpleName());
+ if (iter.hasNext()) {
+ b.append(", ");
+ }
+ }
+ throw new DataFormatException(b.toString());
+ }
+
+ if (nextChild instanceof RuntimeChildDirectResource) {
+ // Don't descend into embedded resources
+ theContainingElementPath.add(nextValue);
+ theChildDefinitionPath.add(nextChild);
+ theElementDefinitionPath.add(myContext.getElementDefinition(nextValue.getClass()));
+ theCallback.acceptElement(nextValue, Collections.unmodifiableList(theContainingElementPath), Collections.unmodifiableList(theChildDefinitionPath),
Collections.unmodifiableList(theElementDefinitionPath));
- theChildDefinitionPath.remove(theChildDefinitionPath.size() - 1);
- theContainingElementPath.remove(theContainingElementPath.size() - 1);
- theElementDefinitionPath.remove(theElementDefinitionPath.size() - 1);
- } else {
- visit(nextValue, nextChild, childElementDef, theCallback, theContainingElementPath, theChildDefinitionPath, theElementDefinitionPath);
+ theChildDefinitionPath.remove(theChildDefinitionPath.size() - 1);
+ theContainingElementPath.remove(theContainingElementPath.size() - 1);
+ theElementDefinitionPath.remove(theElementDefinitionPath.size() - 1);
+ } else {
+ visit(nextValue, nextChild, childElementDef, theCallback, theContainingElementPath, theChildDefinitionPath, theElementDefinitionPath);
+ }
}
}
}
+ break;
}
- break;
- }
- case CONTAINED_RESOURCES: {
- BaseContainedDt value = (BaseContainedDt) theElement;
- for (IResource next : value.getContainedResources()) {
- BaseRuntimeElementCompositeDefinition> def = myContext.getResourceDefinition(next);
- visit(next, null, def, theCallback, theContainingElementPath, theChildDefinitionPath, theElementDefinitionPath);
+ case CONTAINED_RESOURCES: {
+ BaseContainedDt value = (BaseContainedDt) theElement;
+ for (IResource next : value.getContainedResources()) {
+ BaseRuntimeElementCompositeDefinition> def = myContext.getResourceDefinition(next);
+ visit(next, null, def, theCallback, theContainingElementPath, theChildDefinitionPath, theElementDefinitionPath);
+ }
+ break;
}
- break;
- }
- case EXTENSION_DECLARED:
- case UNDECL_EXT: {
- throw new IllegalStateException("state should not happen: " + theDefinition.getChildType());
- }
- case CONTAINED_RESOURCE_LIST: {
- if (theElement != null) {
- BaseRuntimeElementDefinition> def = myContext.getElementDefinition(theElement.getClass());
- visit(theElement, null, def, theCallback, theContainingElementPath, theChildDefinitionPath, theElementDefinitionPath);
+ case EXTENSION_DECLARED:
+ case UNDECL_EXT: {
+ throw new IllegalStateException("state should not happen: " + theDefinition.getChildType());
+ }
+ case CONTAINED_RESOURCE_LIST: {
+ if (theElement != null) {
+ BaseRuntimeElementDefinition> def = myContext.getElementDefinition(theElement.getClass());
+ visit(theElement, null, def, theCallback, theContainingElementPath, theChildDefinitionPath, theElementDefinitionPath);
+ }
+ break;
}
- break;
- }
}
if (theChildDefinition != null) {
@@ -800,16 +801,14 @@ public class FhirTerser {
/**
* Visit all elements in a given resource
- *
+ *
*
* Note on scope: This method will descend into any contained resources ({@link IResource#getContained()}) as well, but will not descend into linked resources (e.g.
* {@link BaseResourceReferenceDt#getResource()}) or embedded resources (e.g. Bundle.entry.resource)
*
- *
- * @param theResource
- * The resource to visit
- * @param theVisitor
- * The visitor
+ *
+ * @param theResource The resource to visit
+ * @param theVisitor The visitor
*/
public void visit(IBaseResource theResource, IModelVisitor theVisitor) {
BaseRuntimeElementCompositeDefinition> def = myContext.getResourceDefinition(theResource);
@@ -818,18 +817,16 @@ public class FhirTerser {
/**
* Visit all elements in a given resource
- *
+ *
* THIS ALTERNATE METHOD IS STILL EXPERIMENTAL
- *
+ *
*
* Note on scope: This method will descend into any contained resources ({@link IResource#getContained()}) as well, but will not descend into linked resources (e.g.
* {@link BaseResourceReferenceDt#getResource()}) or embedded resources (e.g. Bundle.entry.resource)
*
- *
- * @param theResource
- * The resource to visit
- * @param theVisitor
- * The visitor
+ *
+ * @param theResource The resource to visit
+ * @param theVisitor The visitor
*/
void visit(IBaseResource theResource, IModelVisitor2 theVisitor) {
BaseRuntimeElementCompositeDefinition> def = myContext.getResourceDefinition(theResource);
@@ -837,22 +834,22 @@ public class FhirTerser {
}
private void visit(IdentityHashMap theStack, IBaseResource theResource, IBase theElement, List thePathToElement, BaseRuntimeChildDefinition theChildDefinition,
- BaseRuntimeElementDefinition> theDefinition, IModelVisitor theCallback) {
+ BaseRuntimeElementDefinition> theDefinition, IModelVisitor theCallback) {
List pathToElement = addNameToList(thePathToElement, theChildDefinition);
if (theStack.put(theElement, theElement) != null) {
return;
}
-
+
theCallback.acceptElement(theResource, theElement, pathToElement, theChildDefinition, theDefinition);
BaseRuntimeElementDefinition> def = theDefinition;
if (def.getChildType() == ChildTypeEnum.CONTAINED_RESOURCE_LIST) {
def = myContext.getElementDefinition(theElement.getClass());
}
-
+
if (theElement instanceof IBaseReference) {
- IBaseResource target = ((IBaseReference)theElement).getResource();
+ IBaseResource target = ((IBaseReference) theElement).getResource();
if (target != null) {
if (target.getIdElement().hasIdPart() == false || target.getIdElement().isLocal()) {
RuntimeResourceDefinition targetDef = myContext.getResourceDefinition(target);
@@ -860,71 +857,71 @@ public class FhirTerser {
}
}
}
-
+
switch (def.getChildType()) {
- case ID_DATATYPE:
- case PRIMITIVE_XHTML_HL7ORG:
- case PRIMITIVE_XHTML:
- case PRIMITIVE_DATATYPE:
- // These are primitive types
- break;
- case RESOURCE:
- case RESOURCE_BLOCK:
- case COMPOSITE_DATATYPE: {
- BaseRuntimeElementCompositeDefinition> childDef = (BaseRuntimeElementCompositeDefinition>) def;
- for (BaseRuntimeChildDefinition nextChild : childDef.getChildrenAndExtension()) {
-
- List> values = nextChild.getAccessor().getValues(theElement);
- if (values != null) {
- for (Object nextValueObject : values) {
- IBase nextValue;
- try {
- nextValue = (IBase) nextValueObject;
- } catch (ClassCastException e) {
- String s = "Found instance of " + nextValueObject.getClass() + " - Did you set a field value to the incorrect type? Expected " + IBase.class.getName();
- throw new ClassCastException(s);
- }
- if (nextValue == null) {
- continue;
- }
- if (nextValue.isEmpty()) {
- continue;
- }
- BaseRuntimeElementDefinition> childElementDef;
- childElementDef = nextChild.getChildElementDefinitionByDatatype(nextValue.getClass());
+ case ID_DATATYPE:
+ case PRIMITIVE_XHTML_HL7ORG:
+ case PRIMITIVE_XHTML:
+ case PRIMITIVE_DATATYPE:
+ // These are primitive types
+ break;
+ case RESOURCE:
+ case RESOURCE_BLOCK:
+ case COMPOSITE_DATATYPE: {
+ BaseRuntimeElementCompositeDefinition> childDef = (BaseRuntimeElementCompositeDefinition>) def;
+ for (BaseRuntimeChildDefinition nextChild : childDef.getChildrenAndExtension()) {
- if (childElementDef == null) {
- childElementDef = myContext.getElementDefinition(nextValue.getClass());
- }
+ List> values = nextChild.getAccessor().getValues(theElement);
+ if (values != null) {
+ for (Object nextValueObject : values) {
+ IBase nextValue;
+ try {
+ nextValue = (IBase) nextValueObject;
+ } catch (ClassCastException e) {
+ String s = "Found instance of " + nextValueObject.getClass() + " - Did you set a field value to the incorrect type? Expected " + IBase.class.getName();
+ throw new ClassCastException(s);
+ }
+ if (nextValue == null) {
+ continue;
+ }
+ if (nextValue.isEmpty()) {
+ continue;
+ }
+ BaseRuntimeElementDefinition> childElementDef;
+ childElementDef = nextChild.getChildElementDefinitionByDatatype(nextValue.getClass());
- if (nextChild instanceof RuntimeChildDirectResource) {
- // Don't descend into embedded resources
- theCallback.acceptElement(theResource, nextValue, null, nextChild, childElementDef);
- } else {
- visit(theStack, theResource, nextValue, pathToElement, nextChild, childElementDef, theCallback);
+ if (childElementDef == null) {
+ childElementDef = myContext.getElementDefinition(nextValue.getClass());
+ }
+
+ if (nextChild instanceof RuntimeChildDirectResource) {
+ // Don't descend into embedded resources
+ theCallback.acceptElement(theResource, nextValue, null, nextChild, childElementDef);
+ } else {
+ visit(theStack, theResource, nextValue, pathToElement, nextChild, childElementDef, theCallback);
+ }
}
}
}
+ break;
}
- break;
- }
- case CONTAINED_RESOURCES: {
- BaseContainedDt value = (BaseContainedDt) theElement;
- for (IResource next : value.getContainedResources()) {
- def = myContext.getResourceDefinition(next);
- visit(theStack, next, next, pathToElement, null, def, theCallback);
+ case CONTAINED_RESOURCES: {
+ BaseContainedDt value = (BaseContainedDt) theElement;
+ for (IResource next : value.getContainedResources()) {
+ def = myContext.getResourceDefinition(next);
+ visit(theStack, next, next, pathToElement, null, def, theCallback);
+ }
+ break;
+ }
+ case CONTAINED_RESOURCE_LIST:
+ case EXTENSION_DECLARED:
+ case UNDECL_EXT: {
+ throw new IllegalStateException("state should not happen: " + def.getChildType());
}
- break;
}
- case CONTAINED_RESOURCE_LIST:
- case EXTENSION_DECLARED:
- case UNDECL_EXT: {
- throw new IllegalStateException("state should not happen: " + def.getChildType());
- }
- }
-
+
theStack.remove(theElement);
-
+
}
}
From 9906243d2db95bbbd2ac05f34b1e22bf05bd7ee0 Mon Sep 17 00:00:00 2001
From: James Agnew
Date: Thu, 1 Nov 2018 13:58:09 -0400
Subject: [PATCH 16/97] More migrator updates
---
.../uhn/fhir/jpa/migrate/DriverTypeEnum.java | 13 +-
.../ca/uhn/fhir/jpa/migrate/JdbcUtils.java | 260 +++++++++---------
.../tasks/HapiFhirJpaMigrationTasks.java | 8 +-
3 files changed, 143 insertions(+), 138 deletions(-)
diff --git a/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/DriverTypeEnum.java b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/DriverTypeEnum.java
index c3d8a8725ba..98e4cd3a5b7 100644
--- a/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/DriverTypeEnum.java
+++ b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/DriverTypeEnum.java
@@ -8,8 +8,6 @@ import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.DisposableBean;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.datasource.DataSourceTransactionManager;
-import org.springframework.jdbc.datasource.SimpleDriverDataSource;
-import org.springframework.jdbc.datasource.SingleConnectionDataSource;
import org.springframework.transaction.TransactionDefinition;
import org.springframework.transaction.support.TransactionTemplate;
@@ -18,7 +16,6 @@ import javax.sql.DataSource;
import java.sql.Connection;
import java.sql.Driver;
import java.sql.SQLException;
-import java.util.Properties;
/*-
* #%L
@@ -77,13 +74,17 @@ public enum DriverTypeEnum {
throw new InternalErrorException("Unable to find driver class: " + myDriverClassName, e);
}
- BasicDataSource dataSource = new BasicDataSource();
-// dataSource.setAutoCommit(false);
+ BasicDataSource dataSource = new BasicDataSource(){
+ @Override
+ public Connection getConnection() throws SQLException {
+ ourLog.info("Creating new DB connection");
+ return super.getConnection();
+ }
+ };
dataSource.setDriverClassName(myDriverClassName);
dataSource.setUrl(theUrl);
dataSource.setUsername(theUsername);
dataSource.setPassword(thePassword);
-// dataSource.setSuppressClose(true);
DataSourceTransactionManager transactionManager = new DataSourceTransactionManager();
transactionManager.setDataSource(dataSource);
diff --git a/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/JdbcUtils.java b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/JdbcUtils.java
index 4a23cb61366..0b800dbb344 100644
--- a/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/JdbcUtils.java
+++ b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/JdbcUtils.java
@@ -45,56 +45,56 @@ public class JdbcUtils {
public static Set getIndexNames(DriverTypeEnum.ConnectionProperties theConnectionProperties, String theTableName) throws SQLException {
DataSource dataSource = Objects.requireNonNull(theConnectionProperties.getDataSource());
- Connection connection = dataSource.getConnection();
- return theConnectionProperties.getTxTemplate().execute(t -> {
- DatabaseMetaData metadata;
- try {
- metadata = connection.getMetaData();
- ResultSet indexes = metadata.getIndexInfo(null, null, theTableName, false, true);
+ try (Connection connection = dataSource.getConnection()) {
+ return theConnectionProperties.getTxTemplate().execute(t -> {
+ DatabaseMetaData metadata;
+ try {
+ metadata = connection.getMetaData();
+ ResultSet indexes = metadata.getIndexInfo(null, null, theTableName, false, true);
- Set indexNames = new HashSet<>();
- while (indexes.next()) {
+ Set indexNames = new HashSet<>();
+ while (indexes.next()) {
- ourLog.debug("*** Next index: {}", new ColumnMapRowMapper().mapRow(indexes, 0));
+ ourLog.debug("*** Next index: {}", new ColumnMapRowMapper().mapRow(indexes, 0));
- String indexName = indexes.getString("INDEX_NAME");
- indexName = toUpperCase(indexName, Locale.US);
- indexNames.add(indexName);
+ String indexName = indexes.getString("INDEX_NAME");
+ indexName = toUpperCase(indexName, Locale.US);
+ indexNames.add(indexName);
+ }
+
+ return indexNames;
+ } catch (SQLException e) {
+ throw new InternalErrorException(e);
}
-
- return indexNames;
- } catch (SQLException e) {
- throw new InternalErrorException(e);
- }
- });
-
+ });
+ }
}
@SuppressWarnings("ConstantConditions")
public static boolean isIndexUnique(DriverTypeEnum.ConnectionProperties theConnectionProperties, String theTableName, String theIndexName) throws SQLException {
DataSource dataSource = Objects.requireNonNull(theConnectionProperties.getDataSource());
- Connection connection = dataSource.getConnection();
- return theConnectionProperties.getTxTemplate().execute(t -> {
- DatabaseMetaData metadata;
- try {
- metadata = connection.getMetaData();
- ResultSet indexes = metadata.getIndexInfo(null, null, theTableName, false, false);
+ try (Connection connection = dataSource.getConnection()) {
+ return theConnectionProperties.getTxTemplate().execute(t -> {
+ DatabaseMetaData metadata;
+ try {
+ metadata = connection.getMetaData();
+ ResultSet indexes = metadata.getIndexInfo(null, null, theTableName, false, false);
- while (indexes.next()) {
- String indexName = indexes.getString("INDEX_NAME");
- if (theIndexName.equalsIgnoreCase(indexName)) {
- boolean nonUnique = indexes.getBoolean("NON_UNIQUE");
- return !nonUnique;
+ while (indexes.next()) {
+ String indexName = indexes.getString("INDEX_NAME");
+ if (theIndexName.equalsIgnoreCase(indexName)) {
+ boolean nonUnique = indexes.getBoolean("NON_UNIQUE");
+ return !nonUnique;
+ }
}
+
+ } catch (SQLException e) {
+ throw new InternalErrorException(e);
}
- } catch (SQLException e) {
- throw new InternalErrorException(e);
- }
-
- throw new InternalErrorException("Can't find index: " + theIndexName + " on table " + theTableName);
- });
-
+ throw new InternalErrorException("Can't find index: " + theIndexName + " on table " + theTableName);
+ });
+ }
}
/**
@@ -153,34 +153,35 @@ public class JdbcUtils {
*/
public static Set getForeignKeys(DriverTypeEnum.ConnectionProperties theConnectionProperties, String theTableName, String theForeignTable) throws SQLException {
DataSource dataSource = Objects.requireNonNull(theConnectionProperties.getDataSource());
- Connection connection = dataSource.getConnection();
- return theConnectionProperties.getTxTemplate().execute(t -> {
- DatabaseMetaData metadata;
- try {
- metadata = connection.getMetaData();
- ResultSet indexes = metadata.getCrossReference(null, null, theTableName, null, null, theForeignTable);
+ try (Connection connection = dataSource.getConnection()) {
+ return theConnectionProperties.getTxTemplate().execute(t -> {
+ DatabaseMetaData metadata;
+ try {
+ metadata = connection.getMetaData();
+ ResultSet indexes = metadata.getCrossReference(null, null, theTableName, null, null, theForeignTable);
- Set columnNames = new HashSet<>();
- while (indexes.next()) {
- String tableName = toUpperCase(indexes.getString("PKTABLE_NAME"), Locale.US);
- if (!theTableName.equalsIgnoreCase(tableName)) {
- continue;
- }
- tableName = toUpperCase(indexes.getString("FKTABLE_NAME"), Locale.US);
- if (!theForeignTable.equalsIgnoreCase(tableName)) {
- continue;
+ Set columnNames = new HashSet<>();
+ while (indexes.next()) {
+ String tableName = toUpperCase(indexes.getString("PKTABLE_NAME"), Locale.US);
+ if (!theTableName.equalsIgnoreCase(tableName)) {
+ continue;
+ }
+ tableName = toUpperCase(indexes.getString("FKTABLE_NAME"), Locale.US);
+ if (!theForeignTable.equalsIgnoreCase(tableName)) {
+ continue;
+ }
+
+ String fkName = indexes.getString("FK_NAME");
+ fkName = toUpperCase(fkName, Locale.US);
+ columnNames.add(fkName);
}
- String fkName = indexes.getString("FK_NAME");
- fkName = toUpperCase(fkName, Locale.US);
- columnNames.add(fkName);
+ return columnNames;
+ } catch (SQLException e) {
+ throw new InternalErrorException(e);
}
-
- return columnNames;
- } catch (SQLException e) {
- throw new InternalErrorException(e);
- }
- });
+ });
+ }
}
/**
@@ -188,95 +189,96 @@ public class JdbcUtils {
*/
public static Set getColumnNames(DriverTypeEnum.ConnectionProperties theConnectionProperties, String theTableName) throws SQLException {
DataSource dataSource = Objects.requireNonNull(theConnectionProperties.getDataSource());
- Connection connection = dataSource.getConnection();
- return theConnectionProperties.getTxTemplate().execute(t -> {
- DatabaseMetaData metadata;
- try {
- metadata = connection.getMetaData();
- ResultSet indexes = metadata.getColumns(null, null, null, null);
+ try (Connection connection = dataSource.getConnection()) {
+ return theConnectionProperties.getTxTemplate().execute(t -> {
+ DatabaseMetaData metadata;
+ try {
+ metadata = connection.getMetaData();
+ ResultSet indexes = metadata.getColumns(null, null, null, null);
- Set columnNames = new HashSet<>();
- while (indexes.next()) {
- String tableName = toUpperCase(indexes.getString("TABLE_NAME"), Locale.US);
- if (!theTableName.equalsIgnoreCase(tableName)) {
- continue;
+ Set columnNames = new HashSet<>();
+ while (indexes.next()) {
+ String tableName = toUpperCase(indexes.getString("TABLE_NAME"), Locale.US);
+ if (!theTableName.equalsIgnoreCase(tableName)) {
+ continue;
+ }
+
+ String columnName = indexes.getString("COLUMN_NAME");
+ columnName = toUpperCase(columnName, Locale.US);
+ columnNames.add(columnName);
}
- String columnName = indexes.getString("COLUMN_NAME");
- columnName = toUpperCase(columnName, Locale.US);
- columnNames.add(columnName);
+ return columnNames;
+ } catch (SQLException e) {
+ throw new InternalErrorException(e);
}
-
- return columnNames;
- } catch (SQLException e) {
- throw new InternalErrorException(e);
- }
- });
-
+ });
+ }
}
public static Set getTableNames(DriverTypeEnum.ConnectionProperties theConnectionProperties) throws SQLException {
DataSource dataSource = Objects.requireNonNull(theConnectionProperties.getDataSource());
- Connection connection = dataSource.getConnection();
- return theConnectionProperties.getTxTemplate().execute(t -> {
- DatabaseMetaData metadata;
- try {
- metadata = connection.getMetaData();
- ResultSet tables = metadata.getTables(null, null, null, null);
+ try (Connection connection = dataSource.getConnection()) {
+ return theConnectionProperties.getTxTemplate().execute(t -> {
+ DatabaseMetaData metadata;
+ try {
+ metadata = connection.getMetaData();
+ ResultSet tables = metadata.getTables(null, null, null, null);
- Set columnNames = new HashSet<>();
- while (tables.next()) {
- String tableName = tables.getString("TABLE_NAME");
- tableName = toUpperCase(tableName, Locale.US);
+ Set columnNames = new HashSet<>();
+ while (tables.next()) {
+ String tableName = tables.getString("TABLE_NAME");
+ tableName = toUpperCase(tableName, Locale.US);
- String tableType = tables.getString("TABLE_TYPE");
- if ("SYSTEM TABLE".equalsIgnoreCase(tableType)) {
- continue;
+ String tableType = tables.getString("TABLE_TYPE");
+ if ("SYSTEM TABLE".equalsIgnoreCase(tableType)) {
+ continue;
+ }
+
+ columnNames.add(tableName);
}
- columnNames.add(tableName);
+ return columnNames;
+ } catch (SQLException e) {
+ throw new InternalErrorException(e);
}
-
- return columnNames;
- } catch (SQLException e) {
- throw new InternalErrorException(e);
- }
- });
+ });
+ }
}
public static boolean isColumnNullable(DriverTypeEnum.ConnectionProperties theConnectionProperties, String theTableName, String theColumnName) throws SQLException {
DataSource dataSource = Objects.requireNonNull(theConnectionProperties.getDataSource());
- Connection connection = dataSource.getConnection();
- //noinspection ConstantConditions
- return theConnectionProperties.getTxTemplate().execute(t -> {
- DatabaseMetaData metadata;
- try {
- metadata = connection.getMetaData();
- ResultSet tables = metadata.getColumns(null, null, null, null);
+ try (Connection connection = dataSource.getConnection()) {
+ //noinspection ConstantConditions
+ return theConnectionProperties.getTxTemplate().execute(t -> {
+ DatabaseMetaData metadata;
+ try {
+ metadata = connection.getMetaData();
+ ResultSet tables = metadata.getColumns(null, null, null, null);
- while (tables.next()) {
- String tableName = toUpperCase(tables.getString("TABLE_NAME"), Locale.US);
- if (!theTableName.equalsIgnoreCase(tableName)) {
- continue;
- }
+ while (tables.next()) {
+ String tableName = toUpperCase(tables.getString("TABLE_NAME"), Locale.US);
+ if (!theTableName.equalsIgnoreCase(tableName)) {
+ continue;
+ }
- if (theColumnName.equalsIgnoreCase(tables.getString("COLUMN_NAME"))) {
- String nullable = tables.getString("IS_NULLABLE");
- if ("YES".equalsIgnoreCase(nullable)) {
- return true;
- } else if ("NO".equalsIgnoreCase(nullable)) {
- return false;
- } else {
- throw new IllegalStateException("Unknown nullable: " + nullable);
+ if (theColumnName.equalsIgnoreCase(tables.getString("COLUMN_NAME"))) {
+ String nullable = tables.getString("IS_NULLABLE");
+ if ("YES".equalsIgnoreCase(nullable)) {
+ return true;
+ } else if ("NO".equalsIgnoreCase(nullable)) {
+ return false;
+ } else {
+ throw new IllegalStateException("Unknown nullable: " + nullable);
+ }
}
}
+
+ throw new IllegalStateException("Did not find column " + theColumnName);
+ } catch (SQLException e) {
+ throw new InternalErrorException(e);
}
-
- throw new IllegalStateException("Did not find column " + theColumnName);
- } catch (SQLException e) {
- throw new InternalErrorException(e);
- }
- });
-
+ });
+ }
}
}
diff --git a/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java
index c02246a3a14..1c1beac3280 100644
--- a/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java
+++ b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java
@@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.migrate.tasks;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -277,7 +277,6 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks {
Builder.BuilderWithTableName spp = version.onTable("HFJ_RES_PARAM_PRESENT");
version.startSectionWithMessage("Starting work on table: " + spp.getTableName());
spp.dropIndex("IDX_RESPARMPRESENT_SPID_RESID");
- spp.dropColumn("SP_ID");
spp
.addColumn("HASH_PRESENCE")
.nullable()
@@ -307,6 +306,9 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks {
});
version.addTask(consolidateSearchParamPresenceIndexesTask);
+ // SP_ID is no longer needed
+ spp.dropColumn("SP_ID");
+
// Concept
Builder.BuilderWithTableName trmConcept = version.onTable("TRM_CONCEPT");
version.startSectionWithMessage("Starting work on table: " + trmConcept.getTableName());
From 8c2d868f16725db52454673a5b12b852a856fa36 Mon Sep 17 00:00:00 2001
From: jamesagnew
Date: Thu, 1 Nov 2018 16:13:34 -0400
Subject: [PATCH 17/97] License updates
---
hapi-fhir-base/src/main/java/ca/uhn/fhir/util/FhirTerser.java | 4 ++--
.../uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java | 4 ++--
2 files changed, 4 insertions(+), 4 deletions(-)
diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/FhirTerser.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/FhirTerser.java
index 7fa863b524d..ec8f590d723 100644
--- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/FhirTerser.java
+++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/FhirTerser.java
@@ -28,9 +28,9 @@ import static org.apache.commons.lang3.StringUtils.*;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
diff --git a/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java
index 1c1beac3280..ef93f2e8f02 100644
--- a/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java
+++ b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java
@@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.migrate.tasks;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
From b1283791caa74fd7c6cf88ea3064a7f3f793cfb8 Mon Sep 17 00:00:00 2001
From: James Agnew
Date: Fri, 2 Nov 2018 16:45:21 -0400
Subject: [PATCH 18/97] Allow JPA server to restore resources and link to them
in a single transaction
---
.../fhir/jpa/dao/BaseHapiFhirResourceDao.java | 13 +
.../jpa/provider/SystemProviderDstu2Test.java | 158 +++++--
.../jpa/provider/r4/SystemProviderR4Test.java | 61 ++-
.../resources/dstu2/createdeletebundle.json | 438 ++++++++++++++++++
.../test/resources/r4/createdeletebundle.json | 37 ++
src/changes/changes.xml | 5 +
6 files changed, 658 insertions(+), 54 deletions(-)
create mode 100644 hapi-fhir-jpaserver-base/src/test/resources/dstu2/createdeletebundle.json
create mode 100644 hapi-fhir-jpaserver-base/src/test/resources/r4/createdeletebundle.json
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java
index d5cc0ce599c..7d7e65ca449 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java
@@ -1253,6 +1253,19 @@ public abstract class BaseHapiFhirResourceDao extends B
IBaseResource oldResource = toResource(entity, false);
+ /*
+ * Mark the entity as not deleted - This is also done in the actual updateInternal()
+ * method later on so it usually doesn't matter whether we do it here, but in the
+ * case of a transaction with multiple PUTs we don't get there until later so
+ * having this here means that a transaction can have a reference in one
+ * resource to another resource in the same transaction that is being
+ * un-deleted by the transaction. Wacky use case, sure. But it's real.
+ *
+ * See SystemProviderR4Test#testTransactionReSavesPreviouslyDeletedResources
+ * for a test that needs this.
+ */
+ entity.setDeleted(null);
+
/*
* If we aren't indexing, that means we're doing this inside a transaction.
* The transaction will do the actual storage to the database a bit later on,
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/SystemProviderDstu2Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/SystemProviderDstu2Test.java
index 99340a794c3..ae52fbdfcd3 100644
--- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/SystemProviderDstu2Test.java
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/SystemProviderDstu2Test.java
@@ -1,12 +1,25 @@
package ca.uhn.fhir.jpa.provider;
-import static org.hamcrest.Matchers.*;
-import static org.junit.Assert.*;
-
-import java.io.InputStream;
-import java.nio.charset.StandardCharsets;
-import java.util.concurrent.TimeUnit;
-
+import ca.uhn.fhir.context.FhirContext;
+import ca.uhn.fhir.jpa.dao.dstu2.BaseJpaDstu2Test;
+import ca.uhn.fhir.jpa.rp.dstu2.*;
+import ca.uhn.fhir.jpa.testutil.RandomServerPortProvider;
+import ca.uhn.fhir.model.dstu2.resource.*;
+import ca.uhn.fhir.model.dstu2.valueset.BundleTypeEnum;
+import ca.uhn.fhir.model.dstu2.valueset.HTTPVerbEnum;
+import ca.uhn.fhir.model.primitive.DecimalDt;
+import ca.uhn.fhir.model.primitive.IdDt;
+import ca.uhn.fhir.model.primitive.StringDt;
+import ca.uhn.fhir.rest.api.EncodingEnum;
+import ca.uhn.fhir.rest.client.api.IGenericClient;
+import ca.uhn.fhir.rest.server.FifoMemoryPagingProvider;
+import ca.uhn.fhir.rest.server.RestfulServer;
+import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
+import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException;
+import ca.uhn.fhir.rest.server.interceptor.ResponseHighlighterInterceptor;
+import ca.uhn.fhir.util.BundleUtil;
+import ca.uhn.fhir.util.TestUtil;
+import com.google.common.base.Charsets;
import org.apache.commons.io.IOUtils;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
@@ -17,46 +30,32 @@ import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.servlet.ServletContextHandler;
import org.eclipse.jetty.servlet.ServletHolder;
import org.hl7.fhir.instance.model.api.IIdType;
+import org.hl7.fhir.r4.model.IdType;
import org.junit.AfterClass;
import org.junit.Before;
-import org.junit.Test;
import org.junit.Ignore;
+import org.junit.Test;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
-import ca.uhn.fhir.context.FhirContext;
-import ca.uhn.fhir.jpa.dao.dstu2.BaseJpaDstu2Test;
-import ca.uhn.fhir.jpa.rp.dstu2.*;
-import ca.uhn.fhir.jpa.testutil.RandomServerPortProvider;
-import ca.uhn.fhir.model.dstu2.resource.*;
-import ca.uhn.fhir.model.dstu2.valueset.BundleTypeEnum;
-import ca.uhn.fhir.model.dstu2.valueset.HTTPVerbEnum;
-import ca.uhn.fhir.model.primitive.*;
-import ca.uhn.fhir.rest.api.EncodingEnum;
-import ca.uhn.fhir.rest.client.api.IGenericClient;
-import ca.uhn.fhir.rest.server.FifoMemoryPagingProvider;
-import ca.uhn.fhir.rest.server.RestfulServer;
-import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
-import ca.uhn.fhir.rest.server.interceptor.ResponseHighlighterInterceptor;
-import ca.uhn.fhir.util.TestUtil;
+import java.io.IOException;
+import java.io.InputStream;
+import java.nio.charset.StandardCharsets;
+import java.util.concurrent.TimeUnit;
+
+import static org.hamcrest.Matchers.*;
+import static org.junit.Assert.*;
public class SystemProviderDstu2Test extends BaseJpaDstu2Test {
+ private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(SystemProviderDstu2Test.class);
private static RestfulServer myRestServer;
private static IGenericClient ourClient;
private static FhirContext ourCtx;
private static CloseableHttpClient ourHttpClient;
- private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(SystemProviderDstu2Test.class);
private static Server ourServer;
private static String ourServerBase;
- @AfterClass
- public static void afterClassClearContext() throws Exception {
- ourServer.stop();
- TestUtil.clearAllStaticFieldsForUnitTest();
- }
-
-
@Before
public void beforeStartServer() throws Exception {
if (myRestServer == null) {
@@ -72,9 +71,23 @@ public class SystemProviderDstu2Test extends BaseJpaDstu2Test {
OrganizationResourceProvider organizationRp = new OrganizationResourceProvider();
organizationRp.setDao(myOrganizationDao);
+ LocationResourceProvider locationRp = new LocationResourceProvider();
+ locationRp.setDao(myLocationDao);
+
+ BinaryResourceProvider binaryRp = new BinaryResourceProvider();
+ binaryRp.setDao(myBinaryDao);
+
+ DiagnosticReportResourceProvider diagnosticReportRp = new DiagnosticReportResourceProvider();
+ diagnosticReportRp.setDao(myDiagnosticReportDao);
+ DiagnosticOrderResourceProvider diagnosticOrderRp = new DiagnosticOrderResourceProvider();
+ diagnosticOrderRp.setDao(myDiagnosticOrderDao);
+ PractitionerResourceProvider practitionerRp = new PractitionerResourceProvider();
+ practitionerRp.setDao(myPractitionerDao);
+
+
RestfulServer restServer = new RestfulServer(ourCtx);
restServer.setPagingProvider(new FifoMemoryPagingProvider(10).setDefaultPageSize(10));
- restServer.setResourceProviders(patientRp, questionnaireRp, observationRp, organizationRp);
+ restServer.setResourceProviders(patientRp, questionnaireRp, observationRp, organizationRp, binaryRp, locationRp, diagnosticReportRp, diagnosticOrderRp, practitionerRp);
restServer.setPlainProviders(mySystemProvider);
@@ -157,10 +170,10 @@ public class SystemProviderDstu2Test extends BaseJpaDstu2Test {
ourLog.info(response);
assertThat(response, not(containsString("_format")));
assertEquals(200, http.getStatusLine().getStatusCode());
-
+
Bundle responseBundle = ourCtx.newXmlParser().parseResource(Bundle.class, response);
assertEquals(BundleTypeEnum.SEARCH_RESULTS, responseBundle.getTypeElement().getValueAsEnum());
-
+
} finally {
http.close();
}
@@ -179,10 +192,10 @@ public class SystemProviderDstu2Test extends BaseJpaDstu2Test {
}
}
- @Transactional(propagation=Propagation.NEVER)
+ @Transactional(propagation = Propagation.NEVER)
@Test
public void testSuggestKeywords() throws Exception {
-
+
Patient patient = new Patient();
patient.addName().addFamily("testSuggest");
IIdType ptId = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless();
@@ -197,21 +210,21 @@ public class SystemProviderDstu2Test extends BaseJpaDstu2Test {
obs.getSubject().setReference(ptId);
obs.getCode().setText("ZXCVBNM ASDFGHJKL QWERTYUIOPASDFGHJKL");
myObservationDao.update(obs, mySrd);
-
+
HttpGet get = new HttpGet(ourServerBase + "/$suggest-keywords?context=Patient/" + ptId.getIdPart() + "/$everything&searchParam=_content&text=zxc&_pretty=true&_format=xml");
CloseableHttpResponse http = ourHttpClient.execute(get);
try {
assertEquals(200, http.getStatusLine().getStatusCode());
String output = IOUtils.toString(http.getEntity().getContent(), StandardCharsets.UTF_8);
ourLog.info(output);
-
+
Parameters parameters = ourCtx.newXmlParser().parseResource(Parameters.class, output);
assertEquals(2, parameters.getParameter().size());
assertEquals("keyword", parameters.getParameter().get(0).getPart().get(0).getName());
assertEquals(new StringDt("ZXCVBNM"), parameters.getParameter().get(0).getPart().get(0).getValue());
assertEquals("score", parameters.getParameter().get(0).getPart().get(1).getName());
assertEquals(new DecimalDt("1.0"), parameters.getParameter().get(0).getPart().get(1).getValue());
-
+
} finally {
http.close();
}
@@ -227,7 +240,7 @@ public class SystemProviderDstu2Test extends BaseJpaDstu2Test {
obs.getSubject().setReference(ptId);
obs.getCode().setText("ZXCVBNM ASDFGHJKL QWERTYUIOPASDFGHJKL");
myObservationDao.create(obs, mySrd);
-
+
HttpGet get = new HttpGet(ourServerBase + "/$suggest-keywords");
CloseableHttpResponse http = ourHttpClient.execute(get);
try {
@@ -238,7 +251,7 @@ public class SystemProviderDstu2Test extends BaseJpaDstu2Test {
} finally {
http.close();
}
-
+
get = new HttpGet(ourServerBase + "/$suggest-keywords?context=Patient/" + ptId.getIdPart() + "/$everything");
http = ourHttpClient.execute(get);
try {
@@ -269,6 +282,44 @@ public class SystemProviderDstu2Test extends BaseJpaDstu2Test {
assertEquals("get-resource-counts", op.getCode());
}
+ @Test
+ public void testTransactionReSavesPreviouslyDeletedResources() throws IOException {
+
+ for (int i = 0; i < 10; i++) {
+ ourLog.info("** Beginning pass {}", i);
+
+ Bundle input = myFhirCtx.newJsonParser().parseResource(Bundle.class, IOUtils.toString(getClass().getResourceAsStream("/dstu2/createdeletebundle.json"), Charsets.UTF_8));
+ ourClient.transaction().withBundle(input).execute();
+
+ myPatientDao.read(new IdType("Patient/Patient1063259"));
+
+ deleteAllOfType("Binary");
+ deleteAllOfType("Location");
+ deleteAllOfType("DiagnosticReport");
+ deleteAllOfType("Observation");
+ deleteAllOfType("DiagnosticOrder");
+ deleteAllOfType("Practitioner");
+ deleteAllOfType("Patient");
+ deleteAllOfType("Organization");
+
+ try {
+ myPatientDao.read(new IdType("Patient/Patient1063259"));
+ fail();
+ } catch (ResourceGoneException e) {
+ // good
+ }
+
+ }
+
+ }
+
+ private void deleteAllOfType(String theType) {
+ BundleUtil.toListOfResources(myFhirCtx, ourClient.search().forResource(theType).execute())
+ .forEach(t -> {
+ ourClient.delete().resourceById(t.getIdElement()).execute();
+ });
+ }
+
@Test
public void testTransactionFromBundle() throws Exception {
InputStream bundleRes = SystemProviderDstu2Test.class.getResourceAsStream("/transaction_link_patient_eve.xml");
@@ -372,20 +423,20 @@ public class SystemProviderDstu2Test extends BaseJpaDstu2Test {
@Test
public void testTransactionSearch() throws Exception {
- for (int i = 0; i < 20; i ++) {
+ for (int i = 0; i < 20; i++) {
Patient p = new Patient();
p.addName().addFamily("PATIENT_" + i);
myPatientDao.create(p, mySrd);
}
-
+
Bundle req = new Bundle();
req.setType(BundleTypeEnum.TRANSACTION);
req.addEntry().getRequest().setMethod(HTTPVerbEnum.GET).setUrl("Patient?");
Bundle resp = ourClient.transaction().withBundle(req).execute();
ourLog.info(ourCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(resp));
-
+
assertEquals(1, resp.getEntry().size());
- Bundle respSub = (Bundle)resp.getEntry().get(0).getResource();
+ Bundle respSub = (Bundle) resp.getEntry().get(0).getResource();
assertEquals("self", respSub.getLink().get(0).getRelation());
assertEquals(ourServerBase + "/Patient", respSub.getLink().get(0).getUrl());
assertEquals("next", respSub.getLink().get(1).getRelation());
@@ -396,20 +447,20 @@ public class SystemProviderDstu2Test extends BaseJpaDstu2Test {
@Test
public void testTransactionCount() throws Exception {
- for (int i = 0; i < 20; i ++) {
+ for (int i = 0; i < 20; i++) {
Patient p = new Patient();
p.addName().addFamily("PATIENT_" + i);
myPatientDao.create(p, mySrd);
}
-
+
Bundle req = new Bundle();
req.setType(BundleTypeEnum.TRANSACTION);
req.addEntry().getRequest().setMethod(HTTPVerbEnum.GET).setUrl("Patient?_summary=count");
Bundle resp = ourClient.transaction().withBundle(req).execute();
ourLog.info(ourCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(resp));
-
+
assertEquals(1, resp.getEntry().size());
- Bundle respSub = (Bundle)resp.getEntry().get(0).getResource();
+ Bundle respSub = (Bundle) resp.getEntry().get(0).getResource();
assertEquals(20, respSub.getTotal().intValue());
assertEquals(0, respSub.getEntry().size());
}
@@ -423,9 +474,16 @@ public class SystemProviderDstu2Test extends BaseJpaDstu2Test {
ourLog.info(output);
assertEquals(200, http.getStatusLine().getStatusCode());
} finally {
- IOUtils.closeQuietly(http);;
+ IOUtils.closeQuietly(http);
+ ;
}
}
+ @AfterClass
+ public static void afterClassClearContext() throws Exception {
+ ourServer.stop();
+ TestUtil.clearAllStaticFieldsForUnitTest();
+ }
+
}
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/SystemProviderR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/SystemProviderR4Test.java
index 0a74eb50b8b..1c1b2903a0b 100644
--- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/SystemProviderR4Test.java
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/SystemProviderR4Test.java
@@ -3,9 +3,7 @@ package ca.uhn.fhir.jpa.provider.r4;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.dao.r4.BaseJpaR4Test;
import ca.uhn.fhir.jpa.provider.SystemProviderDstu2Test;
-import ca.uhn.fhir.jpa.rp.r4.ObservationResourceProvider;
-import ca.uhn.fhir.jpa.rp.r4.OrganizationResourceProvider;
-import ca.uhn.fhir.jpa.rp.r4.PatientResourceProvider;
+import ca.uhn.fhir.jpa.rp.r4.*;
import ca.uhn.fhir.jpa.testutil.RandomServerPortProvider;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.EncodingEnum;
@@ -17,8 +15,10 @@ import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException;
import ca.uhn.fhir.rest.server.interceptor.RequestValidatingInterceptor;
import ca.uhn.fhir.rest.server.interceptor.ResponseHighlighterInterceptor;
+import ca.uhn.fhir.util.BundleUtil;
import ca.uhn.fhir.util.TestUtil;
import ca.uhn.fhir.validation.ResultSeverityEnum;
+import com.google.common.base.Charsets;
import org.apache.commons.io.IOUtils;
import org.apache.http.Header;
import org.apache.http.client.methods.CloseableHttpResponse;
@@ -42,6 +42,7 @@ import org.junit.*;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
+import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.util.concurrent.TimeUnit;
@@ -88,8 +89,21 @@ public class SystemProviderR4Test extends BaseJpaR4Test {
OrganizationResourceProvider organizationRp = new OrganizationResourceProvider();
organizationRp.setDao(myOrganizationDao);
+ LocationResourceProvider locationRp = new LocationResourceProvider();
+ locationRp.setDao(myLocationDao);
+
+ BinaryResourceProvider binaryRp = new BinaryResourceProvider();
+ binaryRp.setDao(myBinaryDao);
+
+ DiagnosticReportResourceProvider diagnosticReportRp = new DiagnosticReportResourceProvider();
+ diagnosticReportRp.setDao(myDiagnosticReportDao);
+ ServiceRequestResourceProvider diagnosticOrderRp = new ServiceRequestResourceProvider();
+ diagnosticOrderRp.setDao(myServiceRequestDao);
+ PractitionerResourceProvider practitionerRp = new PractitionerResourceProvider();
+ practitionerRp.setDao(myPractitionerDao);
+
RestfulServer restServer = new RestfulServer(ourCtx);
- restServer.setResourceProviders(patientRp, questionnaireRp, observationRp, organizationRp);
+ restServer.setResourceProviders(patientRp, questionnaireRp, observationRp, organizationRp, locationRp, binaryRp, diagnosticReportRp, diagnosticOrderRp, practitionerRp);
restServer.setPlainProviders(mySystemProvider);
@@ -385,6 +399,45 @@ public class SystemProviderR4Test extends BaseJpaR4Test {
assertEquals("201 Created", resp.getEntry().get(0).getResponse().getStatus());
}
+
+ @Test
+ public void testTransactionReSavesPreviouslyDeletedResources() throws IOException {
+
+ for (int i = 0; i < 10; i++) {
+ ourLog.info("** Beginning pass {}", i);
+
+ Bundle input = myFhirCtx.newJsonParser().parseResource(Bundle.class, IOUtils.toString(getClass().getResourceAsStream("/r4/createdeletebundle.json"), Charsets.UTF_8));
+ ourClient.transaction().withBundle(input).execute();
+
+ myPatientDao.read(new IdType("Patient/Patient1063259"));
+
+ deleteAllOfType("Binary");
+ deleteAllOfType("Location");
+ deleteAllOfType("DiagnosticReport");
+ deleteAllOfType("Observation");
+ deleteAllOfType("ServiceRequest");
+ deleteAllOfType("Practitioner");
+ deleteAllOfType("Patient");
+ deleteAllOfType("Organization");
+
+ try {
+ myPatientDao.read(new IdType("Patient/Patient1063259"));
+ fail();
+ } catch (ResourceGoneException e) {
+ // good
+ }
+
+ }
+
+ }
+
+ private void deleteAllOfType(String theType) {
+ BundleUtil.toListOfResources(myFhirCtx, ourClient.search().forResource(theType).execute())
+ .forEach(t -> {
+ ourClient.delete().resourceById(t.getIdElement()).execute();
+ });
+ }
+
@Test
public void testTransactionDeleteWithDuplicateDeletes() throws Exception {
myDaoConfig.setAllowInlineMatchUrlReferences(true);
diff --git a/hapi-fhir-jpaserver-base/src/test/resources/dstu2/createdeletebundle.json b/hapi-fhir-jpaserver-base/src/test/resources/dstu2/createdeletebundle.json
new file mode 100644
index 00000000000..acd21119b49
--- /dev/null
+++ b/hapi-fhir-jpaserver-base/src/test/resources/dstu2/createdeletebundle.json
@@ -0,0 +1,438 @@
+ {
+ "resourceType": "Bundle",
+ "type": "transaction",
+ "entry": [
+ {
+ "fullUrl": "Organization/OrgSJHMC",
+ "resource": {
+ "resourceType": "Organization",
+ "id": "OrgSJHMC",
+ "identifier": [
+ {
+ "system": "http://www.foo.com/fhir/OrganizationIdentifier",
+ "value": "SJHMC"
+ }
+ ],
+ "name": "SJHMC"
+ },
+ "request": {
+ "method": "PUT",
+ "url": "Organization/OrgSJHMC"
+ }
+ },
+ {
+ "fullUrl": "Binary/BinaryQ4564699444",
+ "resource": {
+ "resourceType": "Binary",
+ "id": "BinaryQ4564699444",
+ "contentType": "text/plain",
+ "content": "MSH|^~\&|HNAM|SJHMC|HNAM|SJHMC|20160314110329||ORU^R01|Q4564699444|T|2.4||||||8859/1PID|1|963258|963258^^^SJHMC_MRN^MRN~1063259^^^AZ_EID||Boba^Fett||19711012|F||1|124 W THOMAS RD^^PHOENIX^AZ^85013||(602)666-5555|(000)000-0000|1|S|NON|18513341^^^SJHMC_FIN||||2|||0PV1|1|P|TOW8^8T22^01^SJHMC|R|||057539^Fgdeg^Ugngx^^^^^^SJHMC_ORG_DOCNUM^1699710046|057539^Fgdeg^Ugngx^^^^^^SJHMC_ORG_DOCNUM^1699710046||LTS||||RA|||057539^Fgdeg^Ugngx^^^^^^SJHMC_ORG_DOCNUM^1699710046|I||E|||||||||||||||||||SJHMC||A|||20160313090000ORC|REOBR|1|5674832^HNAM_ORDERID||SPATH^Surgical Pathology Report|||20160313103900|||||||||12345^TestG^ED Physician||||00010SP20160000336^HNA_ACCN~41673658^HNA_ACCNID||20160313105850||AP|F||1|||||&Rigpmga&Ntgaai-CC&&&Application Sys Analyst II - L||12345^TestG^ED PhysicianOBX|1|TX|SURGPATH^Surgical Pathology Report^^SPATH^Surgical Pathology Report||||||||F|||20160313105850OBX|2|TX|SURGPATH^Surgical Pathology Report^^SPATH^Surgical Pathology Report||||||||F|||20160313105850OBX|3|TX|SURGPATH^Surgical Pathology Report^^SPATH^Surgical Pathology Report||||||||F|||20160313105850OBX|4|TX|SURGPATH^Surgical Pathology Report^^SPATH^Surgical Pathology Report||||||||F|||20160313105850OBX|5|TX|SURGPATH^Surgical Pathology Report^^SPATH^Surgical Pathology Report||                              Surgical Pathology Report||||||F|||20160313105850OBX|6|TX|SURGPATH^Surgical Pathology Report^^SPATH^Surgical Pathology Report||||||||F|||20160313105850OBX|7|TX|SURGPATH^Surgical Pathology Report^^SPATH^Surgical Pathology Report||     Collected Date/Time    Received Date/Time               Accession Number||||||F|||20160313105850OBX|8|TX|SURGPATH^Surgical Pathology Report^^SPATH^Surgical Pathology Report||     03/13/2016 10:39:00    03/13/2016 10:52:50              10-SP-17-000336||||||F|||20160313105850OBX|9|TX|SURGPATH^Surgical Pathology Report^^SPATH^Surgical Pathology Report||     MST                    MST||||||F|||20160313105850OBX|10|TX|SURGPATH^Surgical 
Pathology Report^^SPATH^Surgical Pathology Report||||||||F|||20160313105850OBX|11|TX|SURGPATH^Surgical Pathology Report^^SPATH^Surgical Pathology Report||                                      Diagnosis||||||F|||20160313105850OBX|12|TX|SURGPATH^Surgical Pathology Report^^SPATH^Surgical Pathology Report||     1-3.  Lung, left upper lobe, CT guided biopsies with touch preparation:||||||F|||20160313105850OBX|13|TX|SURGPATH^Surgical Pathology Report^^SPATH^Surgical Pathology Report||       - Poorly differentiated non-small cell carcinoma, pending special stains||||||F|||20160313105850OBX|14|TX|SURGPATH^Surgical Pathology Report^^SPATH^Surgical Pathology Report||||||||F|||20160313105850OBX|15|TX|SURGPATH^Surgical Pathology Report^^SPATH^Surgical Pathology Report||     Ntgaai-CC Rigpmga, Application Sys Analyst II - L||||||F|||20160313105850OBX|16|TX|SURGPATH^Surgical Pathology Report^^SPATH^Surgical Pathology Report||     (Electronically signed)||||||F|||20160313105850OBX|17|TX|SURGPATH^Surgical Pathology Report^^SPATH^Surgical Pathology Report||     Verified: 03/13/2016 10:58||||||F|||20160313105850OBX|18|TX|SURGPATH^Surgical Pathology Report^^SPATH^Surgical Pathology Report||     NR /NR||||||F|||20160313105850OBX|19|TX|SURGPATH^Surgical Pathology Report^^SPATH^Surgical Pathology Report||||||||F|||20160313105850OBX|20|TX|SURGPATH^Surgical Pathology Report^^SPATH^Surgical Pathology Report||                                Clinical Information||||||F|||20160313105850OBX|21|TX|SURGPATH^Surgical Pathology Report^^SPATH^Surgical Pathology Report||     Pre-op diagnosis: Transplant||||||F|||20160313105850OBX|22|TX|SURGPATH^Surgical Pathology Report^^SPATH^Surgical Pathology Report||     Procedure: Biopsy||||||F|||20160313105850OBX|23|TX|SURGPATH^Surgical Pathology Report^^SPATH^Surgical Pathology Report||     Post-op diagnosis: N/A||||||F|||20160313105850OBX|24|TX|SURGPATH^Surgical Pathology Report^^SPATH^Surgical Pathology Report||     Clinical History: Lung 
transplant||||||F|||20160313105850OBX|25|TX|SURGPATH^Surgical Pathology Report^^SPATH^Surgical Pathology Report||||||||F|||20160313105850OBX|26|TX|SURGPATH^Surgical Pathology Report^^SPATH^Surgical Pathology Report||                                 Specimen Submitted||||||F|||20160313105850OBX|27|TX|SURGPATH^Surgical Pathology Report^^SPATH^Surgical Pathology Report||     LUNG, TRNSBR BX||||||F|||20160313105850OBX|28|TX|SURGPATH^Surgical Pathology Report^^SPATH^Surgical Pathology Report||||||||F|||20160313105850OBX|29|TX|SURGPATH^Surgical Pathology Report^^SPATH^Surgical Pathology Report||                                  Gross Description||||||F|||20160313105850OBX|30|TX|SURGPATH^Surgical Pathology Report^^SPATH^Surgical Pathology Report||     1. Received in formalin labeled with the patient's name, medical record||||||F|||20160313105850OBX|31|TX|SURGPATH^Surgical Pathology Report^^SPATH^Surgical Pathology Report||     number and left upper lobe core biopsy, is a single red-tan, variegated,||||||F|||20160313105850OBX|32|TX|SURGPATH^Surgical Pathology Report^^SPATH^Surgical Pathology Report||     friable soft tissue core, 0.9 cm.  The specimen is entirely submitted in||||||F|||20160313105850OBX|33|TX|SURGPATH^Surgical Pathology Report^^SPATH^Surgical Pathology Report||     cassette 1A.||||||F|||20160313105850OBX|34|TX|SURGPATH^Surgical Pathology Report^^SPATH^Surgical Pathology Report||||||||F|||20160313105850OBX|35|TX|SURGPATH^Surgical Pathology Report^^SPATH^Surgical Pathology Report||     2. Received in formalin labeled with the patient's name, medical record||||||F|||20160313105850OBX|36|TX|SURGPATH^Surgical Pathology Report^^SPATH^Surgical Pathology Report||     number and left upper lobe core biopsy, are two pale gray, friable soft||||||F|||20160313105850OBX|37|TX|SURGPATH^Surgical Pathology Report^^SPATH^Surgical Pathology Report||     tissue cores, 0.4, and 1.0 cm.  
The specimen is entirely submitted in||||||F|||20160313105850OBX|38|TX|SURGPATH^Surgical Pathology Report^^SPATH^Surgical Pathology Report||     cassette 2A.  A quick stain is prepared and examined.||||||F|||20160313105850OBX|39|TX|SURGPATH^Surgical Pathology Report^^SPATH^Surgical Pathology Report||||||||F|||20160313105850OBX|40|TX|SURGPATH^Surgical Pathology Report^^SPATH^Surgical Pathology Report||     Quick Stain Interpretation: [JME]||||||F|||20160313105850OBX|41|TX|SURGPATH^Surgical Pathology Report^^SPATH^Surgical Pathology Report||       QS1: Positive.||||||F|||20160313105850OBX|42|TX|SURGPATH^Surgical Pathology Report^^SPATH^Surgical Pathology Report||||||||F|||20160313105850OBX|43|TX|SURGPATH^Surgical Pathology Report^^SPATH^Surgical Pathology Report||     3.  Received in formalin labeled with the patient's name, medical record||||||F|||20160313105850OBX|44|TX|SURGPATH^Surgical Pathology Report^^SPATH^Surgical Pathology Report||     number and left upper lobe core biopsy, is a single red-tan soft tissue||||||F|||20160313105850OBX|45|TX|SURGPATH^Surgical Pathology Report^^SPATH^Surgical Pathology Report||     core, 0.5 cm.  
The specimen is entirely submitted in cassette 3A.||||||F|||20160313105850OBX|46|TX|SURGPATH^Surgical Pathology Report^^SPATH^Surgical Pathology Report||||||||F|||20160313105850OBX|47|TX|SURGPATH^Surgical Pathology Report^^SPATH^Surgical Pathology Report||                               Microscopic Description||||||F|||20160313105850OBX|48|TX|SURGPATH^Surgical Pathology Report^^SPATH^Surgical Pathology Report||     Microscopic examination performed on all histologic sections.And also found incidental lung nodule.||||||F|||20160313105850OBX|49|TX|SURGPATH^Surgical Pathology Report^^SPATH^Surgical Pathology Report||||||||F|||20160313105850OBX|50|TX|SURGPATH^Surgical Pathology Report^^SPATH^Surgical Pathology Report||||||||F|||20160313105850OBX|51|TX|SURGPATH^Surgical Pathology Report^^SPATH^Surgical Pathology Report||     ZZTEST, TRANSPLANT                     1516050(SJH)||||||F|||20160313105850"
+ },
+ "request": {
+ "method": "POST",
+ "url": "Binary"
+ }
+ },
+ {
+ "fullUrl": "Patient/Patient1063259",
+ "resource": {
+ "resourceType": "Patient",
+ "id": "Patient1063259",
+ "extension": [
+ {
+ "url": "http://www.foo.com/fhir/extensions/CurrentWorkFlow",
+ "valueString": "pulmonary"
+ }
+ ],
+ "identifier": [
+ {
+ "system": "http://www.foo.com/fhir/identifier-type/EnterpriseId",
+ "value": "1063259"
+ },
+ {
+ "type": {
+ "coding": [
+ {
+ "system": "http://www.foo.com/Patient/UnknownCode",
+ "code": "MRN",
+ "display": "MRN"
+ }
+ ]
+ },
+ "system": "http://www.foo.com/fhir/identifier-type/MR",
+ "value": "963258"
+ },
+ {
+ "type": {
+ "coding": [
+ {
+ "system": "http://www.foo.com/Patient/UnknownCode",
+ "code": "MRN",
+ "display": "MRN"
+ }
+ ]
+ },
+ "system": "http://www.foo.com/fhir/identifier-type/MRN",
+ "value": "963258"
+ },
+ {
+ "system": "http://www.foo.com/fhir/identifier-type/",
+ "value": "1063259"
+ },
+ {
+ "type": {
+ "coding": [
+ {
+ "system": "http://hl7.org/fhir/identifier-type",
+ "code": "AN",
+ "display": "Account number"
+ }
+ ]
+ },
+ "system": "http://www.foo.com/fhir/identifier-type/AN",
+ "value": "18513341"
+ }
+ ],
+ "name": [
+ {
+ "use": "usual",
+ "family": [
+ "Boba"
+ ],
+ "given": [
+ "Fett"
+ ]
+ }
+ ],
+ "telecom": [
+ {
+ "system": "phone",
+ "value": "(602)666-5555",
+ "use": "home"
+ },
+ {
+ "system": "phone",
+ "value": "(000)000-0000",
+ "use": "work"
+ }
+ ],
+ "gender": "female",
+ "birthDate": "1971-10-12",
+ "address": [
+ {
+ "line": [
+ "124 W THOMAS RD"
+ ],
+ "city": "PHOENIX",
+ "state": "AZ",
+ "postalCode": "85013"
+ }
+ ],
+ "maritalStatus": {
+ "coding": [
+ {
+ "system": "http://hl7.org/fhir/v3/MaritalStatus",
+ "code": "S",
+ "display": "Never Married"
+ }
+ ]
+ },
+ "multipleBirthInteger": 0,
+ "communication": [
+ {
+ "language": {
+ "coding": [
+ {
+ "code": "1"
+ }
+ ]
+ },
+ "preferred": true
+ }
+ ],
+ "active": false
+ },
+ "request": {
+ "method": "PUT",
+ "url": "Patient/Patient1063259"
+ }
+ },
+ {
+ "fullUrl": "Practitioner/Pract057539",
+ "resource": {
+ "resourceType": "Practitioner",
+ "id": "Pract057539",
+ "identifier": [
+ {
+ "use": "official",
+ "system": "http://www.foo.com/fhir/PractitionerIdentifier",
+ "value": "057539"
+ }
+ ],
+ "name": {
+ "family": [
+ "Fgdeg"
+ ],
+ "given": [
+ "Ugngx"
+ ]
+ },
+ "gender": "unknown",
+ "practitionerRole": [
+ {
+ "role": {
+ "coding": [
+ {
+ "system": "http://hl7.org/fhir/practitioner-role",
+ "code": "doctor",
+ "display": "Doctor"
+ }
+ ]
+ }
+ }
+ ],
+ "communication": [
+ {
+ "coding": [
+ {
+ "code": "1"
+ }
+ ]
+ }
+ ]
+ },
+ "request": {
+ "method": "PUT",
+ "url": "Practitioner/Pract057539"
+ }
+ },
+ {
+ "fullUrl": "Practitioner/Pract12345",
+ "resource": {
+ "resourceType": "Practitioner",
+ "id": "Pract12345",
+ "identifier": [
+ {
+ "use": "official",
+ "system": "http://www.foo.com/fhir/PractitionerIdentifier",
+ "value": "12345"
+ }
+ ],
+ "name": {
+ "family": [
+ "TestG"
+ ],
+ "given": [
+ "ED Physician"
+ ]
+ },
+ "gender": "unknown",
+ "practitionerRole": [
+ {
+ "role": {
+ "coding": [
+ {
+ "system": "http://hl7.org/fhir/practitioner-role",
+ "code": "doctor",
+ "display": "Doctor"
+ }
+ ]
+ }
+ }
+ ],
+ "communication": [
+ {
+ "coding": [
+ {
+ "code": "1"
+ }
+ ]
+ }
+ ]
+ },
+ "request": {
+ "method": "PUT",
+ "url": "Practitioner/Pract12345"
+ }
+ },
+ {
+ "fullUrl": "Location/LocTOW8.8T22.01",
+ "resource": {
+ "resourceType": "Location",
+ "id": "LocTOW8.8T22.01",
+ "identifier": [
+ {
+ "system": "http://www.foo.com/fhir/LocationIdentifier",
+ "value": "TOW8.8T22.01"
+ }
+ ],
+ "name": "SJHMC"
+ },
+ "request": {
+ "method": "PUT",
+ "url": "Location/LocTOW8.8T22.01"
+ }
+ },
+ {
+ "fullUrl": "Observation/ObxSURGPATH0",
+ "resource": {
+ "resourceType": "Observation",
+ "id": "ObxSURGPATH0",
+ "identifier": [
+ {
+ "type": {
+ "coding": [
+ {
+ "system": "http://www.foo.com/fhir/",
+ "code": "LOOKUP",
+ "display": "LOOKUP"
+ }
+ ]
+ },
+ "value": "_SURGPATH"
+ }
+ ],
+ "status": "final",
+ "code": {
+ "coding": [
+ {
+ "system": "http://www.foo.com/Observation/UnknownCode",
+ "code": "SURGPATH",
+ "display": "SURGPATH"
+ }
+ ]
+ },
+ "subject": {
+ "reference": "Patient/Patient1063259",
+ "display": "Boba Fett "
+ },
+ "effectiveDateTime": "2016-03-13T15:58:50Z",
+ "issued": "2016-03-13T15:58:50Z",
+ "performer": [
+ {
+ "reference": "Practitioner/Pract12345"
+ }
+ ],
+ "valueString": "\\\\n\\\\n\\\\n\\\\n Surgical Pathology Report\\\\n\\\\n Collected Date/Time Received Date/Time Accession Number\\\\n 03/13/2016 10:39:00 03/13/2016 10:52:50 10-SP-17-000336\\\\n MST MST\\\\n\\\\n Diagnosis\\\\n 1-3. Lung, left upper lobe, CT guided biopsies with touch preparation:\\\\n - Poorly differentiated non-small cell carcinoma, pending special stains\\\\n\\\\n Ntgaai-CC Rigpmga, Application Sys Analyst II - L\\\\n (Electronically signed)\\\\n Verified: 03/13/2016 10:58\\\\n NR /NR\\\\n\\\\n Clinical Information\\\\n Pre-op diagnosis: Transplant\\\\n Procedure: Biopsy\\\\n Post-op diagnosis: N/A\\\\n Clinical History: Lung transplant\\\\n\\\\n Specimen Submitted\\\\n LUNG, TRNSBR BX\\\\n\\\\n Gross Description\\\\n 1. Received in formalin labeled with the patient's name, medical record\\\\n number and left upper lobe core biopsy, is a single red-tan, variegated,\\\\n friable soft tissue core, 0.9 cm. The specimen is entirely submitted in\\\\n cassette 1A.\\\\n\\\\n 2. Received in formalin labeled with the patient's name, medical record\\\\n number and left upper lobe core biopsy, are two pale gray, friable soft\\\\n tissue cores, 0.4, and 1.0 cm. The specimen is entirely submitted in\\\\n cassette 2A. A quick stain is prepared and examined.\\\\n\\\\n Quick Stain Interpretation: [JME]\\\\n QS1: Positive.\\\\n\\\\n 3. Received in formalin labeled with the patient's name, medical record\\\\n number and left upper lobe core biopsy, is a single red-tan soft tissue\\\\n core, 0.5 cm. The specimen is entirely submitted in cassette 3A.\\\\n\\\\n Microscopic Description\\\\n Microscopic examination performed on all histologic sections.And also found incidental lung nodule.\\\\n\\\\n\\\\n ZZTEST, TRANSPLANT 1516050(SJH)",
+ "device": {
+ "display": "EMR"
+ }
+ },
+ "request": {
+ "method": "PUT",
+ "url": "Observation/ObxSURGPATH0"
+ }
+ },
+ {
+ "fullUrl": "DiagnosticReport/ReportSPATH",
+ "resource": {
+ "resourceType": "DiagnosticReport",
+ "id": "ReportSPATH",
+ "identifier": [
+ {
+ "system": "http://www.foo.com/fhir/DiagnosticReport",
+ "value": "5674832"
+ }
+ ],
+ "status": "final",
+ "category": {
+ "coding": [
+ {
+ "system": "http://www.foo.com/DiagnosticReport/UnknownCode",
+ "code": "00010SP20160000336",
+ "display": "00010SP20160000336"
+ }
+ ],
+ "text": "00010SP20160000336"
+ },
+ "code": {
+ "coding": [
+ {
+ "system": "http://snomed.info/sct",
+ "code": "SPATH",
+ "display": "Surgical Pathology Report"
+ }
+ ],
+ "text": "Surgical Pathology Report"
+ },
+ "subject": {
+ "reference": "Patient/Patient1063259",
+ "display": "Boba Fett "
+ },
+ "effectiveDateTime": "2016-03-13T15:39:00Z",
+ "issued": "2016-03-13T15:58:50Z",
+ "performer": {
+ "reference": "Practitioner/Pract12345"
+ },
+ "request": [
+ {
+ "reference": "DiagnosticOrder/ORCSPATH"
+ }
+ ],
+ "result": [
+ {
+ "reference": "Observation/ObxSURGPATH0"
+ }
+ ]
+ },
+ "request": {
+ "method": "PUT",
+ "url": "DiagnosticReport/ReportSPATH"
+ }
+ },
+ {
+ "fullUrl": "DiagnosticOrder/ORCSPATH",
+ "resource": {
+ "resourceType": "DiagnosticOrder",
+ "id": "ORCSPATH",
+ "extension": [
+ {
+ "url": "http://www.foo.com/fhir/extensions/ModalityType",
+ "valueString": "AP"
+ },
+ {
+ "url": "http://www.foo.com/fhir/extensions/SendingApplication",
+ "valueString": "EPIC"
+ }
+ ],
+ "subject": {
+ "reference": "Patient/Patient1063259"
+ },
+ "orderer": {
+ "reference": "Practitioner/Pract12345"
+ },
+ "identifier": [
+ {
+ "system": "http://www.foo.com/fhir/DiagnosticOrder",
+ "value": "EPIC_5674832"
+ }
+ ],
+ "status": "completed",
+ "event": [
+ {
+ "status": "in-progress",
+ "dateTime": "2016-03-13T15:39:00Z"
+ }
+ ],
+ "item": [
+ {
+ "code": {
+ "coding": [
+ {
+ "system": "http://snomed.info/sct",
+ "code": "SPATH",
+ "display": "Surgical Pathology Report"
+ }
+ ],
+ "text": "Surgical Pathology Report"
+ }
+ }
+ ]
+ },
+ "request": {
+ "method": "PUT",
+ "url": "DiagnosticOrder/ORCEPIC5674832"
+ }
+ }
+ ]
+}
diff --git a/hapi-fhir-jpaserver-base/src/test/resources/r4/createdeletebundle.json b/hapi-fhir-jpaserver-base/src/test/resources/r4/createdeletebundle.json
new file mode 100644
index 00000000000..abe36b0a8a9
--- /dev/null
+++ b/hapi-fhir-jpaserver-base/src/test/resources/r4/createdeletebundle.json
@@ -0,0 +1,37 @@
+{
+ "resourceType": "Bundle",
+ "type": "transaction",
+ "entry": [
+ {
+ "fullUrl": "Patient/Patient1063259",
+ "resource": {
+ "resourceType": "Patient",
+ "id": "Patient1063259",
+ "identifier": [
+ {
+ "system": "http://www.foo.com/fhir/identifier-type/EnterpriseId",
+ "value": "1063259"
+ }
+ ]
+ },
+ "request": {
+ "method": "PUT",
+ "url": "Patient/Patient1063259"
+ }
+ },
+ {
+ "fullUrl": "DiagnosticReport/ReportSPATH",
+ "resource": {
+ "resourceType": "DiagnosticReport",
+ "id": "ReportSPATH",
+ "subject": {
+ "reference": "Patient/Patient1063259"
+ }
+ },
+ "request": {
+ "method": "PUT",
+ "url": "DiagnosticReport/ReportSPATH"
+ }
+ }
+ ]
+}
diff --git a/src/changes/changes.xml b/src/changes/changes.xml
index 3f2c12cf3d6..409fbfa1bc9 100644
--- a/src/changes/changes.xml
+++ b/src/changes/changes.xml
@@ -146,6 +146,11 @@
The JPA server version migrator tool now runs in a multithreaded way, allowing it to
upgrade th database faster when migration tasks require data updates.
+
+ A bug in the JPA server was fixed: When a resource was previously deleted,
+ a transaction could not be posted that both restored the deleted resource and
+ also contained references to the now-restored resource.
+
From bc720935556a1adb9d09e49145d7fa6f85ee12be Mon Sep 17 00:00:00 2001
From: James Agnew
Date: Sun, 4 Nov 2018 20:00:27 +0100
Subject: [PATCH 19/97] Add reindexing support based on table instead of column
---
.../fhir/cli/BaseMigrateDatabaseCommand.java | 20 +-
.../fhir/cli/HapiMigrateDatabaseCommand.java | 2 +-
.../cli/HapiMigrateDatabaseCommandTest.java | 288 ++++++++++-
.../ca/uhn/fhir/jpa/config/BaseConfig.java | 21 +-
.../ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java | 14 -
.../fhir/jpa/dao/BaseHapiFhirResourceDao.java | 21 +-
.../fhir/jpa/dao/BaseHapiFhirSystemDao.java | 250 +---------
.../java/ca/uhn/fhir/jpa/dao/DaoRegistry.java | 23 +-
.../uhn/fhir/jpa/dao/FhirSystemDaoDstu2.java | 6 +-
.../main/java/ca/uhn/fhir/jpa/dao/IDao.java | 9 +-
.../ca/uhn/fhir/jpa/dao/IFhirSystemDao.java | 13 +-
.../ca/uhn/fhir/jpa/dao/ISearchBuilder.java | 2 +-
.../ca/uhn/fhir/jpa/dao/SearchBuilder.java | 6 +-
.../fhir/jpa/dao/TransactionProcessor.java | 4 +-
.../jpa/dao/data/IResourceReindexJobDao.java | 58 +++
.../fhir/jpa/dao/data/IResourceTableDao.java | 23 +-
.../fhir/jpa/dao/index/IndexingSupport.java | 5 +
.../jpa/entity/ResourceReindexJobEntity.java | 113 +++++
.../ca/uhn/fhir/jpa/entity/ResourceTable.java | 2 -
.../jpa/provider/BaseJpaSystemProvider.java | 12 +-
.../BaseJpaSystemProviderDstu2Plus.java | 6 +-
.../search/PersistedJpaBundleProvider.java | 4 +-
.../jpa/search/SearchCoordinatorSvcImpl.java | 4 +-
.../reindex/IResourceReindexingSvc.java | 34 ++
.../reindex/ResourceReindexingSvcImpl.java | 450 ++++++++++++++++++
.../uhn/fhir/jpa/util/IReindexController.java | 34 --
.../uhn/fhir/jpa/util/ReindexController.java | 119 -----
.../ca/uhn/fhir/jpa/config/TestR4Config.java | 1 -
.../java/ca/uhn/fhir/jpa/dao/BaseJpaTest.java | 16 +-
.../fhir/jpa/dao/dstu2/BaseJpaDstu2Test.java | 7 +-
.../FhirResourceDaoDstu2InterceptorTest.java | 2 +-
...ceDaoDstu2SearchCustomSearchParamTest.java | 4 +-
.../dao/dstu2/FhirResourceDaoDstu2Test.java | 2 +-
.../fhir/jpa/dao/dstu3/BaseJpaDstu3Test.java | 9 +-
.../FhirResourceDaoDstu3CodeSystemTest.java | 5 +-
.../FhirResourceDaoDstu3InterceptorTest.java | 2 +-
...ceDaoDstu3SearchCustomSearchParamTest.java | 3 +-
...eDaoDstu3SearchWithLuceneDisabledTest.java | 5 +-
.../FhirResourceDaoDstu3TerminologyTest.java | 18 +-
.../dao/dstu3/FhirResourceDaoDstu3Test.java | 2 +-
...ResourceDaoDstu3UniqueSearchParamTest.java | 5 +-
.../ca/uhn/fhir/jpa/dao/r4/BaseJpaR4Test.java | 7 +-
.../r4/FhirResourceDaoR4CodeSystemTest.java | 6 +-
.../dao/r4/FhirResourceDaoR4CreateTest.java | 7 +-
...ourceDaoR4SearchCustomSearchParamTest.java | 6 +-
...urceDaoR4SearchWithLuceneDisabledTest.java | 5 +-
.../r4/FhirResourceDaoR4TerminologyTest.java | 19 +-
.../jpa/dao/r4/FhirResourceDaoR4Test.java | 57 ++-
...hirResourceDaoR4UniqueSearchParamTest.java | 27 +-
...hirResourceDaoR4UpdateTagSnapshotTest.java | 2 +-
.../dao/r4/FhirResourceDaoR4UpdateTest.java | 36 +-
.../fhir/jpa/dao/r4/FhirSystemDaoR4Test.java | 159 +++----
...rceProviderCustomSearchParamDstu3Test.java | 11 +-
...sourceProviderCustomSearchParamR4Test.java | 10 +-
.../search/SearchCoordinatorSvcImplTest.java | 2 +-
.../ResourceReindexingSvcImplTest.java | 262 ++++++++++
.../jpa/term/TerminologySvcImplDstu3Test.java | 8 -
.../uhn/fhir/jpa/migrate/DriverTypeEnum.java | 2 +-
.../ca/uhn/fhir/jpa/migrate/Migrator.java | 1 +
.../tasks/HapiFhirJpaMigrationTasks.java | 304 +++++++-----
.../tasks/HapiFhirJpaMigrationTasksTest.java | 4 +-
pom.xml | 2 +-
src/site/xdoc/doc_cli.xml | 42 +-
src/site/xdoc/doc_jpa.xml | 138 +++++-
64 files changed, 1873 insertions(+), 868 deletions(-)
create mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceReindexJobDao.java
create mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceReindexJobEntity.java
create mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/reindex/IResourceReindexingSvc.java
create mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/reindex/ResourceReindexingSvcImpl.java
delete mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/IReindexController.java
delete mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/ReindexController.java
create mode 100644 hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/reindex/ResourceReindexingSvcImplTest.java
diff --git a/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/BaseMigrateDatabaseCommand.java b/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/BaseMigrateDatabaseCommand.java
index f6e9f2569ab..2685989c610 100644
--- a/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/BaseMigrateDatabaseCommand.java
+++ b/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/BaseMigrateDatabaseCommand.java
@@ -9,9 +9,9 @@ package ca.uhn.fhir.cli;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -25,15 +25,24 @@ import ca.uhn.fhir.jpa.migrate.Migrator;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
+import org.apache.commons.lang3.StringUtils;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
+import java.util.Set;
import java.util.stream.Collectors;
+import static org.apache.commons.lang3.StringUtils.defaultString;
+
public abstract class BaseMigrateDatabaseCommand extends BaseCommand {
private static final String MIGRATE_DATABASE = "migrate-database";
+ private Set myFlags;
+
+ protected Set getFlags() {
+ return myFlags;
+ }
@Override
public String getCommandDescription() {
@@ -68,6 +77,7 @@ public abstract class BaseMigrateDatabaseCommand extends BaseCom
addRequiredOption(retVal, "f", "from", "Version", "The database schema version to migrate FROM");
addRequiredOption(retVal, "t", "to", "Version", "The database schema version to migrate TO");
addRequiredOption(retVal, "d", "driver", "Driver", "The database driver to use (Options are " + driverOptions() + ")");
+ addOptionalOption(retVal, "x", "flags", "Flags", "A comma-separated list of any specific migration flags (these flags are version specific, see migrator documentation for details)");
return retVal;
}
@@ -97,6 +107,12 @@ public abstract class BaseMigrateDatabaseCommand extends BaseCom
boolean dryRun = theCommandLine.hasOption("r");
+ String flags = theCommandLine.getOptionValue("x");
+ myFlags = Arrays.stream(defaultString(flags).split(","))
+ .map(String::trim)
+ .filter(StringUtils::isNotBlank)
+ .collect(Collectors.toSet());
+
Migrator migrator = new Migrator();
migrator.setConnectionUrl(url);
migrator.setDriverType(driverType);
diff --git a/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/HapiMigrateDatabaseCommand.java b/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/HapiMigrateDatabaseCommand.java
index ff7d39c0fda..6be5b241110 100644
--- a/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/HapiMigrateDatabaseCommand.java
+++ b/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/HapiMigrateDatabaseCommand.java
@@ -42,7 +42,7 @@ public class HapiMigrateDatabaseCommand extends BaseMigrateDatabaseCommand> tasks = new HapiFhirJpaMigrationTasks().getTasks(theFrom, theTo);
+ List> tasks = new HapiFhirJpaMigrationTasks(getFlags()).getTasks(theFrom, theTo);
tasks.forEach(theMigrator::addTask);
}
}
diff --git a/hapi-fhir-cli/hapi-fhir-cli-api/src/test/java/ca/uhn/fhir/cli/HapiMigrateDatabaseCommandTest.java b/hapi-fhir-cli/hapi-fhir-cli-api/src/test/java/ca/uhn/fhir/cli/HapiMigrateDatabaseCommandTest.java
index 7051011ff58..aaee5c3e4aa 100644
--- a/hapi-fhir-cli/hapi-fhir-cli-api/src/test/java/ca/uhn/fhir/cli/HapiMigrateDatabaseCommandTest.java
+++ b/hapi-fhir-cli/hapi-fhir-cli-api/src/test/java/ca/uhn/fhir/cli/HapiMigrateDatabaseCommandTest.java
@@ -7,14 +7,26 @@ import org.apache.commons.io.IOUtils;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import org.springframework.jdbc.core.JdbcTemplate;
+import org.springframework.jdbc.core.support.AbstractLobCreatingPreparedStatementCallback;
+import org.springframework.jdbc.support.lob.DefaultLobHandler;
+import org.springframework.jdbc.support.lob.LobCreator;
import java.io.File;
import java.io.IOException;
+import java.sql.PreparedStatement;
+import java.sql.SQLException;
+import java.sql.Timestamp;
+import java.sql.Types;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
+import java.util.Map;
import static org.apache.commons.lang3.StringUtils.isBlank;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
public class HapiMigrateDatabaseCommandTest {
@@ -25,39 +37,20 @@ public class HapiMigrateDatabaseCommandTest {
}
@Test
- public void testMigrate() throws IOException {
+ public void testMigrate_340_350() throws IOException {
File directory = new File("target/migrator_derby_test_340_350");
if (directory.exists()) {
FileUtils.deleteDirectory(directory);
}
- String url = "jdbc:derby:directory:target/migrator_derby_test_340_350;create=true";
+ String url = "jdbc:derby:directory:" + directory.getAbsolutePath() + ";create=true";
DriverTypeEnum.ConnectionProperties connectionProperties = DriverTypeEnum.DERBY_EMBEDDED.newConnectionProperties(url, "", "");
- String script = IOUtils.toString(HapiMigrateDatabaseCommandTest.class.getResourceAsStream("/persistence_create_derby107_340.sql"), Charsets.UTF_8);
- List scriptStatements = new ArrayList<>(Arrays.asList(script.split("\n")));
- for (int i = 0; i < scriptStatements.size(); i++) {
- String nextStatement = scriptStatements.get(i);
- if (isBlank(nextStatement)) {
- scriptStatements.remove(i);
- i--;
- continue;
- }
+ String initSql = "/persistence_create_derby107_340.sql";
+ executeSqlStatements(connectionProperties, initSql);
- nextStatement = nextStatement.trim();
- while (nextStatement.endsWith(";")) {
- nextStatement = nextStatement.substring(0, nextStatement.length() - 1);
- }
- scriptStatements.set(i, nextStatement);
- }
-
- connectionProperties.getTxTemplate().execute(t -> {
- for (String next : scriptStatements) {
- connectionProperties.newJdbcTemplate().execute(next);
- }
- return null;
- });
+ seedDatabase340(connectionProperties);
ourLog.info("**********************************************");
ourLog.info("Done Setup, Starting Dry Run...");
@@ -75,6 +68,13 @@ public class HapiMigrateDatabaseCommandTest {
};
App.main(args);
+ connectionProperties.getTxTemplate().execute(t -> {
+ JdbcTemplate jdbcTemplate = connectionProperties.newJdbcTemplate();
+ List> values = jdbcTemplate.queryForList("SELECT * FROM hfj_spidx_token");
+ assertFalse(values.get(0).keySet().contains("HASH_IDENTITY"));
+ return null;
+ });
+
ourLog.info("**********************************************");
ourLog.info("Done Setup, Starting Migration...");
ourLog.info("**********************************************");
@@ -89,5 +89,245 @@ public class HapiMigrateDatabaseCommandTest {
"-t", "V3_5_0"
};
App.main(args);
+
+ connectionProperties.getTxTemplate().execute(t -> {
+ JdbcTemplate jdbcTemplate = connectionProperties.newJdbcTemplate();
+ List<Map<String, Object>> values = jdbcTemplate.queryForList("SELECT * FROM hfj_spidx_token");
+ assertEquals(1, values.size());
+ assertEquals("identifier", values.get(0).get("SP_NAME"));
+ assertEquals("12345678", values.get(0).get("SP_VALUE"));
+ assertTrue(values.get(0).keySet().contains("HASH_IDENTITY"));
+ assertEquals(7001889285610424179L, values.get(0).get("HASH_IDENTITY"));
+ return null;
+ });
}
+
+ @Test
+ public void testMigrate_340_360() throws IOException {
+
+ File directory = new File("target/migrator_derby_test_340_360");
+ if (directory.exists()) {
+ FileUtils.deleteDirectory(directory);
+ }
+
+ String url = "jdbc:derby:directory:" + directory.getAbsolutePath() + ";create=true";
+ DriverTypeEnum.ConnectionProperties connectionProperties = DriverTypeEnum.DERBY_EMBEDDED.newConnectionProperties(url, "", "");
+
+ String initSql = "/persistence_create_derby107_340.sql";
+ executeSqlStatements(connectionProperties, initSql);
+
+ seedDatabase340(connectionProperties);
+
+ ourLog.info("**********************************************");
+ ourLog.info("Done Setup, Starting Migration...");
+ ourLog.info("**********************************************");
+
+ String[] args = new String[]{
+ "migrate-database",
+ "-d", "DERBY_EMBEDDED",
+ "-u", url,
+ "-n", "",
+ "-p", "",
+ "-f", "V3_4_0",
+ "-t", "V3_6_0"
+ };
+ App.main(args);
+
+ connectionProperties.getTxTemplate().execute(t -> {
+ JdbcTemplate jdbcTemplate = connectionProperties.newJdbcTemplate();
+ List<Map<String, Object>> values = jdbcTemplate.queryForList("SELECT * FROM hfj_spidx_token");
+ assertEquals(1, values.size());
+ assertEquals("identifier", values.get(0).get("SP_NAME"));
+ assertEquals("12345678", values.get(0).get("SP_VALUE"));
+ assertTrue(values.get(0).keySet().contains("HASH_IDENTITY"));
+ assertEquals(7001889285610424179L, values.get(0).get("HASH_IDENTITY"));
+ return null;
+ });
+ }
+
+ private void seedDatabase340(DriverTypeEnum.ConnectionProperties theConnectionProperties) {
+ theConnectionProperties.getTxTemplate().execute(t -> {
+ JdbcTemplate jdbcTemplate = theConnectionProperties.newJdbcTemplate();
+
+ jdbcTemplate.execute(
+ "insert into HFJ_RESOURCE (RES_DELETED_AT, RES_VERSION, FORCED_ID_PID, HAS_TAGS, RES_PUBLISHED, RES_UPDATED, SP_HAS_LINKS, HASH_SHA256, SP_INDEX_STATUS, RES_LANGUAGE, SP_CMPSTR_UNIQ_PRESENT, SP_COORDS_PRESENT, SP_DATE_PRESENT, SP_NUMBER_PRESENT, SP_QUANTITY_PRESENT, SP_STRING_PRESENT, SP_TOKEN_PRESENT, SP_URI_PRESENT, RES_PROFILE, RES_TYPE, RES_VER, RES_ID) values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
+ new AbstractLobCreatingPreparedStatementCallback(new DefaultLobHandler()) {
+ @Override
+ protected void setValues(PreparedStatement thePs, LobCreator theLobCreator) throws SQLException {
+ thePs.setNull(1, Types.TIMESTAMP);
+ thePs.setString(2, "R4");
+ thePs.setNull(3, Types.BIGINT);
+ thePs.setBoolean(4, false);
+ thePs.setTimestamp(5, new Timestamp(System.currentTimeMillis()));
+ thePs.setTimestamp(6, new Timestamp(System.currentTimeMillis()));
+ thePs.setBoolean(7, false);
+ thePs.setNull(8, Types.VARCHAR);
+ thePs.setLong(9, 1L);
+ thePs.setNull(10, Types.VARCHAR);
+ thePs.setBoolean(11, false);
+ thePs.setBoolean(12, false);
+ thePs.setBoolean(13, false);
+ thePs.setBoolean(14, false);
+ thePs.setBoolean(15, false);
+ thePs.setBoolean(16, false);
+ thePs.setBoolean(17, false);
+ thePs.setBoolean(18, false);
+ thePs.setNull(19, Types.VARCHAR);
+ thePs.setString(20, "Patient");
+ thePs.setLong(21, 1L);
+ thePs.setLong(22, 1L);
+ }
+ }
+ );
+
+ jdbcTemplate.execute(
+ "insert into HFJ_RES_VER (RES_DELETED_AT, RES_VERSION, FORCED_ID_PID, HAS_TAGS, RES_PUBLISHED, RES_UPDATED, RES_ENCODING, RES_TEXT, RES_ID, RES_TYPE, RES_VER, PID) values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
+ new AbstractLobCreatingPreparedStatementCallback(new DefaultLobHandler()) {
+ @Override
+ protected void setValues(PreparedStatement thePs, LobCreator theLobCreator) throws SQLException {
+ thePs.setNull(1, Types.TIMESTAMP);
+ thePs.setString(2, "R4");
+ thePs.setNull(3, Types.BIGINT);
+ thePs.setBoolean(4, false);
+ thePs.setTimestamp(5, new Timestamp(System.currentTimeMillis()));
+ thePs.setTimestamp(6, new Timestamp(System.currentTimeMillis()));
+ thePs.setString(7, "JSON");
+ theLobCreator.setBlobAsBytes(thePs, 8, "{\"resourceType\":\"Patient\"}".getBytes(Charsets.US_ASCII));
+ thePs.setLong(9, 1L);
+ thePs.setString(10, "Patient");
+ thePs.setLong(11, 1L);
+ thePs.setLong(12, 1L);
+ }
+ }
+ );
+
+ jdbcTemplate.execute(
+ "insert into HFJ_SPIDX_STRING (SP_MISSING, SP_NAME, RES_ID, RES_TYPE, SP_UPDATED, SP_VALUE_EXACT, SP_VALUE_NORMALIZED, SP_ID) values (?, ?, ?, ?, ?, ?, ?, ?)",
+ new AbstractLobCreatingPreparedStatementCallback(new DefaultLobHandler()) {
+ @Override
+ protected void setValues(PreparedStatement thePs, LobCreator theLobCreator) throws SQLException {
+ thePs.setBoolean(1, false);
+ thePs.setString(2, "given");
+ thePs.setLong(3, 1L); // res-id
+ thePs.setString(4, "Patient");
+ thePs.setTimestamp(5, new Timestamp(System.currentTimeMillis()));
+ thePs.setString(6, "ROBERT");
+ thePs.setString(7, "Robert");
+ thePs.setLong(8, 1L);
+ }
+ }
+ );
+
+ jdbcTemplate.execute(
+ "insert into HFJ_SPIDX_TOKEN (SP_MISSING, SP_NAME, RES_ID, RES_TYPE, SP_UPDATED, SP_SYSTEM, SP_VALUE, SP_ID) values (?, ?, ?, ?, ?, ?, ?, ?)",
+ new AbstractLobCreatingPreparedStatementCallback(new DefaultLobHandler()) {
+ @Override
+ protected void setValues(PreparedStatement thePs, LobCreator theLobCreator) throws SQLException {
+ thePs.setBoolean(1, false);
+ thePs.setString(2, "identifier");
+ thePs.setLong(3, 1L); // res-id
+ thePs.setString(4, "Patient");
+ thePs.setTimestamp(5, new Timestamp(System.currentTimeMillis()));
+ thePs.setString(6, "http://foo");
+ thePs.setString(7, "12345678");
+ thePs.setLong(8, 1L);
+ }
+ }
+ );
+
+ jdbcTemplate.execute(
+ "insert into HFJ_SPIDX_DATE (SP_MISSING, SP_NAME, RES_ID, RES_TYPE, SP_UPDATED, SP_VALUE_HIGH, SP_VALUE_LOW, SP_ID) values (?, ?, ?, ?, ?, ?, ?, ?)",
+ new AbstractLobCreatingPreparedStatementCallback(new DefaultLobHandler()) {
+ @Override
+ protected void setValues(PreparedStatement thePs, LobCreator theLobCreator) throws SQLException {
+ thePs.setBoolean(1, false);
+ thePs.setString(2, "birthdate");
+ thePs.setLong(3, 1L); // res-id
+ thePs.setString(4, "Patient");
+ thePs.setTimestamp(5, new Timestamp(System.currentTimeMillis()));
+ thePs.setTimestamp(6, new Timestamp(1000000000L)); // value high
+ thePs.setTimestamp(7, new Timestamp(1000000000L)); // value low
+ thePs.setLong(8, 1L);
+ }
+ }
+ );
+
+ return null;
+ });
+
+ }
+
+
+ @Test
+ public void testMigrate_340_350_NoMigrateHashes() throws IOException {
+
+ File directory = new File("target/migrator_derby_test_340_350_nmh");
+ if (directory.exists()) {
+ FileUtils.deleteDirectory(directory);
+ }
+
+ String url = "jdbc:derby:directory:" + directory.getAbsolutePath() + ";create=true";
+ DriverTypeEnum.ConnectionProperties connectionProperties = DriverTypeEnum.DERBY_EMBEDDED.newConnectionProperties(url, "", "");
+
+ String initSql = "/persistence_create_derby107_340.sql";
+ executeSqlStatements(connectionProperties, initSql);
+
+ seedDatabase340(connectionProperties);
+
+ ourLog.info("**********************************************");
+ ourLog.info("Done Setup, Starting Migration...");
+ ourLog.info("**********************************************");
+
+ String[] args = new String[]{
+ "migrate-database",
+ "-d", "DERBY_EMBEDDED",
+ "-u", url,
+ "-n", "",
+ "-p", "",
+ "-f", "V3_4_0",
+ "-t", "V3_5_0",
+ "-x", "no-migrate-350-hashes"
+ };
+ App.main(args);
+
+ connectionProperties.getTxTemplate().execute(t -> {
+ JdbcTemplate jdbcTemplate = connectionProperties.newJdbcTemplate();
+ List<Map<String, Object>> values = jdbcTemplate.queryForList("SELECT * FROM hfj_spidx_token");
+ assertEquals(1, values.size());
+ assertEquals("identifier", values.get(0).get("SP_NAME"));
+ assertEquals("12345678", values.get(0).get("SP_VALUE"));
+ assertEquals(null, values.get(0).get("HASH_IDENTITY"));
+ return null;
+ });
+
+ }
+
+ private void executeSqlStatements(DriverTypeEnum.ConnectionProperties theConnectionProperties, String theInitSql) throws
+ IOException {
+ String script = IOUtils.toString(HapiMigrateDatabaseCommandTest.class.getResourceAsStream(theInitSql), Charsets.UTF_8);
+ List<String> scriptStatements = new ArrayList<>(Arrays.asList(script.split("\n")));
+ for (int i = 0; i < scriptStatements.size(); i++) {
+ String nextStatement = scriptStatements.get(i);
+ if (isBlank(nextStatement)) {
+ scriptStatements.remove(i);
+ i--;
+ continue;
+ }
+
+ nextStatement = nextStatement.trim();
+ while (nextStatement.endsWith(";")) {
+ nextStatement = nextStatement.substring(0, nextStatement.length() - 1);
+ }
+ scriptStatements.set(i, nextStatement);
+ }
+
+ theConnectionProperties.getTxTemplate().execute(t -> {
+ for (String next : scriptStatements) {
+ theConnectionProperties.newJdbcTemplate().execute(next);
+ }
+ return null;
+ });
+
+ }
+
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseConfig.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseConfig.java
index 3aa4f830363..b3a7b55eaec 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseConfig.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseConfig.java
@@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.config;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -25,6 +25,8 @@ import ca.uhn.fhir.i18n.HapiLocalizer;
import ca.uhn.fhir.jpa.dao.DaoRegistry;
import ca.uhn.fhir.jpa.provider.SubscriptionTriggeringProvider;
import ca.uhn.fhir.jpa.search.*;
+import ca.uhn.fhir.jpa.search.reindex.IResourceReindexingSvc;
+import ca.uhn.fhir.jpa.search.reindex.ResourceReindexingSvcImpl;
import ca.uhn.fhir.jpa.search.warm.CacheWarmingSvcImpl;
import ca.uhn.fhir.jpa.search.warm.ICacheWarmingSvc;
import ca.uhn.fhir.jpa.sp.ISearchParamPresenceSvc;
@@ -32,8 +34,6 @@ import ca.uhn.fhir.jpa.sp.SearchParamPresenceSvcImpl;
import ca.uhn.fhir.jpa.subscription.email.SubscriptionEmailInterceptor;
import ca.uhn.fhir.jpa.subscription.resthook.SubscriptionRestHookInterceptor;
import ca.uhn.fhir.jpa.subscription.websocket.SubscriptionWebsocketInterceptor;
-import ca.uhn.fhir.jpa.util.IReindexController;
-import ca.uhn.fhir.jpa.util.ReindexController;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.jpa.HibernatePersistenceProvider;
import org.hibernate.query.criteria.LiteralHandlingMode;
@@ -60,6 +60,7 @@ import javax.annotation.Nonnull;
import java.util.Map;
import java.util.concurrent.ScheduledExecutorService;
+
@Configuration
@EnableScheduling
@EnableJpaRepositories(basePackages = "ca.uhn.fhir.jpa.dao.data")
@@ -150,11 +151,6 @@ public abstract class BaseConfig implements SchedulingConfigurer {
return new HibernateJpaDialect();
}
- @Bean
- public IReindexController reindexController() {
- return new ReindexController();
- }
-
@Bean()
public ScheduledExecutorService scheduledExecutorService() {
ScheduledExecutorFactoryBean b = new ScheduledExecutorFactoryBean();
@@ -163,7 +159,7 @@ public abstract class BaseConfig implements SchedulingConfigurer {
return b.getObject();
}
- @Bean(name="mySubscriptionTriggeringProvider")
+ @Bean(name = "mySubscriptionTriggeringProvider")
@Lazy
public SubscriptionTriggeringProvider subscriptionTriggeringProvider() {
return new SubscriptionTriggeringProvider();
@@ -215,6 +211,11 @@ public abstract class BaseConfig implements SchedulingConfigurer {
return retVal;
}
+ @Bean
+ public IResourceReindexingSvc resourceReindexingSvc() {
+ return new ResourceReindexingSvcImpl();
+ }
+
public static void configureEntityManagerFactory(LocalContainerEntityManagerFactoryBean theFactory, FhirContext theCtx) {
theFactory.setJpaDialect(hibernateJpaDialect(theCtx.getLocalizer()));
theFactory.setPackagesToScan("ca.uhn.fhir.jpa.entity");
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java
index d369de228f2..37b46b3fb76 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java
@@ -712,20 +712,6 @@ public abstract class BaseHapiFhirDao implements IDao,
return dao;
}
- protected IFhirResourceDao<?> getDaoOrThrowException(Class<? extends IBaseResource> theClass) {
- IFhirResourceDao<? extends IBaseResource> retVal = getDao(theClass);
- if (retVal == null) {
- List<String> supportedResourceTypes = getDaos()
- .keySet()
- .stream()
- .map(t -> myContext.getResourceDefinition(t).getName())
- .sorted()
- .collect(Collectors.toList());
- throw new InvalidRequestException("Unable to process request, this server does not know how to handle resources of type " + getContext().getResourceDefinition(theClass).getName() + " - Can handle: " + supportedResourceTypes);
- }
- return retVal;
- }
-
private Map<Class<? extends IBaseResource>, IFhirResourceDao<?>> getDaos() {
if (myResourceTypeToDao == null) {
Map<Class<? extends IBaseResource>, IFhirResourceDao<?>> resourceTypeToDao = new HashMap<>();
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java
index 7d7e65ca449..29ab5fd8d91 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java
@@ -29,10 +29,10 @@ import ca.uhn.fhir.jpa.dao.data.ISearchResultDao;
import ca.uhn.fhir.jpa.entity.*;
import ca.uhn.fhir.jpa.search.DatabaseBackedPagingProvider;
import ca.uhn.fhir.jpa.search.PersistedJpaBundleProvider;
+import ca.uhn.fhir.jpa.search.reindex.IResourceReindexingSvc;
import ca.uhn.fhir.jpa.util.DeleteConflict;
import ca.uhn.fhir.jpa.util.ExpungeOptions;
import ca.uhn.fhir.jpa.util.ExpungeOutcome;
-import ca.uhn.fhir.jpa.util.IReindexController;
import ca.uhn.fhir.jpa.util.jsonpatch.JsonPatchUtils;
import ca.uhn.fhir.jpa.util.xmlpatch.XmlPatchUtils;
import ca.uhn.fhir.model.api.*;
@@ -42,7 +42,6 @@ import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.param.ParameterUtil;
import ca.uhn.fhir.rest.param.QualifierDetails;
-import ca.uhn.fhir.rest.server.RestfulServerUtils;
import ca.uhn.fhir.rest.server.exceptions.*;
import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor;
import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor.ActionRequestDetails;
@@ -91,8 +90,6 @@ public abstract class BaseHapiFhirResourceDao extends B
private String mySecondaryPrimaryKeyParamName;
@Autowired
private ISearchParamRegistry mySearchParamRegistry;
- @Autowired
- private IReindexController myReindexController;
@Override
public void addTag(IIdType theId, TagTypeEnum theTagType, String theScheme, String theTerm, String theLabel) {
@@ -624,22 +621,21 @@ public abstract class BaseHapiFhirResourceDao extends B
TransactionTemplate txTemplate = new TransactionTemplate(myPlatformTransactionManager);
txTemplate.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRED);
- Integer updatedCount = txTemplate.execute(new TransactionCallback<Integer>() {
- @Override
- public @NonNull
- Integer doInTransaction(@Nonnull TransactionStatus theStatus) {
- return myResourceTableDao.markResourcesOfTypeAsRequiringReindexing(resourceType);
- }
+ txTemplate.execute(t->{
+ myResourceReindexingSvc.markAllResourcesForReindexing(resourceType);
+ return null;
});
- ourLog.debug("Marked {} resources for reindexing", updatedCount);
+ ourLog.debug("Marked resources of type {} for reindexing", resourceType);
}
}
mySearchParamRegistry.requestRefresh();
- myReindexController.requestReindex();
}
+ @Autowired
+ private IResourceReindexingSvc myResourceReindexingSvc;
+
@Override
public MT metaAddOperation(IIdType theResourceId, MT theMetaAdd, RequestDetails theRequestDetails) {
// Notify interceptors
@@ -727,6 +723,7 @@ public abstract class BaseHapiFhirResourceDao extends B
return retVal;
}
+ @SuppressWarnings("JpaQlInspection")
@Override
public MT metaGetOperation(Class theType, RequestDetails theRequestDetails) {
// Notify interceptors
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirSystemDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirSystemDao.java
index 70188beca0e..1db7fc848ad 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirSystemDao.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirSystemDao.java
@@ -3,42 +3,28 @@ package ca.uhn.fhir.jpa.dao;
import ca.uhn.fhir.jpa.dao.data.IForcedIdDao;
import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
import ca.uhn.fhir.jpa.dao.data.ITermConceptDao;
-import ca.uhn.fhir.jpa.entity.ForcedId;
-import ca.uhn.fhir.jpa.entity.ResourceTable;
import ca.uhn.fhir.jpa.util.ExpungeOptions;
import ca.uhn.fhir.jpa.util.ExpungeOutcome;
-import ca.uhn.fhir.jpa.util.ReindexFailureException;
import ca.uhn.fhir.jpa.util.ResourceCountCache;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.api.server.RequestDetails;
-import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
-import ca.uhn.fhir.rest.server.exceptions.ResourceVersionConflictException;
import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor.ActionRequestDetails;
import ca.uhn.fhir.util.StopWatch;
-import org.apache.commons.lang3.concurrent.BasicThreadFactory;
-import org.hibernate.search.util.impl.Executors;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
-import org.springframework.data.domain.PageRequest;
import org.springframework.transaction.PlatformTransactionManager;
-import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
-import org.springframework.transaction.support.TransactionCallback;
-import org.springframework.transaction.support.TransactionCallbackWithoutResult;
-import org.springframework.transaction.support.TransactionTemplate;
-import javax.annotation.Nonnull;
import javax.annotation.Nullable;
-import javax.persistence.Query;
-import java.util.*;
-import java.util.concurrent.*;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
import java.util.concurrent.locks.ReentrantLock;
-import static org.apache.commons.lang3.StringUtils.isBlank;
-
/*
* #%L
* HAPI FHIR JPA Server
@@ -48,9 +34,9 @@ import static org.apache.commons.lang3.StringUtils.isBlank;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -76,71 +62,7 @@ public abstract class BaseHapiFhirSystemDao extends BaseHapiFhirDao idsToReindex = txTemplate.execute(theStatus -> {
- int maxResult = 500;
- if (theCount != null) {
- maxResult = Math.min(theCount, 2000);
- }
- maxResult = Math.max(maxResult, 10);
-
- ourLog.debug("Beginning indexing query with maximum {}", maxResult);
- return myResourceTableDao
- .findIdsOfResourcesRequiringReindexing(new PageRequest(0, maxResult))
- .getContent();
- });
-
- // If no IDs need reindexing, we're good here
- if (idsToReindex.isEmpty()) {
- return 0;
- }
-
- // Reindex
- StopWatch sw = new StopWatch();
-
- // Execute each reindex in a task within a threadpool
- int threadCount = getConfig().getReindexThreadCount();
- RejectedExecutionHandler rejectHandler = new Executors.BlockPolicy();
- ThreadPoolExecutor executor = new ThreadPoolExecutor(threadCount, threadCount,
- 0L, TimeUnit.MILLISECONDS,
- new LinkedBlockingQueue<>(),
- myReindexingThreadFactory,
- rejectHandler
- );
- List<Future<?>> futures = new ArrayList<>();
- for (Long nextId : idsToReindex) {
- futures.add(executor.submit(new ResourceReindexingTask(nextId)));
- }
- for (Future<?> next : futures) {
- try {
- next.get();
- } catch (Exception e) {
- throw new InternalErrorException("Failed to reindex: ", e);
- }
- }
- executor.shutdown();
-
- ourLog.info("Reindexed {} resources in {} threads - {}ms/resource", idsToReindex.size(), threadCount, sw.getMillisPerOperation(idsToReindex.size()));
- return idsToReindex.size();
- }
@Override
@Transactional(propagation = Propagation.REQUIRED)
@@ -182,165 +104,5 @@ public abstract class BaseHapiFhirSystemDao extends BaseHapiFhirDao() {
- @Override
- public Void doInTransaction(@Nonnull TransactionStatus theStatus) {
- ourLog.info("Marking resource with PID {} as indexing_failed", new Object[] {theId});
- Query q = myEntityManager.createQuery("UPDATE ResourceTable t SET t.myIndexStatus = :status WHERE t.myId = :id");
- q.setParameter("status", INDEX_STATUS_INDEXING_FAILED);
- q.setParameter("id", theId);
- q.executeUpdate();
-
- q = myEntityManager.createQuery("DELETE FROM ResourceTag t WHERE t.myResourceId = :id");
- q.setParameter("id", theId);
- q.executeUpdate();
-
- q = myEntityManager.createQuery("DELETE FROM ResourceIndexedSearchParamCoords t WHERE t.myResourcePid = :id");
- q.setParameter("id", theId);
- q.executeUpdate();
-
- q = myEntityManager.createQuery("DELETE FROM ResourceIndexedSearchParamDate t WHERE t.myResourcePid = :id");
- q.setParameter("id", theId);
- q.executeUpdate();
-
- q = myEntityManager.createQuery("DELETE FROM ResourceIndexedSearchParamNumber t WHERE t.myResourcePid = :id");
- q.setParameter("id", theId);
- q.executeUpdate();
-
- q = myEntityManager.createQuery("DELETE FROM ResourceIndexedSearchParamQuantity t WHERE t.myResourcePid = :id");
- q.setParameter("id", theId);
- q.executeUpdate();
-
- q = myEntityManager.createQuery("DELETE FROM ResourceIndexedSearchParamString t WHERE t.myResourcePid = :id");
- q.setParameter("id", theId);
- q.executeUpdate();
-
- q = myEntityManager.createQuery("DELETE FROM ResourceIndexedSearchParamToken t WHERE t.myResourcePid = :id");
- q.setParameter("id", theId);
- q.executeUpdate();
-
- q = myEntityManager.createQuery("DELETE FROM ResourceIndexedSearchParamUri t WHERE t.myResourcePid = :id");
- q.setParameter("id", theId);
- q.executeUpdate();
-
- q = myEntityManager.createQuery("DELETE FROM ResourceLink t WHERE t.mySourceResourcePid = :id");
- q.setParameter("id", theId);
- q.executeUpdate();
-
- q = myEntityManager.createQuery("DELETE FROM ResourceLink t WHERE t.myTargetResourcePid = :id");
- q.setParameter("id", theId);
- q.executeUpdate();
-
- return null;
- }
- });
- }
-
- @Override
- @Transactional(propagation = Propagation.NEVER)
- public Integer performReindexingPass(final Integer theCount) {
- if (getConfig().isStatusBasedReindexingDisabled()) {
- return -1;
- }
- if (!myReindexLock.tryLock()) {
- return -1;
- }
- try {
- return doPerformReindexingPass(theCount);
- } catch (ReindexFailureException e) {
- ourLog.warn("Reindexing failed for resource {}", e.getResourceId());
- markResourceAsIndexingFailed(e.getResourceId());
- return -1;
- } finally {
- myReindexLock.unlock();
- }
- }
-
- private class ResourceReindexingTask implements Runnable {
- private final Long myNextId;
-
- public ResourceReindexingTask(Long theNextId) {
- myNextId = theNextId;
- }
-
- @SuppressWarnings("unchecked")
- @Override
- public void run() {
- TransactionTemplate txTemplate = new TransactionTemplate(myTxManager);
- txTemplate.afterPropertiesSet();
-
- Throwable reindexFailure;
- try {
- reindexFailure = txTemplate.execute(new TransactionCallback<Throwable>() {
- @Override
- public Throwable doInTransaction(TransactionStatus theStatus) {
- ResourceTable resourceTable = myResourceTableDao.findById(myNextId).orElseThrow(IllegalStateException::new);
-
- try {
- /*
- * This part is because from HAPI 1.5 - 1.6 we changed the format of forced ID to be "type/id" instead of just "id"
- */
- ForcedId forcedId = resourceTable.getForcedId();
- if (forcedId != null) {
- if (isBlank(forcedId.getResourceType())) {
- ourLog.info("Updating resource {} forcedId type to {}", forcedId.getForcedId(), resourceTable.getResourceType());
- forcedId.setResourceType(resourceTable.getResourceType());
- myForcedIdDao.save(forcedId);
- }
- }
-
- final IBaseResource resource = toResource(resourceTable, false);
-
- Class<? extends IBaseResource> resourceClass = getContext().getResourceDefinition(resourceTable.getResourceType()).getImplementingClass();
- @SuppressWarnings("rawtypes") final IFhirResourceDao dao = getDaoOrThrowException(resourceClass);
- dao.reindex(resource, resourceTable);
- return null;
-
- } catch (Exception e) {
- ourLog.error("Failed to index resource {}: {}", resourceTable.getIdDt(), e.toString(), e);
- theStatus.setRollbackOnly();
- return e;
- }
- }
- });
- } catch (ResourceVersionConflictException e) {
- /*
- * We reindex in multiple threads, so it's technically possible that two threads try
- * to index resources that cause a constraint error now (i.e. because a unique index has been
- * added that didn't previously exist). In this case, one of the threads would succeed and
- * not get this error, so we'll let the other one fail and try
- * again later.
- */
- ourLog.info("Failed to reindex {} because of a version conflict. Leaving in unindexed state: {}", e.getMessage());
- reindexFailure = null;
- }
-
- if (reindexFailure != null) {
- txTemplate.execute(new TransactionCallbackWithoutResult() {
- @Override
- protected void doInTransactionWithoutResult(TransactionStatus theStatus) {
- ourLog.info("Setting resource PID[{}] status to ERRORED", myNextId);
- myResourceTableDao.updateStatusToErrored(myNextId);
- }
- });
- }
- }
- }
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/DaoRegistry.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/DaoRegistry.java
index 144bbe63353..a8524be8d56 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/DaoRegistry.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/DaoRegistry.java
@@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.dao;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -22,14 +22,17 @@ package ca.uhn.fhir.jpa.dao;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
-import org.apache.commons.lang3.Validate;
+import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
+import org.hl7.fhir.instance.model.api.IBaseResource;
import org.springframework.beans.BeansException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import java.util.HashMap;
+import java.util.List;
import java.util.Map;
+import java.util.stream.Collectors;
public class DaoRegistry implements ApplicationContextAware {
private ApplicationContext myAppCtx;
@@ -55,11 +58,23 @@ public class DaoRegistry implements ApplicationContextAware {
public IFhirResourceDao<?> getResourceDao(String theResourceName) {
IFhirResourceDao<?> retVal = getResourceNameToResourceDao().get(theResourceName);
- Validate.notNull(retVal, "No DAO exists for resource type %s - Have: %s", theResourceName, myResourceNameToResourceDao);
+ if (retVal == null) {
+ List<String> supportedResourceTypes = getResourceNameToResourceDao()
+ .keySet()
+ .stream()
+ .sorted()
+ .collect(Collectors.toList());
+ throw new InvalidRequestException("Unable to process request, this server does not know how to handle resources of type " + theResourceName + " - Can handle: " + supportedResourceTypes);
+ }
return retVal;
}
+ public <T extends IBaseResource> IFhirResourceDao<T> getResourceDao(Class<T> theResourceType) {
+ String resourceName = myCtx.getResourceDefinition(theResourceType).getName();
+ return (IFhirResourceDao<T>) getResourceDao(resourceName);
+ }
+
private Map<String, IFhirResourceDao<?>> getResourceNameToResourceDao() {
Map<String, IFhirResourceDao<?>> retVal = myResourceNameToResourceDao;
if (retVal == null || retVal.isEmpty()) {
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FhirSystemDaoDstu2.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FhirSystemDaoDstu2.java
index ff8899edf98..4c2dcf968bc 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FhirSystemDaoDstu2.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FhirSystemDaoDstu2.java
@@ -81,6 +81,8 @@ public class FhirSystemDaoDstu2 extends BaseHapiFhirSystemDao {
@Autowired
private PlatformTransactionManager myTxManager;
+ @Autowired
+ private DaoRegistry myDaoRegistry;
private Bundle batch(final RequestDetails theRequestDetails, Bundle theRequest) {
ourLog.info("Beginning batch with {} resources", theRequest.getEntry().size());
@@ -363,7 +365,7 @@ public class FhirSystemDaoDstu2 extends BaseHapiFhirSystemDao {
case POST: {
// CREATE
@SuppressWarnings("rawtypes")
- IFhirResourceDao resourceDao = getDaoOrThrowException(res.getClass());
+ IFhirResourceDao resourceDao = myDaoRegistry.getResourceDao(res.getClass());
res.setId((String) null);
DaoMethodOutcome outcome;
outcome = resourceDao.create(res, nextReqEntry.getRequest().getIfNoneExist(), false, theRequestDetails);
@@ -403,7 +405,7 @@ public class FhirSystemDaoDstu2 extends BaseHapiFhirSystemDao {
case PUT: {
// UPDATE
@SuppressWarnings("rawtypes")
- IFhirResourceDao resourceDao = getDaoOrThrowException(res.getClass());
+ IFhirResourceDao resourceDao = myDaoRegistry.getResourceDao(res.getClass());
DaoMethodOutcome outcome;
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/IDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/IDao.java
index fd47335fb25..81c67874815 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/IDao.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/IDao.java
@@ -1,10 +1,5 @@
package ca.uhn.fhir.jpa.dao;
-import java.util.Collection;
-import java.util.Set;
-
-import org.hl7.fhir.instance.model.api.IBaseResource;
-
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.context.RuntimeSearchParam;
@@ -13,6 +8,10 @@ import ca.uhn.fhir.jpa.entity.IBaseResourceEntity;
import ca.uhn.fhir.jpa.entity.ResourceTable;
import ca.uhn.fhir.jpa.entity.ResourceTag;
import ca.uhn.fhir.jpa.search.PersistedJpaBundleProvider;
+import org.hl7.fhir.instance.model.api.IBaseResource;
+
+import java.util.Collection;
+import java.util.Set;
/*
* #%L
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/IFhirSystemDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/IFhirSystemDao.java
index 8d75646bb01..b629dd37992 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/IFhirSystemDao.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/IFhirSystemDao.java
@@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.dao;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -53,13 +53,6 @@ public interface IFhirSystemDao extends IDao {
IBundleProvider history(Date theDate, Date theUntil, RequestDetails theRequestDetails);
- /**
- * Marks all indexes as needing fresh indexing
- *
- * @return Returns the number of affected rows
- */
- int markAllResourcesForReindexing();
-
/**
* Not supported for DSTU1
*
@@ -67,8 +60,6 @@ public interface IFhirSystemDao extends IDao {
*/
MT metaGetOperation(RequestDetails theRequestDetails);
- Integer performReindexingPass(Integer theCount);
-
T transaction(RequestDetails theRequestDetails, T theResources);
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/ISearchBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/ISearchBuilder.java
index 06281a3e022..0857ca7dac7 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/ISearchBuilder.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/ISearchBuilder.java
@@ -43,7 +43,7 @@ public interface ISearchBuilder {
FhirContext theContext, IDao theDao);
Set loadIncludes(IDao theCallingDao, FhirContext theContext, EntityManager theEntityManager, Collection theMatches, Set theRevIncludes, boolean theReverseMode,
- DateRangeParam theLastUpdated);
+ DateRangeParam theLastUpdated, String theSearchIdOrDescription);
/**
* How many results may be fetched at once
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/SearchBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/SearchBuilder.java
index d798fcd3853..7d457e2b381 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/SearchBuilder.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/SearchBuilder.java
@@ -1958,7 +1958,7 @@ public class SearchBuilder implements ISearchBuilder {
*/
@Override
public HashSet loadIncludes(IDao theCallingDao, FhirContext theContext, EntityManager theEntityManager, Collection theMatches, Set theRevIncludes,
- boolean theReverseMode, DateRangeParam theLastUpdated) {
+ boolean theReverseMode, DateRangeParam theLastUpdated, String theSearchIdOrDescription) {
if (theMatches.size() == 0) {
return new HashSet<>();
}
@@ -2080,7 +2080,7 @@ public class SearchBuilder implements ISearchBuilder {
nextRoundMatches = pidsToInclude;
} while (includes.size() > 0 && nextRoundMatches.size() > 0 && addedSomeThisRound);
- ourLog.info("Loaded {} {} in {} rounds and {} ms", allAdded.size(), theReverseMode ? "_revincludes" : "_includes", roundCounts, w.getMillisAndRestart());
+ ourLog.info("Loaded {} {} in {} rounds and {} ms for search {}", allAdded.size(), theReverseMode ? "_revincludes" : "_includes", roundCounts, w.getMillisAndRestart(), theSearchIdOrDescription);
return allAdded;
}
@@ -2316,7 +2316,7 @@ public class SearchBuilder implements ISearchBuilder {
myCurrentOffset = end;
Collection pidsToScan = myCurrentPids.subList(start, end);
Set includes = Collections.singleton(new Include("*", true));
- Set newPids = loadIncludes(myCallingDao, myContext, myEntityManager, pidsToScan, includes, false, myParams.getLastUpdated());
+ Set newPids = loadIncludes(myCallingDao, myContext, myEntityManager, pidsToScan, includes, false, myParams.getLastUpdated(), mySearchUuid);
myCurrentIterator = newPids.iterator();
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/TransactionProcessor.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/TransactionProcessor.java
index 554a3e58723..bec7998d5cc 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/TransactionProcessor.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/TransactionProcessor.java
@@ -83,6 +83,8 @@ public class TransactionProcessor {
private FhirContext myContext;
@Autowired
private ITransactionProcessorVersionAdapter myVersionAdapter;
+ @Autowired
+ private DaoRegistry myDaoRegistry;
public static boolean isPlaceholder(IIdType theId) {
if (theId != null && theId.getValue() != null) {
@@ -749,7 +751,7 @@ public class TransactionProcessor {
}
private IFhirResourceDao getDaoOrThrowException(Class<? extends IBaseResource> theClass) {
- return myDao.getDaoOrThrowException(theClass);
+ return myDaoRegistry.getResourceDao(theClass);
}
protected void flushJpaSession() {
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceReindexJobDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceReindexJobDao.java
new file mode 100644
index 00000000000..b1903ece2b9
--- /dev/null
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceReindexJobDao.java
@@ -0,0 +1,58 @@
+package ca.uhn.fhir.jpa.dao.data;
+
+import ca.uhn.fhir.jpa.entity.ResourceReindexJobEntity;
+import org.springframework.data.domain.Pageable;
+import org.springframework.data.jpa.repository.JpaRepository;
+import org.springframework.data.jpa.repository.Modifying;
+import org.springframework.data.jpa.repository.Query;
+import org.springframework.data.repository.query.Param;
+
+import java.util.Collection;
+import java.util.Date;
+import java.util.List;
+
+/*
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2018 University Health Network
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+public interface IResourceReindexJobDao extends JpaRepository<ResourceReindexJobEntity, Long> {
+
+ @Modifying
+ @Query("UPDATE ResourceReindexJobEntity j SET j.myDeleted = true WHERE j.myResourceType = :type")
+ void markAllOfTypeAsDeleted(@Param("type") String theType);
+
+ @Modifying
+ @Query("UPDATE ResourceReindexJobEntity j SET j.myDeleted = true")
+ void markAllOfTypeAsDeleted();
+
+ @Modifying
+ @Query("UPDATE ResourceReindexJobEntity j SET j.myDeleted = true WHERE j.myId = :pid")
+ void markAsDeletedById(@Param("pid") Long theId);
+
+ @Query("SELECT j FROM ResourceReindexJobEntity j WHERE j.myDeleted = :deleted")
+ List findAll(Pageable thePage, @Param("deleted") boolean theDeleted);
+
+ @Modifying
+ @Query("UPDATE ResourceReindexJobEntity j SET j.mySuspendedUntil = :suspendedUntil")
+ void setSuspendedUntil(@Param("suspendedUntil") Date theSuspendedUntil);
+
+ @Modifying
+ @Query("UPDATE ResourceReindexJobEntity j SET j.myThresholdLow = :low WHERE j.myId = :id")
+ void setThresholdLow(@Param("id") Long theId, @Param("low") Date theLow);
+}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceTableDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceTableDao.java
index 3737cdcbb5d..a5986473d52 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceTableDao.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceTableDao.java
@@ -1,6 +1,5 @@
package ca.uhn.fhir.jpa.dao.data;
-import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao;
import ca.uhn.fhir.jpa.entity.ResourceTable;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Slice;
@@ -9,6 +8,7 @@ import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
+import java.util.Date;
import java.util.List;
import java.util.Map;
@@ -21,9 +21,9 @@ import java.util.Map;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -43,17 +43,16 @@ public interface IResourceTableDao extends JpaRepository {
@Query("SELECT t.myId FROM ResourceTable t WHERE t.myId = :resid AND t.myResourceType = :restype AND t.myDeleted IS NOT NULL")
Slice findIdsOfDeletedResourcesOfType(Pageable thePageable, @Param("resid") Long theResourceId, @Param("restype") String theResourceName);
- @Query("SELECT t.myId FROM ResourceTable t WHERE t.myIndexStatus IS NULL")
- Slice findIdsOfResourcesRequiringReindexing(Pageable thePageable);
-
- @Query("SELECT t.myResourceType as type, COUNT(*) as count FROM ResourceTable t GROUP BY t.myResourceType")
+ @Query("SELECT t.myResourceType as type, COUNT(t.myResourceType) as count FROM ResourceTable t GROUP BY t.myResourceType")
List> getResourceCounts();
- @Modifying
- @Query("UPDATE ResourceTable r SET r.myIndexStatus = null WHERE r.myResourceType = :restype")
- int markResourcesOfTypeAsRequiringReindexing(@Param("restype") String theResourceType);
+	@Query("SELECT t.myId FROM ResourceTable t WHERE t.myUpdated >= :low AND t.myUpdated <= :high ORDER BY t.myUpdated ASC")
+	Slice<Long> findIdsOfResourcesWithinUpdatedRangeOrderedFromOldest(Pageable thePage, @Param("low") Date theLow, @Param("high") Date theHigh);
+
+	@Query("SELECT t.myId FROM ResourceTable t WHERE t.myUpdated >= :low AND t.myUpdated <= :high AND t.myResourceType = :restype ORDER BY t.myUpdated ASC")
+	Slice<Long> findIdsOfResourcesWithinUpdatedRangeOrderedFromOldest(Pageable thePage, @Param("restype") String theResourceType, @Param("low") Date theLow, @Param("high") Date theHigh);
@Modifying
- @Query("UPDATE ResourceTable r SET r.myIndexStatus = " + BaseHapiFhirDao.INDEX_STATUS_INDEXING_FAILED + " WHERE r.myId = :resid")
- void updateStatusToErrored(@Param("resid") Long theId);
+ @Query("UPDATE ResourceTable t SET t.myIndexStatus = :status WHERE t.myId = :id")
+ void updateIndexStatus(@Param("id") Long theId, @Param("status") Long theIndexStatus);
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/index/IndexingSupport.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/index/IndexingSupport.java
index 26a7163758a..4c121819f4c 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/index/IndexingSupport.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/index/IndexingSupport.java
@@ -20,11 +20,15 @@ package ca.uhn.fhir.jpa.dao.index;
* #L%
*/
+import java.util.Collection;
import java.util.Map;
import java.util.Set;
import javax.persistence.EntityManager;
+import ca.uhn.fhir.jpa.entity.BaseHasResource;
+import ca.uhn.fhir.jpa.entity.IBaseResourceEntity;
+import ca.uhn.fhir.jpa.entity.ResourceTag;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
@@ -50,4 +54,5 @@ public interface IndexingSupport {
public Long translateForcedIdToPid(String theResourceName, String theResourceId);
public String toResourceName(Class<? extends IBaseResource> theResourceType);
public IResourceIndexedCompositeStringUniqueDao getResourceIndexedCompositeStringUniqueDao();
+
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceReindexJobEntity.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceReindexJobEntity.java
new file mode 100644
index 00000000000..cefbd92388b
--- /dev/null
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceReindexJobEntity.java
@@ -0,0 +1,113 @@
+package ca.uhn.fhir.jpa.entity;
+
+/*
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2018 University Health Network
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import com.google.common.annotations.VisibleForTesting;
+
+import javax.persistence.*;
+import java.io.Serializable;
+import java.util.Date;
+
+@Entity
+@Table(name = "HFJ_RES_REINDEX_JOB")
+public class ResourceReindexJobEntity implements Serializable {
+ @Id
+ @SequenceGenerator(name = "SEQ_RES_REINDEX_JOB", sequenceName = "SEQ_RES_REINDEX_JOB")
+ @GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_RES_REINDEX_JOB")
+ @Column(name = "PID")
+ private Long myId;
+ @Column(name = "RES_TYPE", nullable = true)
+ private String myResourceType;
+ /**
+ * Inclusive
+ */
+ @Column(name = "UPDATE_THRESHOLD_HIGH", nullable = false)
+ @Temporal(TemporalType.TIMESTAMP)
+ private Date myThresholdHigh;
+ @Column(name = "JOB_DELETED", nullable = false)
+ private boolean myDeleted;
+ /**
+ * Inclusive
+ */
+ @Column(name = "UPDATE_THRESHOLD_LOW", nullable = true)
+ @Temporal(TemporalType.TIMESTAMP)
+ private Date myThresholdLow;
+ @Column(name = "SUSPENDED_UNTIL", nullable = true)
+ @Temporal(TemporalType.TIMESTAMP)
+ private Date mySuspendedUntil;
+
+ public Date getSuspendedUntil() {
+ return mySuspendedUntil;
+ }
+
+ public void setSuspendedUntil(Date theSuspendedUntil) {
+ mySuspendedUntil = theSuspendedUntil;
+ }
+
+ /**
+ * Inclusive
+ */
+ public Date getThresholdLow() {
+ return myThresholdLow;
+ }
+
+ /**
+ * Inclusive
+ */
+ public void setThresholdLow(Date theThresholdLow) {
+ myThresholdLow = theThresholdLow;
+ }
+
+ public String getResourceType() {
+ return myResourceType;
+ }
+
+ public void setResourceType(String theResourceType) {
+ myResourceType = theResourceType;
+ }
+
+ /**
+ * Inclusive
+ */
+ public Date getThresholdHigh() {
+ return myThresholdHigh;
+ }
+
+ /**
+ * Inclusive
+ */
+ public void setThresholdHigh(Date theThresholdHigh) {
+ myThresholdHigh = theThresholdHigh;
+ }
+
+ public Long getId() {
+ return myId;
+ }
+
+ @VisibleForTesting
+ public void setIdForUnitTest(long theId) {
+ myId = theId;
+ }
+
+ public void setDeleted(boolean theDeleted) {
+ myDeleted = theDeleted;
+ }
+}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceTable.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceTable.java
index d8a7645459b..e30a9b83438 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceTable.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceTable.java
@@ -564,9 +564,7 @@ public class ResourceTable extends BaseHasResource implements Serializable {
retVal.setPublished(getPublished());
retVal.setUpdated(getUpdated());
-// retVal.setEncoding(getEncoding());
retVal.setFhirVersion(getFhirVersion());
-// retVal.setResource(getResource());
retVal.setDeleted(getDeleted());
retVal.setForcedId(getForcedId());
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/BaseJpaSystemProvider.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/BaseJpaSystemProvider.java
index 89483e7ccb1..889bd2099fa 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/BaseJpaSystemProvider.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/BaseJpaSystemProvider.java
@@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.provider;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -21,6 +21,7 @@ package ca.uhn.fhir.jpa.provider;
*/
import ca.uhn.fhir.jpa.dao.IFhirSystemDao;
+import ca.uhn.fhir.jpa.search.reindex.IResourceReindexingSvc;
import ca.uhn.fhir.jpa.util.ExpungeOptions;
import ca.uhn.fhir.jpa.util.ExpungeOutcome;
import ca.uhn.fhir.rest.annotation.At;
@@ -31,6 +32,7 @@ import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.param.DateRangeParam;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
import org.hl7.fhir.r4.model.Parameters;
+import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Required;
import javax.servlet.http.HttpServletRequest;
@@ -42,11 +44,17 @@ public class BaseJpaSystemProvider extends BaseJpaProvider {
public static final String PERFORM_REINDEXING_PASS = "$perform-reindexing-pass";
private IFhirSystemDao myDao;
+ @Autowired
+ private IResourceReindexingSvc myResourceReindexingSvc;
public BaseJpaSystemProvider() {
// nothing
}
+ protected IResourceReindexingSvc getResourceReindexingSvc() {
+ return myResourceReindexingSvc;
+ }
+
protected Parameters doExpunge(IPrimitiveType<? extends Integer> theLimit, IPrimitiveType<? extends Boolean> theExpungeDeletedResources, IPrimitiveType<? extends Boolean> theExpungeOldVersions, IPrimitiveType<? extends Boolean> theExpungeEverything) {
ExpungeOptions options = createExpungeOptions(theLimit, theExpungeDeletedResources, theExpungeOldVersions, theExpungeEverything);
ExpungeOutcome outcome = getDao().expunge(options);
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/BaseJpaSystemProviderDstu2Plus.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/BaseJpaSystemProviderDstu2Plus.java
index 2d0f3e92876..1b6eb10678c 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/BaseJpaSystemProviderDstu2Plus.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/BaseJpaSystemProviderDstu2Plus.java
@@ -34,11 +34,11 @@ public abstract class BaseJpaSystemProviderDstu2Plus extends BaseJpaSyste
@OperationParam(name = "status")
})
public IBaseResource markAllResourcesForReindexing() {
- Integer count = getDao().markAllResourcesForReindexing();
+ getResourceReindexingSvc().markAllResourcesForReindexing();
IBaseParameters retVal = ParametersUtil.newInstance(getContext());
-		IPrimitiveType<?> string = ParametersUtil.createString(getContext(), "Marked " + count + " resources");
+		IPrimitiveType<?> string = ParametersUtil.createString(getContext(), "Marked resources");
ParametersUtil.addParameterToParameters(getContext(), retVal, "status", string);
return retVal;
@@ -48,7 +48,7 @@ public abstract class BaseJpaSystemProviderDstu2Plus extends BaseJpaSyste
@OperationParam(name = "status")
})
public IBaseResource performReindexingPass() {
- Integer count = getDao().performReindexingPass(1000);
+ Integer count = getResourceReindexingSvc().runReindexingPass();
IBaseParameters retVal = ParametersUtil.newInstance(getContext());
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/PersistedJpaBundleProvider.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/PersistedJpaBundleProvider.java
index 27177cc5c05..83e5268973f 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/PersistedJpaBundleProvider.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/PersistedJpaBundleProvider.java
@@ -276,8 +276,8 @@ public class PersistedJpaBundleProvider implements IBundleProvider {
protected List toResourceList(ISearchBuilder sb, List pidsSubList) {
Set includedPids = new HashSet<>();
if (mySearchEntity.getSearchType() == SearchTypeEnum.SEARCH) {
- includedPids.addAll(sb.loadIncludes(myDao, myContext, myEntityManager, pidsSubList, mySearchEntity.toRevIncludesList(), true, mySearchEntity.getLastUpdated()));
- includedPids.addAll(sb.loadIncludes(myDao, myContext, myEntityManager, pidsSubList, mySearchEntity.toIncludesList(), false, mySearchEntity.getLastUpdated()));
+ includedPids.addAll(sb.loadIncludes(myDao, myContext, myEntityManager, pidsSubList, mySearchEntity.toRevIncludesList(), true, mySearchEntity.getLastUpdated(), myUuid));
+ includedPids.addAll(sb.loadIncludes(myDao, myContext, myEntityManager, pidsSubList, mySearchEntity.toIncludesList(), false, mySearchEntity.getLastUpdated(), myUuid));
}
// Execute the query and make sure we return distinct results
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/SearchCoordinatorSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/SearchCoordinatorSvcImpl.java
index d546ecc9677..478ad89ef53 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/SearchCoordinatorSvcImpl.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/SearchCoordinatorSvcImpl.java
@@ -313,8 +313,8 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
* individually for pages as we return them to clients
*/
final Set includedPids = new HashSet<>();
- includedPids.addAll(sb.loadIncludes(theCallingDao, myContext, myEntityManager, pids, theParams.getRevIncludes(), true, theParams.getLastUpdated()));
- includedPids.addAll(sb.loadIncludes(theCallingDao, myContext, myEntityManager, pids, theParams.getIncludes(), false, theParams.getLastUpdated()));
+ includedPids.addAll(sb.loadIncludes(theCallingDao, myContext, myEntityManager, pids, theParams.getRevIncludes(), true, theParams.getLastUpdated(), "(synchronous)"));
+ includedPids.addAll(sb.loadIncludes(theCallingDao, myContext, myEntityManager, pids, theParams.getIncludes(), false, theParams.getLastUpdated(), "(synchronous)"));
List resources = new ArrayList<>();
sb.loadResourcesByPid(pids, resources, includedPids, false, myEntityManager, myContext, theCallingDao);
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/reindex/IResourceReindexingSvc.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/reindex/IResourceReindexingSvc.java
new file mode 100644
index 00000000000..18d7671bfdc
--- /dev/null
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/reindex/IResourceReindexingSvc.java
@@ -0,0 +1,34 @@
+package ca.uhn.fhir.jpa.search.reindex;
+
+public interface IResourceReindexingSvc {
+
+ /**
+ * Marks all indexes as needing fresh indexing
+ */
+ void markAllResourcesForReindexing();
+
+ /**
+ * Marks all indexes of the given type as needing fresh indexing
+ */
+ void markAllResourcesForReindexing(String theType);
+
+ /**
+ * Called automatically by the job scheduler
+ *
+ * @return Returns null if the system did not attempt to perform a pass because one was
+ * already proceeding. Otherwise, returns the number of resources affected.
+ */
+ Integer runReindexingPass();
+
+ /**
+ * Does the same thing as {@link #runReindexingPass()} but makes sure to perform at
+ * least one pass even if one is half finished
+ */
+ Integer forceReindexingPass();
+
+ /**
+ * Cancels all running and future reindexing jobs. This is mainly intended
+ * to be used by unit tests.
+ */
+ void cancelAndPurgeAllJobs();
+}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/reindex/ResourceReindexingSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/reindex/ResourceReindexingSvcImpl.java
new file mode 100644
index 00000000000..dd09abdab37
--- /dev/null
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/reindex/ResourceReindexingSvcImpl.java
@@ -0,0 +1,450 @@
+package ca.uhn.fhir.jpa.search.reindex;
+
+import ca.uhn.fhir.context.FhirContext;
+import ca.uhn.fhir.context.RuntimeResourceDefinition;
+import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao;
+import ca.uhn.fhir.jpa.dao.DaoConfig;
+import ca.uhn.fhir.jpa.dao.DaoRegistry;
+import ca.uhn.fhir.jpa.dao.IFhirResourceDao;
+import ca.uhn.fhir.jpa.dao.data.IForcedIdDao;
+import ca.uhn.fhir.jpa.dao.data.IResourceReindexJobDao;
+import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
+import ca.uhn.fhir.jpa.entity.ForcedId;
+import ca.uhn.fhir.jpa.entity.ResourceReindexJobEntity;
+import ca.uhn.fhir.jpa.entity.ResourceTable;
+import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
+import ca.uhn.fhir.rest.server.exceptions.ResourceVersionConflictException;
+import ca.uhn.fhir.util.StopWatch;
+import com.google.common.annotations.VisibleForTesting;
+import org.apache.commons.lang3.Validate;
+import org.apache.commons.lang3.concurrent.BasicThreadFactory;
+import org.apache.commons.lang3.time.DateUtils;
+import org.hibernate.search.util.impl.Executors;
+import org.hl7.fhir.instance.model.api.IBaseResource;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.data.domain.PageRequest;
+import org.springframework.data.domain.Slice;
+import org.springframework.scheduling.annotation.Scheduled;
+import org.springframework.transaction.PlatformTransactionManager;
+import org.springframework.transaction.TransactionDefinition;
+import org.springframework.transaction.support.TransactionCallback;
+import org.springframework.transaction.support.TransactionTemplate;
+
+import javax.annotation.PostConstruct;
+import javax.persistence.EntityManager;
+import javax.persistence.PersistenceContext;
+import javax.persistence.PersistenceContextType;
+import javax.persistence.Query;
+import javax.transaction.Transactional;
+import java.util.Collection;
+import java.util.Date;
+import java.util.List;
+import java.util.concurrent.*;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.locks.ReentrantLock;
+import java.util.stream.Collectors;
+
+import static org.apache.commons.lang3.StringUtils.isBlank;
+import static org.apache.commons.lang3.StringUtils.isNotBlank;
+
+public class ResourceReindexingSvcImpl implements IResourceReindexingSvc {
+
+ private static final Date BEGINNING_OF_TIME = new Date(0);
+ private static final Logger ourLog = LoggerFactory.getLogger(ResourceReindexingSvcImpl.class);
+ private final ReentrantLock myIndexingLock = new ReentrantLock();
+ @Autowired
+ private IResourceReindexJobDao myReindexJobDao;
+ @Autowired
+ private DaoConfig myDaoConfig;
+ @Autowired
+ private PlatformTransactionManager myTxManager;
+ private TransactionTemplate myTxTemplate;
+ private ThreadFactory myReindexingThreadFactory = new BasicThreadFactory.Builder().namingPattern("ResourceReindex-%d").build();
+ private ThreadPoolExecutor myTaskExecutor;
+ @Autowired
+ private IResourceTableDao myResourceTableDao;
+ @Autowired
+ private DaoRegistry myDaoRegistry;
+ @Autowired
+ private IForcedIdDao myForcedIdDao;
+ @Autowired
+ private FhirContext myContext;
+ @PersistenceContext(type = PersistenceContextType.TRANSACTION)
+ private EntityManager myEntityManager;
+
+ @VisibleForTesting
+ void setReindexJobDaoForUnitTest(IResourceReindexJobDao theReindexJobDao) {
+ myReindexJobDao = theReindexJobDao;
+ }
+
+ @VisibleForTesting
+ void setDaoConfigForUnitTest(DaoConfig theDaoConfig) {
+ myDaoConfig = theDaoConfig;
+ }
+
+ @VisibleForTesting
+ void setTxManagerForUnitTest(PlatformTransactionManager theTxManager) {
+ myTxManager = theTxManager;
+ }
+
+ @VisibleForTesting
+ void setResourceTableDaoForUnitTest(IResourceTableDao theResourceTableDao) {
+ myResourceTableDao = theResourceTableDao;
+ }
+
+ @VisibleForTesting
+ void setDaoRegistryForUnitTest(DaoRegistry theDaoRegistry) {
+ myDaoRegistry = theDaoRegistry;
+ }
+
+ @VisibleForTesting
+ void setForcedIdDaoForUnitTest(IForcedIdDao theForcedIdDao) {
+ myForcedIdDao = theForcedIdDao;
+ }
+
+ @VisibleForTesting
+ void setContextForUnitTest(FhirContext theContext) {
+ myContext = theContext;
+ }
+
+ @PostConstruct
+ public void start() {
+ myTxTemplate = new TransactionTemplate(myTxManager);
+ initExecutor();
+ }
+
+ private void initExecutor() {
+ // Create the threadpool executor used for reindex jobs
+ int reindexThreadCount = myDaoConfig.getReindexThreadCount();
+ RejectedExecutionHandler rejectHandler = new Executors.BlockPolicy();
+ myTaskExecutor = new ThreadPoolExecutor(0, reindexThreadCount,
+ 0L, TimeUnit.MILLISECONDS,
+ new LinkedBlockingQueue<>(100),
+ myReindexingThreadFactory,
+ rejectHandler
+ );
+ }
+
+ @Override
+ @Transactional(Transactional.TxType.REQUIRED)
+ public void markAllResourcesForReindexing() {
+ markAllResourcesForReindexing(null);
+ }
+
+ @Override
+ @Transactional(Transactional.TxType.REQUIRED)
+ public void markAllResourcesForReindexing(String theType) {
+ String typeDesc;
+ if (isNotBlank(theType)) {
+ myReindexJobDao.markAllOfTypeAsDeleted(theType);
+ typeDesc = theType;
+ } else {
+ myReindexJobDao.markAllOfTypeAsDeleted();
+ typeDesc = "(any)";
+ }
+
+ ResourceReindexJobEntity job = new ResourceReindexJobEntity();
+ job.setResourceType(theType);
+ job.setThresholdHigh(DateUtils.addMinutes(new Date(), 5));
+ job = myReindexJobDao.saveAndFlush(job);
+
+ ourLog.info("Marking all resources of type {} for reindexing - Got job ID[{}]", typeDesc, job.getId());
+ }
+
+ @Override
+ @Transactional(Transactional.TxType.NEVER)
+ @Scheduled(fixedDelay = 10 * DateUtils.MILLIS_PER_SECOND)
+ public Integer runReindexingPass() {
+ if (myIndexingLock.tryLock()) {
+ try {
+ return doReindexingPassInsideLock();
+ } finally {
+ myIndexingLock.unlock();
+ }
+ }
+ return null;
+ }
+
+ private Integer doReindexingPassInsideLock() {
+ expungeJobsMarkedAsDeleted();
+ return runReindexJobs();
+ }
+
+ @Override
+ public Integer forceReindexingPass() {
+ myIndexingLock.lock();
+ try {
+ return doReindexingPassInsideLock();
+ } finally {
+ myIndexingLock.unlock();
+ }
+ }
+
+ @Override
+ public void cancelAndPurgeAllJobs() {
+ ourLog.info("Cancelling and purging all resource reindexing jobs");
+ myTxTemplate.execute(t -> {
+ myReindexJobDao.markAllOfTypeAsDeleted();
+ return null;
+ });
+
+ myTaskExecutor.shutdown();
+ initExecutor();
+
+ expungeJobsMarkedAsDeleted();
+ }
+
+ private Integer runReindexJobs() {
+		Collection<ResourceReindexJobEntity> jobs = myTxTemplate.execute(t -> myReindexJobDao.findAll(PageRequest.of(0, 10), false));
+ assert jobs != null;
+
+ int count = 0;
+ for (ResourceReindexJobEntity next : jobs) {
+
+ if (next.getThresholdHigh().getTime() < System.currentTimeMillis()) {
+ markJobAsDeleted(next);
+ continue;
+ }
+
+ count += runReindexJob(next);
+ }
+ return count;
+ }
+
+ private void markJobAsDeleted(ResourceReindexJobEntity next) {
+ myTxTemplate.execute(t -> {
+ myReindexJobDao.markAsDeletedById(next.getId());
+ return null;
+ });
+ }
+
+ private int runReindexJob(ResourceReindexJobEntity theJob) {
+ if (theJob.getSuspendedUntil() != null) {
+ if (theJob.getSuspendedUntil().getTime() > System.currentTimeMillis()) {
+ return 0;
+ }
+ }
+
+ ourLog.info("Performing reindex pass for JOB[{}]", theJob.getId());
+ StopWatch sw = new StopWatch();
+ AtomicInteger counter = new AtomicInteger();
+
+ // Calculate range
+ Date low = theJob.getThresholdLow() != null ? theJob.getThresholdLow() : BEGINNING_OF_TIME;
+ Date high = theJob.getThresholdHigh();
+
+ // Query for resources within threshold
+		Slice<Long> range = myTxTemplate.execute(t -> {
+ PageRequest page = PageRequest.of(0, 10000);
+ if (isNotBlank(theJob.getResourceType())) {
+ return myResourceTableDao.findIdsOfResourcesWithinUpdatedRangeOrderedFromOldest(page, theJob.getResourceType(), low, high);
+ } else {
+ return myResourceTableDao.findIdsOfResourcesWithinUpdatedRangeOrderedFromOldest(page, low, high);
+ }
+ });
+ Validate.notNull(range);
+ int count = range.getNumberOfElements();
+
+ // Submit each resource requiring reindexing
+ List<Future<Date>> futures = range
+ .stream()
+ .map(t -> myTaskExecutor.submit(new ResourceReindexingTask(t, counter)))
+ .collect(Collectors.toList());
+
+ Date latestDate = null;
+ boolean haveMultipleDates = false;
+ for (Future<Date> next : futures) {
+ Date nextDate;
+ try {
+ nextDate = next.get();
+ } catch (Exception e) {
+ ourLog.error("Failure reindexing", e);
+ Date suspendedUntil = DateUtils.addMinutes(new Date(), 1);
+ myTxTemplate.execute(t -> {
+ myReindexJobDao.setSuspendedUntil(suspendedUntil);
+ return null;
+ });
+ return counter.get();
+ }
+
+ if (nextDate != null) {
+ if (latestDate != null) {
+ if (latestDate.getTime() != nextDate.getTime()) {
+ haveMultipleDates = true;
+ }
+ }
+ if (latestDate == null || latestDate.getTime() < nextDate.getTime()) {
+ latestDate = new Date(nextDate.getTime());
+ }
+ }
+ }
+
+ // Just in case we end up in some sort of infinite loop. This shouldn't happen, and couldn't really
+ // happen unless there were 10000 resources with the exact same update time down to the
+ // millisecond.
+ Date newLow;
+ if (latestDate == null) {
+ markJobAsDeleted(theJob);
+ return 0;
+ }
+ if (latestDate.getTime() == low.getTime()) {
+ ourLog.error("Final pass time for reindex JOB[{}] has same ending low value: {}", theJob.getId(), latestDate);
+ newLow = new Date(latestDate.getTime() + 1);
+ } else if (!haveMultipleDates) {
+ newLow = new Date(latestDate.getTime() + 1);
+ } else {
+ newLow = latestDate;
+ }
+
+ myTxTemplate.execute(t -> {
+ myReindexJobDao.setThresholdLow(theJob.getId(), newLow);
+ return null;
+ });
+
+ ourLog.info("Completed pass of reindex JOB[{}] - Indexed {} resources in {} ({} / sec) - Have indexed until: {}", theJob.getId(), count, sw.toString(), sw.formatThroughput(count, TimeUnit.SECONDS), theJob.getThresholdLow());
+ return counter.get();
+ }
+
+ private void expungeJobsMarkedAsDeleted() {
+ myTxTemplate.execute(t -> {
+ Collection<ResourceReindexJobEntity> toDelete = myReindexJobDao.findAll(PageRequest.of(0, 10), true);
+ toDelete.forEach(job -> {
+ ourLog.info("Purging deleted job[{}]", job.getId());
+ myReindexJobDao.deleteById(job.getId());
+ });
+ return null;
+ });
+ }
+
+ @SuppressWarnings("JpaQlInspection")
+ private void markResourceAsIndexingFailed(final long theId) {
+ TransactionTemplate txTemplate = new TransactionTemplate(myTxManager);
+ txTemplate.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);
+ txTemplate.execute((TransactionCallback<Void>) theStatus -> {
+ ourLog.info("Marking resource with PID {} as indexing_failed", new Object[]{theId});
+
+ myResourceTableDao.updateIndexStatus(theId, BaseHapiFhirDao.INDEX_STATUS_INDEXING_FAILED);
+
+ Query q = myEntityManager.createQuery("DELETE FROM ResourceTag t WHERE t.myResourceId = :id");
+ q.setParameter("id", theId);
+ q.executeUpdate();
+
+ q = myEntityManager.createQuery("DELETE FROM ResourceIndexedSearchParamCoords t WHERE t.myResourcePid = :id");
+ q.setParameter("id", theId);
+ q.executeUpdate();
+
+ q = myEntityManager.createQuery("DELETE FROM ResourceIndexedSearchParamDate t WHERE t.myResourcePid = :id");
+ q.setParameter("id", theId);
+ q.executeUpdate();
+
+ q = myEntityManager.createQuery("DELETE FROM ResourceIndexedSearchParamNumber t WHERE t.myResourcePid = :id");
+ q.setParameter("id", theId);
+ q.executeUpdate();
+
+ q = myEntityManager.createQuery("DELETE FROM ResourceIndexedSearchParamQuantity t WHERE t.myResourcePid = :id");
+ q.setParameter("id", theId);
+ q.executeUpdate();
+
+ q = myEntityManager.createQuery("DELETE FROM ResourceIndexedSearchParamString t WHERE t.myResourcePid = :id");
+ q.setParameter("id", theId);
+ q.executeUpdate();
+
+ q = myEntityManager.createQuery("DELETE FROM ResourceIndexedSearchParamToken t WHERE t.myResourcePid = :id");
+ q.setParameter("id", theId);
+ q.executeUpdate();
+
+ q = myEntityManager.createQuery("DELETE FROM ResourceIndexedSearchParamUri t WHERE t.myResourcePid = :id");
+ q.setParameter("id", theId);
+ q.executeUpdate();
+
+ q = myEntityManager.createQuery("DELETE FROM ResourceLink t WHERE t.mySourceResourcePid = :id");
+ q.setParameter("id", theId);
+ q.executeUpdate();
+
+ q = myEntityManager.createQuery("DELETE FROM ResourceLink t WHERE t.myTargetResourcePid = :id");
+ q.setParameter("id", theId);
+ q.executeUpdate();
+
+ return null;
+ });
+ }
+
+ private class ResourceReindexingTask implements Callable<Date> {
+ private final Long myNextId;
+ private final AtomicInteger myCounter;
+ private Date myUpdated;
+
+ ResourceReindexingTask(Long theNextId, AtomicInteger theCounter) {
+ myNextId = theNextId;
+ myCounter = theCounter;
+ }
+
+
+ @SuppressWarnings("unchecked")
+ private <T extends IBaseResource> void doReindex(ResourceTable theResourceTable, T theResource) {
+ RuntimeResourceDefinition resourceDefinition = myContext.getResourceDefinition(theResource.getClass());
+ Class<T> resourceClass = (Class<T>) resourceDefinition.getImplementingClass();
+ final IFhirResourceDao<T> dao = myDaoRegistry.getResourceDao(resourceClass);
+ dao.reindex(theResource, theResourceTable);
+
+ myCounter.incrementAndGet();
+ }
+
+ @Override
+ public Date call() {
+ Throwable reindexFailure;
+ try {
+ reindexFailure = myTxTemplate.execute(t -> {
+ ResourceTable resourceTable = myResourceTableDao.findById(myNextId).orElseThrow(IllegalStateException::new);
+ myUpdated = resourceTable.getUpdatedDate();
+
+ try {
+ /*
+ * This part is because from HAPI 1.5 - 1.6 we changed the format of forced ID to be "type/id" instead of just "id"
+ */
+ ForcedId forcedId = resourceTable.getForcedId();
+ if (forcedId != null) {
+ if (isBlank(forcedId.getResourceType())) {
+ ourLog.info("Updating resource {} forcedId type to {}", forcedId.getForcedId(), resourceTable.getResourceType());
+ forcedId.setResourceType(resourceTable.getResourceType());
+ myForcedIdDao.save(forcedId);
+ }
+ }
+
+ IFhirResourceDao<?> dao = myDaoRegistry.getResourceDao(resourceTable.getResourceType());
+ IBaseResource resource = dao.toResource(resourceTable, false);
+ if (resource == null) {
+ throw new InternalErrorException("Could not find resource version " + resourceTable.getIdDt().toUnqualified().getValue() + " in database");
+ }
+ doReindex(resourceTable, resource);
+ return null;
+
+ } catch (Exception e) {
+ ourLog.error("Failed to index resource {}: {}", resourceTable.getIdDt(), e.toString(), e);
+ t.setRollbackOnly();
+ return e;
+ }
+ });
+ } catch (ResourceVersionConflictException e) {
+ /*
+ * We reindex in multiple threads, so it's technically possible that two threads try
+ * to index resources that cause a constraint error now (i.e. because a unique index has been
+ * added that didn't previously exist). In this case, one of the threads would succeed and
+ * not get this error, so we'll let the other one fail and try
+ * again later.
+ */
+ ourLog.info("Failed to reindex {} because of a version conflict. Leaving in unindexed state: {}", e.getMessage());
+ reindexFailure = null;
+ }
+
+ if (reindexFailure != null) {
+ ourLog.info("Setting resource PID[{}] status to ERRORED", myNextId);
+ markResourceAsIndexingFailed(myNextId);
+ }
+
+ return myUpdated;
+ }
+ }
+}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/IReindexController.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/IReindexController.java
deleted file mode 100644
index b9935057aef..00000000000
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/IReindexController.java
+++ /dev/null
@@ -1,34 +0,0 @@
-package ca.uhn.fhir.jpa.util;
-
-/*-
- * #%L
- * HAPI FHIR JPA Server
- * %%
- * Copyright (C) 2014 - 2018 University Health Network
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-
-public interface IReindexController {
-
- /**
- * This method is called automatically by the scheduler
- */
- void performReindexingPass();
-
- /**
- * This method requests that the reindex process happen as soon as possible
- */
- void requestReindex();
-}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/ReindexController.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/ReindexController.java
deleted file mode 100644
index 1ed136f8791..00000000000
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/ReindexController.java
+++ /dev/null
@@ -1,119 +0,0 @@
-package ca.uhn.fhir.jpa.util;
-
-/*-
- * #%L
- * HAPI FHIR JPA Server
- * %%
- * Copyright (C) 2014 - 2018 University Health Network
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-
-import ca.uhn.fhir.jpa.dao.DaoConfig;
-import ca.uhn.fhir.jpa.dao.IFhirSystemDao;
-import org.apache.commons.lang3.time.DateUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.scheduling.annotation.Scheduled;
-import org.springframework.transaction.annotation.Propagation;
-import org.springframework.transaction.annotation.Transactional;
-
-import java.util.concurrent.Semaphore;
-
-public class ReindexController implements IReindexController {
-
- private static final Logger ourLog = LoggerFactory.getLogger(ReindexController.class);
- private final Semaphore myReindexingLock = new Semaphore(1);
- @Autowired
- private DaoConfig myDaoConfig;
- @Autowired
- private IFhirSystemDao<?, ?> mySystemDao;
- private Long myDontReindexUntil;
-
- /**
- * This method is called once per minute to perform any required re-indexing.
- *
- * If nothing if found that requires reindexing, the query will not fire again for
- * a longer amount of time.
- *
- * During most passes this will just check and find that there are no resources
- * requiring re-indexing. In that case the method just returns immediately.
- * If the search finds that some resources require reindexing, the system will
- * do a bunch of reindexing and then return.
- */
- @Scheduled(fixedDelay = DateUtils.MILLIS_PER_MINUTE)
- @Transactional(propagation = Propagation.NEVER)
- @Override
- public void performReindexingPass() {
- if (myDaoConfig.isSchedulingDisabled() || myDaoConfig.isStatusBasedReindexingDisabled()) {
- return;
- }
-
- synchronized (this) {
- if (myDontReindexUntil != null && myDontReindexUntil > System.currentTimeMillis()) {
- return;
- }
- }
-
- if (!myReindexingLock.tryAcquire()) {
- ourLog.trace("Not going to reindex in parallel threads");
- return;
- }
- Integer count;
- try {
- count = mySystemDao.performReindexingPass(100);
-
- for (int i = 0; i < 50 && count != null && count != 0; i++) {
- count = mySystemDao.performReindexingPass(100);
- try {
- Thread.sleep(DateUtils.MILLIS_PER_SECOND);
- } catch (InterruptedException e) {
- break;
- }
- }
- } catch (Exception e) {
- ourLog.error("Failure during reindex", e);
- count = -1;
- } finally {
- myReindexingLock.release();
- }
-
- synchronized (this) {
- if (count == null) {
- ourLog.info("Reindex pass complete, no remaining resource to index");
- myDontReindexUntil = System.currentTimeMillis() + DateUtils.MILLIS_PER_HOUR;
- } else if (count == -1) {
- // Reindexing failed
- myDontReindexUntil = System.currentTimeMillis() + DateUtils.MILLIS_PER_HOUR;
- } else {
- ourLog.info("Reindex pass complete, {} remaining resource to index", count);
- myDontReindexUntil = null;
- }
- }
-
- }
-
- /**
- * Calling this will cause a reindex loop to be triggered sooner that it would otherwise
- */
- @Override
- public void requestReindex() {
- synchronized (this) {
- myDontReindexUntil = null;
- }
- }
-
-
-}
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/config/TestR4Config.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/config/TestR4Config.java
index 1d86d6e2070..2b93bbb04bf 100644
--- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/config/TestR4Config.java
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/config/TestR4Config.java
@@ -4,7 +4,6 @@ import ca.uhn.fhir.jpa.dao.DaoConfig;
import ca.uhn.fhir.rest.server.interceptor.RequestValidatingInterceptor;
import ca.uhn.fhir.validation.ResultSeverityEnum;
import net.ttddyy.dsproxy.listener.SingleQueryCountHolder;
-import net.ttddyy.dsproxy.listener.ThreadQueryCountHolder;
import net.ttddyy.dsproxy.listener.logging.SLF4JLogLevel;
import net.ttddyy.dsproxy.support.ProxyDataSourceBuilder;
import org.apache.commons.dbcp2.BasicDataSource;
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/BaseJpaTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/BaseJpaTest.java
index 5e7d3cc046c..518aa6bd195 100644
--- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/BaseJpaTest.java
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/BaseJpaTest.java
@@ -6,6 +6,7 @@ import ca.uhn.fhir.jpa.provider.SystemProviderDstu2Test;
import ca.uhn.fhir.jpa.search.DatabaseBackedPagingProvider;
import ca.uhn.fhir.jpa.search.ISearchCoordinatorSvc;
import ca.uhn.fhir.jpa.search.PersistedJpaBundleProvider;
+import ca.uhn.fhir.jpa.search.reindex.IResourceReindexingSvc;
import ca.uhn.fhir.jpa.sp.ISearchParamPresenceSvc;
import ca.uhn.fhir.jpa.term.VersionIndependentConcept;
import ca.uhn.fhir.jpa.util.ExpungeOptions;
@@ -33,7 +34,9 @@ import org.hl7.fhir.instance.model.api.IBaseBundle;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.junit.*;
-import org.mockito.Mockito;
+import org.mockito.Answers;
+import org.mockito.Mock;
+import org.mockito.MockitoAnnotations;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.orm.jpa.JpaTransactionManager;
import org.springframework.transaction.PlatformTransactionManager;
@@ -75,6 +78,7 @@ public abstract class BaseJpaTest {
@Rule
public LoggingRule myLoggingRule = new LoggingRule();
+ @Mock(answer = Answers.RETURNS_DEEP_STUBS)
protected ServletRequestDetails mySrd;
protected ArrayList myServerInterceptorList;
protected IRequestOperationCallback myRequestOperationCallback = mock(IRequestOperationCallback.class);
@@ -89,7 +93,7 @@ public abstract class BaseJpaTest {
@After
public void afterValidateNoTransaction() {
PlatformTransactionManager txManager = getTxManager();
- if (txManager != null) {
+ if (txManager instanceof JpaTransactionManager) {
JpaTransactionManager hibernateTxManager = (JpaTransactionManager) txManager;
SessionFactory sessionFactory = (SessionFactory) hibernateTxManager.getEntityManagerFactory();
AtomicBoolean isReadOnly = new AtomicBoolean();
@@ -114,8 +118,9 @@ public abstract class BaseJpaTest {
}
@Before
- public void beforeCreateSrd() {
- mySrd = mock(ServletRequestDetails.class, Mockito.RETURNS_DEEP_STUBS);
+ public void beforeInitMocks() {
+ MockitoAnnotations.initMocks(this);
+
when(mySrd.getRequestOperationCallback()).thenReturn(myRequestOperationCallback);
myServerInterceptorList = new ArrayList<>();
when(mySrd.getServer().getInterceptors()).thenReturn(myServerInterceptorList);
@@ -355,8 +360,9 @@ public abstract class BaseJpaTest {
return bundleStr;
}
- public static void purgeDatabase(DaoConfig theDaoConfig, IFhirSystemDao<?, ?> theSystemDao, ISearchParamPresenceSvc theSearchParamPresenceSvc, ISearchCoordinatorSvc theSearchCoordinatorSvc, ISearchParamRegistry theSearchParamRegistry) {
+ public static void purgeDatabase(DaoConfig theDaoConfig, IFhirSystemDao<?, ?> theSystemDao, IResourceReindexingSvc theResourceReindexingSvc, ISearchCoordinatorSvc theSearchCoordinatorSvc, ISearchParamRegistry theSearchParamRegistry) {
theSearchCoordinatorSvc.cancelAllActiveSearches();
+ theResourceReindexingSvc.cancelAndPurgeAllJobs();
boolean expungeEnabled = theDaoConfig.isExpungeEnabled();
theDaoConfig.setExpungeEnabled(true);
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/BaseJpaDstu2Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/BaseJpaDstu2Test.java
index a1b45f8c4b5..b50ddc44e98 100644
--- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/BaseJpaDstu2Test.java
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/BaseJpaDstu2Test.java
@@ -12,6 +12,7 @@ import ca.uhn.fhir.jpa.entity.ResourceTable;
import ca.uhn.fhir.jpa.provider.JpaSystemProviderDstu2;
import ca.uhn.fhir.jpa.search.DatabaseBackedPagingProvider;
import ca.uhn.fhir.jpa.search.ISearchCoordinatorSvc;
+import ca.uhn.fhir.jpa.search.reindex.IResourceReindexingSvc;
import ca.uhn.fhir.jpa.sp.ISearchParamPresenceSvc;
import ca.uhn.fhir.jpa.util.ResourceCountCache;
import ca.uhn.fhir.model.dstu2.composite.CodeableConceptDt;
@@ -42,7 +43,7 @@ import javax.persistence.EntityManager;
import java.io.IOException;
import java.io.InputStream;
-import static org.junit.Assert.*;
+import static org.junit.Assert.fail;
import static org.mockito.Mockito.mock;
@RunWith(SpringJUnit4ClassRunner.class)
@@ -56,6 +57,8 @@ public abstract class BaseJpaDstu2Test extends BaseJpaTest {
@Autowired
protected ApplicationContext myAppCtx;
@Autowired
+ protected IResourceReindexingSvc myResourceReindexingSvc;
+ @Autowired
@Qualifier("myAppointmentDaoDstu2")
protected IFhirResourceDao myAppointmentDao;
@Autowired
@@ -197,7 +200,7 @@ public abstract class BaseJpaDstu2Test extends BaseJpaTest {
@Before
@Transactional()
public void beforePurgeDatabase() throws InterruptedException {
- purgeDatabase(myDaoConfig, mySystemDao, mySearchParamPresenceSvc, mySearchCoordinatorSvc, mySearchParamRegistry);
+ purgeDatabase(myDaoConfig, mySystemDao, myResourceReindexingSvc, mySearchCoordinatorSvc, mySearchParamRegistry);
}
@Before
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/FhirResourceDaoDstu2InterceptorTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/FhirResourceDaoDstu2InterceptorTest.java
index 3f904baf7b8..c9626785a91 100644
--- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/FhirResourceDaoDstu2InterceptorTest.java
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/FhirResourceDaoDstu2InterceptorTest.java
@@ -23,7 +23,7 @@ import org.mockito.stubbing.Answer;
import static org.hamcrest.Matchers.containsString;
import static org.junit.Assert.*;
-import static org.mockito.Matchers.any;
+import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.*;
public class FhirResourceDaoDstu2InterceptorTest extends BaseJpaDstu2Test {
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/FhirResourceDaoDstu2SearchCustomSearchParamTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/FhirResourceDaoDstu2SearchCustomSearchParamTest.java
index 152c8edd74a..ecddb19c6f2 100644
--- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/FhirResourceDaoDstu2SearchCustomSearchParamTest.java
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/FhirResourceDaoDstu2SearchCustomSearchParamTest.java
@@ -988,7 +988,9 @@ public class FhirResourceDaoDstu2SearchCustomSearchParamTest extends BaseJpaDstu
mySearchParameterDao.delete(spId, mySrd);
mySearchParamRegsitry.forceRefresh();
- mySystemDao.performReindexingPass(100);
+ myResourceReindexingSvc.markAllResourcesForReindexing();
+ myResourceReindexingSvc.forceReindexingPass();
+ myResourceReindexingSvc.forceReindexingPass();
// Try with custom gender SP
map = new SearchParameterMap();
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/FhirResourceDaoDstu2Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/FhirResourceDaoDstu2Test.java
index 72d2776a1df..c979ebc840a 100644
--- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/FhirResourceDaoDstu2Test.java
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/FhirResourceDaoDstu2Test.java
@@ -3,7 +3,7 @@ package ca.uhn.fhir.jpa.dao.dstu2;
import static org.apache.commons.lang3.StringUtils.defaultString;
import static org.hamcrest.Matchers.*;
import static org.junit.Assert.*;
-import static org.mockito.Matchers.eq;
+import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.*;
import java.io.IOException;
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/BaseJpaDstu3Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/BaseJpaDstu3Test.java
index 9737d3593fb..49c587bca56 100644
--- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/BaseJpaDstu3Test.java
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/BaseJpaDstu3Test.java
@@ -12,6 +12,7 @@ import ca.uhn.fhir.jpa.provider.dstu3.JpaSystemProviderDstu3;
import ca.uhn.fhir.jpa.search.DatabaseBackedPagingProvider;
import ca.uhn.fhir.jpa.search.ISearchCoordinatorSvc;
import ca.uhn.fhir.jpa.search.IStaleSearchDeletingSvc;
+import ca.uhn.fhir.jpa.search.reindex.IResourceReindexingSvc;
import ca.uhn.fhir.jpa.sp.ISearchParamPresenceSvc;
import ca.uhn.fhir.jpa.term.BaseHapiTerminologySvcImpl;
import ca.uhn.fhir.jpa.term.IHapiTerminologySvc;
@@ -67,6 +68,10 @@ public abstract class BaseJpaDstu3Test extends BaseJpaTest {
@Qualifier("myResourceCountsCache")
protected ResourceCountCache myResourceCountsCache;
@Autowired
+ protected IResourceReindexingSvc myResourceReindexingSvc;
+ @Autowired
+ protected IResourceReindexJobDao myResourceReindexJobDao;
+ @Autowired
@Qualifier("myCoverageDaoDstu3")
protected IFhirResourceDao myCoverageDao;
@Autowired
@@ -294,8 +299,8 @@ public abstract class BaseJpaDstu3Test extends BaseJpaTest {
@Before
@Transactional()
- public void beforePurgeDatabase() throws InterruptedException {
- purgeDatabase(myDaoConfig, mySystemDao, mySearchParamPresenceSvc, mySearchCoordinatorSvc, mySearchParamRegsitry);
+ public void beforePurgeDatabase() {
+ purgeDatabase(myDaoConfig, mySystemDao, myResourceReindexingSvc, mySearchCoordinatorSvc, mySearchParamRegsitry);
}
@Before
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3CodeSystemTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3CodeSystemTest.java
index 36dd96b73d0..1c84d1a3868 100644
--- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3CodeSystemTest.java
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3CodeSystemTest.java
@@ -30,10 +30,9 @@ public class FhirResourceDaoDstu3CodeSystemTest extends BaseJpaDstu3Test {
CodeSystem cs = myFhirCtx.newJsonParser().parseResource(CodeSystem.class, input);
myCodeSystemDao.create(cs, mySrd);
-
- mySystemDao.markAllResourcesForReindexing();
- int outcome = mySystemDao.performReindexingPass(100);
+ myResourceReindexingSvc.markAllResourcesForReindexing();
+ int outcome= myResourceReindexingSvc.forceReindexingPass();
assertNotEquals(-1, outcome); // -1 means there was a failure
myTermSvc.saveDeferred();
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3InterceptorTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3InterceptorTest.java
index 170c6a40dc8..69f72cf4004 100644
--- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3InterceptorTest.java
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3InterceptorTest.java
@@ -24,7 +24,7 @@ import org.mockito.stubbing.Answer;
import static org.hamcrest.Matchers.containsString;
import static org.junit.Assert.*;
-import static org.mockito.Matchers.any;
+import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.*;
public class FhirResourceDaoDstu3InterceptorTest extends BaseJpaDstu3Test {
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3SearchCustomSearchParamTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3SearchCustomSearchParamTest.java
index 7eb293d9418..995189d6b33 100644
--- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3SearchCustomSearchParamTest.java
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3SearchCustomSearchParamTest.java
@@ -994,7 +994,8 @@ public class FhirResourceDaoDstu3SearchCustomSearchParamTest extends BaseJpaDstu
mySearchParameterDao.delete(spId, mySrd);
mySearchParamRegsitry.forceRefresh();
- mySystemDao.performReindexingPass(100);
+ myResourceReindexingSvc.forceReindexingPass();
+ myResourceReindexingSvc.forceReindexingPass();
// Try with custom gender SP
map = new SearchParameterMap();
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3SearchWithLuceneDisabledTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3SearchWithLuceneDisabledTest.java
index f1635fbdb0c..0580a74b70a 100644
--- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3SearchWithLuceneDisabledTest.java
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3SearchWithLuceneDisabledTest.java
@@ -5,6 +5,7 @@ import ca.uhn.fhir.jpa.config.TestDstu3WithoutLuceneConfig;
import ca.uhn.fhir.jpa.dao.*;
import ca.uhn.fhir.jpa.provider.dstu3.JpaSystemProviderDstu3;
import ca.uhn.fhir.jpa.search.ISearchCoordinatorSvc;
+import ca.uhn.fhir.jpa.search.reindex.IResourceReindexingSvc;
import ca.uhn.fhir.jpa.sp.ISearchParamPresenceSvc;
import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
@@ -144,11 +145,13 @@ public class FhirResourceDaoDstu3SearchWithLuceneDisabledTest extends BaseJpaTes
@Autowired
@Qualifier("myJpaValidationSupportChainDstu3")
private IValidationSupport myValidationSupport;
+ @Autowired
+ private IResourceReindexingSvc myResourceReindexingSvc;
@Before
public void beforePurgeDatabase() {
runInTransaction(() -> {
- purgeDatabase(myDaoConfig, mySystemDao, mySearchParamPresenceSvc, mySearchCoordinatorSvc, mySearchParamRegistry);
+ purgeDatabase(myDaoConfig, mySystemDao, myResourceReindexingSvc, mySearchCoordinatorSvc, mySearchParamRegistry);
});
}
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3TerminologyTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3TerminologyTest.java
index 1355c5ded91..aa5031b91c5 100644
--- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3TerminologyTest.java
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3TerminologyTest.java
@@ -487,10 +487,10 @@ public class FhirResourceDaoDstu3TerminologyTest extends BaseJpaDstu3Test {
createExternalCsAndLocalVs();
- mySystemDao.markAllResourcesForReindexing();
+ myResourceReindexingSvc.markAllResourcesForReindexing();
+ myResourceReindexingSvc.forceReindexingPass();
+ myResourceReindexingSvc.forceReindexingPass();
- mySystemDao.performReindexingPass(100);
- mySystemDao.performReindexingPass(100);
myHapiTerminologySvc.saveDeferred();
myHapiTerminologySvc.saveDeferred();
myHapiTerminologySvc.saveDeferred();
@@ -729,17 +729,17 @@ public class FhirResourceDaoDstu3TerminologyTest extends BaseJpaDstu3Test {
include.setSystem(URL_MY_CODE_SYSTEM);
include.addConcept().setCode("ZZZZ");
- mySystemDao.markAllResourcesForReindexing();
- mySystemDao.performReindexingPass(null);
+ myResourceReindexingSvc.markAllResourcesForReindexing();
+ myResourceReindexingSvc.forceReindexingPass();
+ myResourceReindexingSvc.forceReindexingPass();
myTermSvc.saveDeferred();
- mySystemDao.performReindexingPass(null);
myTermSvc.saveDeferred();
// Again
- mySystemDao.markAllResourcesForReindexing();
- mySystemDao.performReindexingPass(null);
+ myResourceReindexingSvc.markAllResourcesForReindexing();
+ myResourceReindexingSvc.forceReindexingPass();
+ myResourceReindexingSvc.forceReindexingPass();
myTermSvc.saveDeferred();
- mySystemDao.performReindexingPass(null);
myTermSvc.saveDeferred();
}
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3Test.java
index 7bfd175aec8..f1851844cff 100644
--- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3Test.java
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3Test.java
@@ -44,7 +44,7 @@ import static org.apache.commons.lang3.StringUtils.defaultString;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.*;
import static org.junit.Assert.*;
-import static org.mockito.Matchers.eq;
+import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.*;
@SuppressWarnings({"unchecked", "deprecation"})
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3UniqueSearchParamTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3UniqueSearchParamTest.java
index be99f609aa3..bab9da7fd53 100644
--- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3UniqueSearchParamTest.java
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3UniqueSearchParamTest.java
@@ -222,8 +222,9 @@ public class FhirResourceDaoDstu3UniqueSearchParamTest extends BaseJpaDstu3Test
createUniqueIndexCoverageBeneficiary();
- mySystemDao.markAllResourcesForReindexing();
- mySystemDao.performReindexingPass(1000);
+ myResourceReindexingSvc.markAllResourcesForReindexing();
+ myResourceReindexingSvc.forceReindexingPass();
+ myResourceReindexingSvc.forceReindexingPass();
List<ResourceIndexedCompositeStringUnique> uniques = myResourceIndexedCompositeStringUniqueDao.findAll();
assertEquals(uniques.toString(), 1, uniques.size());
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/BaseJpaR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/BaseJpaR4Test.java
index bbfc58077fd..60303b57138 100644
--- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/BaseJpaR4Test.java
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/BaseJpaR4Test.java
@@ -11,6 +11,7 @@ import ca.uhn.fhir.jpa.provider.r4.JpaSystemProviderR4;
import ca.uhn.fhir.jpa.search.DatabaseBackedPagingProvider;
import ca.uhn.fhir.jpa.search.ISearchCoordinatorSvc;
import ca.uhn.fhir.jpa.search.IStaleSearchDeletingSvc;
+import ca.uhn.fhir.jpa.search.reindex.IResourceReindexingSvc;
import ca.uhn.fhir.jpa.search.warm.ICacheWarmingSvc;
import ca.uhn.fhir.jpa.sp.ISearchParamPresenceSvc;
import ca.uhn.fhir.jpa.term.BaseHapiTerminologySvcImpl;
@@ -213,6 +214,8 @@ public abstract class BaseJpaR4Test extends BaseJpaTest {
@Autowired
protected ISearchIncludeDao mySearchIncludeDao;
@Autowired
+ protected IResourceReindexJobDao myResourceReindexJobDao;
+ @Autowired
@Qualifier("mySearchParameterDaoR4")
protected IFhirResourceDao mySearchParameterDao;
@Autowired
@@ -237,6 +240,8 @@ public abstract class BaseJpaR4Test extends BaseJpaTest {
@Qualifier("mySystemDaoR4")
protected IFhirSystemDao mySystemDao;
@Autowired
+ protected IResourceReindexingSvc myResourceReindexingSvc;
+ @Autowired
@Qualifier("mySystemProviderR4")
protected JpaSystemProviderR4 mySystemProvider;
@Autowired
@@ -314,7 +319,7 @@ public abstract class BaseJpaR4Test extends BaseJpaTest {
@Transactional()
public void beforePurgeDatabase() throws InterruptedException {
final EntityManager entityManager = this.myEntityManager;
- purgeDatabase(myDaoConfig, mySystemDao, mySearchParamPresenceSvc, mySearchCoordinatorSvc, mySearchParamRegsitry);
+ purgeDatabase(myDaoConfig, mySystemDao, myResourceReindexingSvc, mySearchCoordinatorSvc, mySearchParamRegsitry);
}
@Before
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4CodeSystemTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4CodeSystemTest.java
index f334e53d7ac..085decf3de3 100644
--- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4CodeSystemTest.java
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4CodeSystemTest.java
@@ -30,10 +30,8 @@ public class FhirResourceDaoR4CodeSystemTest extends BaseJpaR4Test {
CodeSystem cs = myFhirCtx.newJsonParser().parseResource(CodeSystem.class, input);
myCodeSystemDao.create(cs, mySrd);
-
- mySystemDao.markAllResourcesForReindexing();
-
- int outcome = mySystemDao.performReindexingPass(100);
+ myResourceReindexingSvc.markAllResourcesForReindexing();
+ int outcome = myResourceReindexingSvc.runReindexingPass();
assertNotEquals(-1, outcome); // -1 means there was a failure
myTermSvc.saveDeferred();
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4CreateTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4CreateTest.java
index 4006322d6bc..5abc521dcb6 100644
--- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4CreateTest.java
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4CreateTest.java
@@ -5,10 +5,7 @@ import ca.uhn.fhir.jpa.dao.SearchParameterMap;
import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.util.TestUtil;
import org.hl7.fhir.instance.model.api.IIdType;
-import org.hl7.fhir.r4.model.Bundle;
-import org.hl7.fhir.r4.model.IdType;
-import org.hl7.fhir.r4.model.Organization;
-import org.hl7.fhir.r4.model.Patient;
+import org.hl7.fhir.r4.model.*;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Test;
@@ -83,6 +80,8 @@ public class FhirResourceDaoR4CreateTest extends BaseJpaR4Test {
Patient p = new Patient();
p.setId(IdType.newRandomUuid());
p.addName().setFamily("FAM");
+ p.setActive(true);
+ p.setBirthDateElement(new DateType("2011-01-01"));
p.getManagingOrganization().setReference(org.getId());
Bundle input = new Bundle();
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchCustomSearchParamTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchCustomSearchParamTest.java
index 6c988c367e7..07a9514c186 100644
--- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchCustomSearchParamTest.java
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchCustomSearchParamTest.java
@@ -146,8 +146,8 @@ public class FhirResourceDaoR4SearchCustomSearchParamTest extends BaseJpaR4Test
mySearchParameterDao.create(fooSp, mySrd);
- assertEquals(1, mySystemDao.performReindexingPass(100).intValue());
- assertEquals(0, mySystemDao.performReindexingPass(100).intValue());
+ assertEquals(1, myResourceReindexingSvc.forceReindexingPass().intValue());
+ assertEquals(0, myResourceReindexingSvc.forceReindexingPass().intValue());
}
@@ -1171,7 +1171,7 @@ public class FhirResourceDaoR4SearchCustomSearchParamTest extends BaseJpaR4Test
mySearchParameterDao.delete(spId, mySrd);
mySearchParamRegsitry.forceRefresh();
- mySystemDao.performReindexingPass(100);
+ myResourceReindexingSvc.forceReindexingPass();
// Try with custom gender SP
map = new SearchParameterMap();
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchWithLuceneDisabledTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchWithLuceneDisabledTest.java
index 650d4f90b11..dedc619d5b2 100644
--- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchWithLuceneDisabledTest.java
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchWithLuceneDisabledTest.java
@@ -4,6 +4,7 @@ import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.config.TestR4WithoutLuceneConfig;
import ca.uhn.fhir.jpa.dao.*;
import ca.uhn.fhir.jpa.search.ISearchCoordinatorSvc;
+import ca.uhn.fhir.jpa.search.reindex.IResourceReindexingSvc;
import ca.uhn.fhir.jpa.sp.ISearchParamPresenceSvc;
import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
@@ -89,11 +90,13 @@ public class FhirResourceDaoR4SearchWithLuceneDisabledTest extends BaseJpaTest {
private IValidationSupport myValidationSupport;
@Autowired
private IFhirSystemDao mySystemDao;
+ @Autowired
+ private IResourceReindexingSvc myResourceReindexingSvc;
@Before
@Transactional()
public void beforePurgeDatabase() {
- purgeDatabase(myDaoConfig, mySystemDao, mySearchParamPresenceSvc, mySearchCoordinatorSvc, mySearchParamRegistry);
+ purgeDatabase(myDaoConfig, mySystemDao, myResourceReindexingSvc, mySearchCoordinatorSvc, mySearchParamRegistry);
}
@Before
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4TerminologyTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4TerminologyTest.java
index c0593617ad9..cd134eeb08c 100644
--- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4TerminologyTest.java
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4TerminologyTest.java
@@ -539,10 +539,9 @@ public class FhirResourceDaoR4TerminologyTest extends BaseJpaR4Test {
createExternalCsAndLocalVs();
- mySystemDao.markAllResourcesForReindexing();
-
- mySystemDao.performReindexingPass(100);
- mySystemDao.performReindexingPass(100);
+ myResourceReindexingSvc.markAllResourcesForReindexing();
+ myResourceReindexingSvc.forceReindexingPass();
+ myResourceReindexingSvc.forceReindexingPass();
myHapiTerminologySvc.saveDeferred();
myHapiTerminologySvc.saveDeferred();
myHapiTerminologySvc.saveDeferred();
@@ -851,17 +850,17 @@ public class FhirResourceDaoR4TerminologyTest extends BaseJpaR4Test {
include.setSystem(URL_MY_CODE_SYSTEM);
include.addConcept().setCode("ZZZZ");
- mySystemDao.markAllResourcesForReindexing();
- mySystemDao.performReindexingPass(null);
+ myResourceReindexingSvc.markAllResourcesForReindexing();
+ myResourceReindexingSvc.forceReindexingPass();
+ myResourceReindexingSvc.forceReindexingPass();
myTermSvc.saveDeferred();
- mySystemDao.performReindexingPass(null);
myTermSvc.saveDeferred();
// Again
- mySystemDao.markAllResourcesForReindexing();
- mySystemDao.performReindexingPass(null);
+ myResourceReindexingSvc.markAllResourcesForReindexing();
+ myResourceReindexingSvc.forceReindexingPass();
+ myResourceReindexingSvc.forceReindexingPass();
myTermSvc.saveDeferred();
- mySystemDao.performReindexingPass(null);
myTermSvc.saveDeferred();
}
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4Test.java
index 2b94014d187..e6f1b19f35f 100644
--- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4Test.java
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4Test.java
@@ -53,7 +53,7 @@ import static org.apache.commons.lang3.StringUtils.defaultString;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.*;
import static org.junit.Assert.*;
-import static org.mockito.Matchers.eq;
+import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.*;
@SuppressWarnings({"unchecked", "deprecation", "Duplicates"})
@@ -162,6 +162,9 @@ public class FhirResourceDaoR4Test extends BaseJpaR4Test {
runInTransaction(() -> {
assertThat(myResourceIndexedSearchParamTokenDao.countForResourceId(id1.getIdPartAsLong()), greaterThan(0));
+ Optional tableOpt = myResourceTableDao.findById(id1.getIdPartAsLong());
+ assertTrue(tableOpt.isPresent());
+ assertEquals(BaseHapiFhirDao.INDEX_STATUS_INDEXED, tableOpt.get().getIndexStatus().longValue());
});
runInTransaction(() -> {
@@ -170,10 +173,16 @@ public class FhirResourceDaoR4Test extends BaseJpaR4Test {
ResourceTable table = tableOpt.get();
table.setIndexStatus(null);
table.setDeleted(new Date());
+ table = myResourceTableDao.saveAndFlush(table);
+ ResourceHistoryTable newHistory = table.toHistory();
+ ResourceHistoryTable currentHistory = myResourceHistoryTableDao.findForIdAndVersion(table.getId(), 1L);
+ newHistory.setEncoding(currentHistory.getEncoding());
+ newHistory.setResource(currentHistory.getResource());
+ myResourceHistoryTableDao.save(newHistory);
});
- mySystemDao.performReindexingPass(1000);
- mySystemDao.performReindexingPass(1000);
+ myResourceReindexingSvc.markAllResourcesForReindexing();
+ myResourceReindexingSvc.runReindexingPass();
runInTransaction(() -> {
Optional tableOpt = myResourceTableDao.findById(id1.getIdPartAsLong());
@@ -185,6 +194,48 @@ public class FhirResourceDaoR4Test extends BaseJpaR4Test {
}
+ @Test
+ public void testMissingVersionsAreReindexed() {
+ myDaoConfig.setSchedulingDisabled(true);
+
+ Patient pt1 = new Patient();
+ pt1.setActive(true);
+ pt1.addName().setFamily("FAM");
+ IIdType id1 = myPatientDao.create(pt1).getId().toUnqualifiedVersionless();
+
+ runInTransaction(() -> {
+ assertThat(myResourceIndexedSearchParamTokenDao.countForResourceId(id1.getIdPartAsLong()), greaterThan(0));
+ Optional tableOpt = myResourceTableDao.findById(id1.getIdPartAsLong());
+ assertTrue(tableOpt.isPresent());
+ assertEquals(BaseHapiFhirDao.INDEX_STATUS_INDEXED, tableOpt.get().getIndexStatus().longValue());
+ });
+
+ /*
+ * This triggers a new version in the HFJ_RESOURCE table, but
+ * we do not create the corresponding entry in the HFJ_RES_VER
+ * table.
+ */
+ runInTransaction(() -> {
+ Optional tableOpt = myResourceTableDao.findById(id1.getIdPartAsLong());
+ assertTrue(tableOpt.isPresent());
+ ResourceTable table = tableOpt.get();
+ table.setIndexStatus(null);
+ table.setDeleted(new Date());
+ myResourceTableDao.saveAndFlush(table);
+ });
+
+ myResourceReindexingSvc.markAllResourcesForReindexing();
+ myResourceReindexingSvc.runReindexingPass();
+
+ runInTransaction(() -> {
+ Optional tableOpt = myResourceTableDao.findById(id1.getIdPartAsLong());
+ assertTrue(tableOpt.isPresent());
+ assertEquals(BaseHapiFhirDao.INDEX_STATUS_INDEXING_FAILED, tableOpt.get().getIndexStatus().longValue());
+ assertThat(myResourceIndexedSearchParamTokenDao.countForResourceId(id1.getIdPartAsLong()), not(greaterThan(0)));
+ });
+
+
+ }
@Test
public void testCantSearchForDeletedResourceByLanguageOrTag() {
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4UniqueSearchParamTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4UniqueSearchParamTest.java
index bd063d48975..96540b89d92 100644
--- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4UniqueSearchParamTest.java
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4UniqueSearchParamTest.java
@@ -449,9 +449,9 @@ public class FhirResourceDaoR4UniqueSearchParamTest extends BaseJpaR4Test {
createUniqueObservationSubjectDateCode();
- mySystemDao.markAllResourcesForReindexing();
- mySystemDao.performReindexingPass(1000);
- mySystemDao.performReindexingPass(1000);
+ myResourceReindexingSvc.markAllResourcesForReindexing();
+ myResourceReindexingSvc.forceReindexingPass();
+ myResourceReindexingSvc.forceReindexingPass();
List uniques = myResourceIndexedCompositeStringUniqueDao.findAll();
assertEquals(uniques.toString(), 1, uniques.size());
@@ -462,9 +462,9 @@ public class FhirResourceDaoR4UniqueSearchParamTest extends BaseJpaR4Test {
assertEquals(1, mySearchParamRegsitry.getActiveUniqueSearchParams("Observation").size());
- assertEquals(7, mySystemDao.markAllResourcesForReindexing());
- mySystemDao.performReindexingPass(1000);
- mySystemDao.performReindexingPass(1000);
+ myResourceReindexingSvc.markAllResourcesForReindexing();
+ myResourceReindexingSvc.forceReindexingPass();
+ myResourceReindexingSvc.forceReindexingPass();
uniques = myResourceIndexedCompositeStringUniqueDao.findAll();
assertEquals(uniques.toString(), 1, uniques.size());
@@ -557,8 +557,9 @@ public class FhirResourceDaoR4UniqueSearchParamTest extends BaseJpaR4Test {
createUniqueIndexCoverageBeneficiary();
- mySystemDao.markAllResourcesForReindexing();
- mySystemDao.performReindexingPass(1000);
+ myResourceReindexingSvc.markAllResourcesForReindexing();
+ myResourceReindexingSvc.forceReindexingPass();
+ myResourceReindexingSvc.forceReindexingPass();
List uniques = myResourceIndexedCompositeStringUniqueDao.findAll();
assertEquals(uniques.toString(), 1, uniques.size());
@@ -1119,8 +1120,9 @@ public class FhirResourceDaoR4UniqueSearchParamTest extends BaseJpaR4Test {
pt2.setActive(false);
myPatientDao.create(pt1).getId().toUnqualifiedVersionless();
- mySystemDao.markAllResourcesForReindexing();
- mySystemDao.performReindexingPass(1000);
+ myResourceReindexingSvc.markAllResourcesForReindexing();
+ myResourceReindexingSvc.forceReindexingPass();
+ myResourceReindexingSvc.forceReindexingPass();
List uniques = myResourceIndexedCompositeStringUniqueDao.findAll();
assertEquals(uniques.toString(), 1, uniques.size());
@@ -1129,8 +1131,9 @@ public class FhirResourceDaoR4UniqueSearchParamTest extends BaseJpaR4Test {
myResourceIndexedCompositeStringUniqueDao.deleteAll();
- mySystemDao.markAllResourcesForReindexing();
- mySystemDao.performReindexingPass(1000);
+ myResourceReindexingSvc.markAllResourcesForReindexing();
+ myResourceReindexingSvc.forceReindexingPass();
+ myResourceReindexingSvc.forceReindexingPass();
uniques = myResourceIndexedCompositeStringUniqueDao.findAll();
assertEquals(uniques.toString(), 1, uniques.size());
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4UpdateTagSnapshotTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4UpdateTagSnapshotTest.java
index 9f727ea1aa4..1d3f9e7f381 100644
--- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4UpdateTagSnapshotTest.java
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4UpdateTagSnapshotTest.java
@@ -10,7 +10,7 @@ import org.junit.AfterClass;
import org.junit.Test;
import static org.junit.Assert.*;
-import static org.mockito.Matchers.eq;
+import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.when;
public class FhirResourceDaoR4UpdateTagSnapshotTest extends BaseJpaR4Test {
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4UpdateTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4UpdateTest.java
index 728a75f6950..267eb4aa5ec 100644
--- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4UpdateTest.java
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4UpdateTest.java
@@ -1,20 +1,5 @@
package ca.uhn.fhir.jpa.dao.r4;
-import static org.hamcrest.Matchers.*;
-import static org.junit.Assert.*;
-import static org.mockito.Matchers.eq;
-import static org.mockito.Mockito.reset;
-import static org.mockito.Mockito.verify;
-
-import java.util.*;
-
-import net.ttddyy.dsproxy.QueryCountHolder;
-import org.hl7.fhir.r4.model.*;
-import org.hl7.fhir.instance.model.api.IBaseResource;
-import org.hl7.fhir.instance.model.api.IIdType;
-import org.junit.*;
-import org.mockito.ArgumentCaptor;
-
import ca.uhn.fhir.jpa.dao.DaoConfig;
import ca.uhn.fhir.jpa.dao.SearchParameterMap;
import ca.uhn.fhir.model.primitive.InstantDt;
@@ -22,11 +7,30 @@ import ca.uhn.fhir.rest.api.MethodOutcome;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.param.StringParam;
-import ca.uhn.fhir.rest.server.exceptions.*;
+import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
+import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException;
+import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
+import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor.ActionRequestDetails;
import ca.uhn.fhir.util.TestUtil;
+import org.hl7.fhir.instance.model.api.IBaseResource;
+import org.hl7.fhir.instance.model.api.IIdType;
+import org.hl7.fhir.r4.model.*;
+import org.junit.After;
+import org.junit.AfterClass;
+import org.junit.Ignore;
+import org.junit.Test;
+import org.mockito.ArgumentCaptor;
import org.springframework.test.context.TestPropertySource;
+import java.util.*;
+
+import static org.hamcrest.Matchers.*;
+import static org.junit.Assert.*;
+import static org.mockito.ArgumentMatchers.eq;
+import static org.mockito.Mockito.reset;
+import static org.mockito.Mockito.verify;
+
@TestPropertySource(properties = {
"scheduling_disabled=true"
})
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirSystemDaoR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirSystemDaoR4Test.java
index 5f9770f98b1..4f7378285f9 100644
--- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirSystemDaoR4Test.java
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirSystemDaoR4Test.java
@@ -51,11 +51,6 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirSystemDaoR4Test.class);
- @AfterClass
- public static void afterClassClearContext() {
- TestUtil.clearAllStaticFieldsForUnitTest();
- }
-
@After
public void after() {
myDaoConfig.setAllowInlineMatchUrlReferences(false);
@@ -175,7 +170,7 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest {
fail();
return null;
}
-
+
@Test
public void testTransactionReSavesPreviouslyDeletedResources() {
@@ -238,7 +233,6 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest {
myPatientDao.read(new IdType("Patient/pt"));
}
-
@Test
public void testResourceCounts() {
@@ -534,69 +528,43 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest {
vs.setUrl("http://foo");
myValueSetDao.create(vs, mySrd);
- ResourceTable entity = new TransactionTemplate(myTxManager).execute(new TransactionCallback() {
- @Override
- public ResourceTable doInTransaction(TransactionStatus theStatus) {
- return myEntityManager.find(ResourceTable.class, id.getIdPartAsLong());
- }
- });
+ ResourceTable entity = new TransactionTemplate(myTxManager).execute(t -> myEntityManager.find(ResourceTable.class, id.getIdPartAsLong()));
assertEquals(Long.valueOf(1), entity.getIndexStatus());
- mySystemDao.markAllResourcesForReindexing();
+ myResourceReindexingSvc.markAllResourcesForReindexing();
+ myResourceReindexingSvc.forceReindexingPass();
- entity = new TransactionTemplate(myTxManager).execute(new TransactionCallback() {
- @Override
- public ResourceTable doInTransaction(TransactionStatus theStatus) {
- return myEntityManager.find(ResourceTable.class, id.getIdPartAsLong());
- }
- });
- assertEquals(null, entity.getIndexStatus());
-
- mySystemDao.performReindexingPass(null);
-
- entity = new TransactionTemplate(myTxManager).execute(new TransactionCallback() {
- @Override
- public ResourceTable doInTransaction(TransactionStatus theStatus) {
- return myEntityManager.find(ResourceTable.class, id.getIdPartAsLong());
- }
- });
+ entity = new TransactionTemplate(myTxManager).execute(t -> myEntityManager.find(ResourceTable.class, id.getIdPartAsLong()));
assertEquals(Long.valueOf(1), entity.getIndexStatus());
// Just make sure this doesn't cause a choke
- mySystemDao.performReindexingPass(100000);
+ myResourceReindexingSvc.forceReindexingPass();
// Try making the resource unparseable
TransactionTemplate template = new TransactionTemplate(myTxManager);
template.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);
- template.execute(new TransactionCallback() {
- @Override
- public ResourceTable doInTransaction(TransactionStatus theStatus) {
- ResourceHistoryTable resourceHistoryTable = myResourceHistoryTableDao.findForIdAndVersion(id.getIdPartAsLong(), id.getVersionIdPartAsLong());
- resourceHistoryTable.setEncoding(ResourceEncodingEnum.JSON);
- try {
- resourceHistoryTable.setResource("{\"resourceType\":\"FOO\"}".getBytes("UTF-8"));
- } catch (UnsupportedEncodingException e) {
- throw new Error(e);
- }
- myResourceHistoryTableDao.save(resourceHistoryTable);
-
- ResourceTable table = myResourceTableDao.findById(id.getIdPartAsLong()).orElseThrow(IllegalStateException::new);
- table.setIndexStatus(null);
- myResourceTableDao.save(table);
-
- return null;
+ template.execute((TransactionCallback) t -> {
+ ResourceHistoryTable resourceHistoryTable = myResourceHistoryTableDao.findForIdAndVersion(id.getIdPartAsLong(), id.getVersionIdPartAsLong());
+ resourceHistoryTable.setEncoding(ResourceEncodingEnum.JSON);
+ try {
+ resourceHistoryTable.setResource("{\"resourceType\":\"FOO\"}".getBytes("UTF-8"));
+ } catch (UnsupportedEncodingException e) {
+ throw new Error(e);
}
+ myResourceHistoryTableDao.save(resourceHistoryTable);
+
+ ResourceTable table = myResourceTableDao.findById(id.getIdPartAsLong()).orElseThrow(IllegalStateException::new);
+ table.setIndexStatus(null);
+ myResourceTableDao.save(table);
+
+ return null;
});
- mySystemDao.performReindexingPass(null);
+ myResourceReindexingSvc.markAllResourcesForReindexing();
+ myResourceReindexingSvc.forceReindexingPass();
- entity = new TransactionTemplate(myTxManager).execute(new TransactionCallback() {
- @Override
- public ResourceTable doInTransaction(TransactionStatus theStatus) {
- return myEntityManager.find(ResourceTable.class, id.getIdPartAsLong());
- }
- });
+ entity = new TransactionTemplate(myTxManager).execute(theStatus -> myEntityManager.find(ResourceTable.class, id.getIdPartAsLong()));
assertEquals(Long.valueOf(2), entity.getIndexStatus());
}
@@ -3119,6 +3087,44 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest {
assertEquals(1, found.size().intValue());
}
+ @Test
+ public void testTransactionWithRelativeOidIds() {
+ Bundle res = new Bundle();
+ res.setType(BundleType.TRANSACTION);
+
+ Patient p1 = new Patient();
+ p1.setId("urn:oid:0.1.2.3");
+ p1.addIdentifier().setSystem("system").setValue("testTransactionWithRelativeOidIds01");
+ res.addEntry().setResource(p1).getRequest().setMethod(HTTPVerb.POST).setUrl("Patient");
+
+ Observation o1 = new Observation();
+ o1.addIdentifier().setSystem("system").setValue("testTransactionWithRelativeOidIds02");
+ o1.setSubject(new Reference("urn:oid:0.1.2.3"));
+ res.addEntry().setResource(o1).getRequest().setMethod(HTTPVerb.POST).setUrl("Observation");
+
+ Observation o2 = new Observation();
+ o2.addIdentifier().setSystem("system").setValue("testTransactionWithRelativeOidIds03");
+ o2.setSubject(new Reference("urn:oid:0.1.2.3"));
+ res.addEntry().setResource(o2).getRequest().setMethod(HTTPVerb.POST).setUrl("Observation");
+
+ Bundle resp = mySystemDao.transaction(mySrd, res);
+
+ ourLog.info(myFhirCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(resp));
+
+ assertEquals(BundleType.TRANSACTIONRESPONSE, resp.getTypeElement().getValue());
+ assertEquals(3, resp.getEntry().size());
+
+ assertTrue(resp.getEntry().get(0).getResponse().getLocation(), new IdType(resp.getEntry().get(0).getResponse().getLocation()).getIdPart().matches("^[0-9]+$"));
+ assertTrue(resp.getEntry().get(1).getResponse().getLocation(), new IdType(resp.getEntry().get(1).getResponse().getLocation()).getIdPart().matches("^[0-9]+$"));
+ assertTrue(resp.getEntry().get(2).getResponse().getLocation(), new IdType(resp.getEntry().get(2).getResponse().getLocation()).getIdPart().matches("^[0-9]+$"));
+
+ o1 = myObservationDao.read(new IdType(resp.getEntry().get(1).getResponse().getLocation()), mySrd);
+ o2 = myObservationDao.read(new IdType(resp.getEntry().get(2).getResponse().getLocation()), mySrd);
+ assertThat(o1.getSubject().getReferenceElement().getValue(), endsWith("Patient/" + p1.getIdElement().getIdPart()));
+ assertThat(o2.getSubject().getReferenceElement().getValue(), endsWith("Patient/" + p1.getIdElement().getIdPart()));
+
+ }
+
//
//
// /**
@@ -3221,44 +3227,6 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest {
//
// }
- @Test
- public void testTransactionWithRelativeOidIds() {
- Bundle res = new Bundle();
- res.setType(BundleType.TRANSACTION);
-
- Patient p1 = new Patient();
- p1.setId("urn:oid:0.1.2.3");
- p1.addIdentifier().setSystem("system").setValue("testTransactionWithRelativeOidIds01");
- res.addEntry().setResource(p1).getRequest().setMethod(HTTPVerb.POST).setUrl("Patient");
-
- Observation o1 = new Observation();
- o1.addIdentifier().setSystem("system").setValue("testTransactionWithRelativeOidIds02");
- o1.setSubject(new Reference("urn:oid:0.1.2.3"));
- res.addEntry().setResource(o1).getRequest().setMethod(HTTPVerb.POST).setUrl("Observation");
-
- Observation o2 = new Observation();
- o2.addIdentifier().setSystem("system").setValue("testTransactionWithRelativeOidIds03");
- o2.setSubject(new Reference("urn:oid:0.1.2.3"));
- res.addEntry().setResource(o2).getRequest().setMethod(HTTPVerb.POST).setUrl("Observation");
-
- Bundle resp = mySystemDao.transaction(mySrd, res);
-
- ourLog.info(myFhirCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(resp));
-
- assertEquals(BundleType.TRANSACTIONRESPONSE, resp.getTypeElement().getValue());
- assertEquals(3, resp.getEntry().size());
-
- assertTrue(resp.getEntry().get(0).getResponse().getLocation(), new IdType(resp.getEntry().get(0).getResponse().getLocation()).getIdPart().matches("^[0-9]+$"));
- assertTrue(resp.getEntry().get(1).getResponse().getLocation(), new IdType(resp.getEntry().get(1).getResponse().getLocation()).getIdPart().matches("^[0-9]+$"));
- assertTrue(resp.getEntry().get(2).getResponse().getLocation(), new IdType(resp.getEntry().get(2).getResponse().getLocation()).getIdPart().matches("^[0-9]+$"));
-
- o1 = myObservationDao.read(new IdType(resp.getEntry().get(1).getResponse().getLocation()), mySrd);
- o2 = myObservationDao.read(new IdType(resp.getEntry().get(2).getResponse().getLocation()), mySrd);
- assertThat(o1.getSubject().getReferenceElement().getValue(), endsWith("Patient/" + p1.getIdElement().getIdPart()));
- assertThat(o2.getSubject().getReferenceElement().getValue(), endsWith("Patient/" + p1.getIdElement().getIdPart()));
-
- }
-
/**
* This is not the correct way to do it, but we'll allow it to be lenient
*/
@@ -3471,4 +3439,9 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest {
}
+ @AfterClass
+ public static void afterClassClearContext() {
+ TestUtil.clearAllStaticFieldsForUnitTest();
+ }
+
}
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/ResourceProviderCustomSearchParamDstu3Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/ResourceProviderCustomSearchParamDstu3Test.java
index 1730f0d8523..907792f406e 100644
--- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/ResourceProviderCustomSearchParamDstu3Test.java
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/ResourceProviderCustomSearchParamDstu3Test.java
@@ -7,6 +7,7 @@ import static org.junit.Assert.*;
import java.io.IOException;
import java.util.*;
+import ca.uhn.fhir.jpa.entity.ResourceReindexJobEntity;
import org.apache.commons.io.IOUtils;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
@@ -236,11 +237,11 @@ public class ResourceProviderCustomSearchParamDstu3Test extends BaseResourceProv
fooSp.setStatus(org.hl7.fhir.dstu3.model.Enumerations.PublicationStatus.ACTIVE);
mySearchParameterDao.create(fooSp, mySrd);
- res = myResourceTableDao.findById(patId.getIdPartAsLong()).orElseThrow(IllegalStateException::new);
- assertEquals(null, res.getIndexStatus());
- res = myResourceTableDao.findById(obsId.getIdPartAsLong()).orElseThrow(IllegalStateException::new);
- assertEquals(BaseHapiFhirDao.INDEX_STATUS_INDEXED, res.getIndexStatus().longValue());
-
+ runInTransaction(()->{
+ List allJobs = myResourceReindexJobDao.findAll();
+ assertEquals(1, allJobs.size());
+ assertEquals("Patient", allJobs.get(0).getResourceType());
+ });
}
@Test
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderCustomSearchParamR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderCustomSearchParamR4Test.java
index 19e36cdb5a3..40e449caec9 100644
--- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderCustomSearchParamR4Test.java
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderCustomSearchParamR4Test.java
@@ -7,6 +7,7 @@ import static org.junit.Assert.*;
import java.io.IOException;
import java.util.*;
+import ca.uhn.fhir.jpa.entity.ResourceReindexJobEntity;
import org.apache.commons.io.IOUtils;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
@@ -236,10 +237,11 @@ public class ResourceProviderCustomSearchParamR4Test extends BaseResourceProvide
fooSp.setStatus(org.hl7.fhir.r4.model.Enumerations.PublicationStatus.ACTIVE);
mySearchParameterDao.create(fooSp, mySrd);
- res = myResourceTableDao.findById(patId.getIdPartAsLong()).orElseThrow(IllegalStateException::new);
- assertEquals(null, res.getIndexStatus());
- res = myResourceTableDao.findById(obsId.getIdPartAsLong()).orElseThrow(IllegalStateException::new);
- assertEquals(BaseHapiFhirDao.INDEX_STATUS_INDEXED, res.getIndexStatus().longValue());
+ runInTransaction(()->{
+ List allJobs = myResourceReindexJobDao.findAll();
+ assertEquals(1, allJobs.size());
+ assertEquals("Patient", allJobs.get(0).getResourceType());
+ });
}
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/SearchCoordinatorSvcImplTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/SearchCoordinatorSvcImplTest.java
index 91d277b53e1..8ed92937952 100644
--- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/SearchCoordinatorSvcImplTest.java
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/SearchCoordinatorSvcImplTest.java
@@ -193,7 +193,7 @@ public class SearchCoordinatorSvcImplTest {
});
when(mySearchDao.findByUuid(any())).thenAnswer(t -> myCurrentSearch);
IFhirResourceDao dao = myCallingDao;
- when(myDaoRegistry.getResourceDao(any())).thenReturn(dao);
+ when(myDaoRegistry.getResourceDao(any(String.class))).thenReturn(dao);
resources = result.getResources(0, 100000);
assertEquals(790, resources.size());
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/reindex/ResourceReindexingSvcImplTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/reindex/ResourceReindexingSvcImplTest.java
new file mode 100644
index 00000000000..1549aea1b6a
--- /dev/null
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/reindex/ResourceReindexingSvcImplTest.java
@@ -0,0 +1,262 @@
+package ca.uhn.fhir.jpa.search.reindex;
+
+import ca.uhn.fhir.context.FhirContext;
+import ca.uhn.fhir.jpa.dao.BaseJpaTest;
+import ca.uhn.fhir.jpa.dao.DaoConfig;
+import ca.uhn.fhir.jpa.dao.DaoRegistry;
+import ca.uhn.fhir.jpa.dao.IFhirResourceDao;
+import ca.uhn.fhir.jpa.dao.data.IForcedIdDao;
+import ca.uhn.fhir.jpa.dao.data.IResourceReindexJobDao;
+import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
+import ca.uhn.fhir.jpa.entity.ResourceReindexJobEntity;
+import ca.uhn.fhir.jpa.entity.ResourceTable;
+import org.apache.commons.lang3.time.DateUtils;
+import org.hl7.fhir.instance.model.api.IBaseResource;
+import org.hl7.fhir.r4.model.Observation;
+import org.hl7.fhir.r4.model.Patient;
+import org.junit.Before;
+import org.junit.Test;
+import org.mockito.ArgumentCaptor;
+import org.mockito.Captor;
+import org.mockito.Mock;
+import org.springframework.data.domain.PageRequest;
+import org.springframework.data.domain.SliceImpl;
+import org.springframework.transaction.PlatformTransactionManager;
+
+import java.util.*;
+
+import static org.junit.Assert.assertEquals;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.eq;
+import static org.mockito.Mockito.*;
+
+
+public class ResourceReindexingSvcImplTest extends BaseJpaTest {
+
+ private static FhirContext ourCtx = FhirContext.forR4();
+
+ @Mock
+ private PlatformTransactionManager myTxManager;
+
+ private ResourceReindexingSvcImpl mySvc;
+ private DaoConfig myDaoConfig;
+
+ @Mock
+ private DaoRegistry myDaoRegistry;
+ @Mock
+ private IForcedIdDao myForcedIdDao;
+ @Mock
+ private IResourceReindexJobDao myReindexJobDao;
+ @Mock
+ private IResourceTableDao myResourceTableDao;
+ @Mock
+ private IFhirResourceDao myResourceDao;
+ @Captor
+ private ArgumentCaptor<Long> myIdCaptor;
+ @Captor
+ private ArgumentCaptor<PageRequest> myPageRequestCaptor;
+ @Captor
+ private ArgumentCaptor<String> myTypeCaptor;
+ @Captor
+ private ArgumentCaptor<Date> myLowCaptor;
+ @Captor
+ private ArgumentCaptor<Date> myHighCaptor;
+ private ResourceReindexJobEntity mySingleJob;
+
+ @Override
+ protected FhirContext getContext() {
+ return ourCtx;
+ }
+
+ @Override
+ protected PlatformTransactionManager getTxManager() {
+ return myTxManager;
+ }
+
+ @Before
+ public void before() {
+ myDaoConfig = new DaoConfig();
+ myDaoConfig.setReindexThreadCount(2);
+
+ mySvc = new ResourceReindexingSvcImpl();
+ mySvc.setContextForUnitTest(ourCtx);
+ mySvc.setDaoConfigForUnitTest(myDaoConfig);
+ mySvc.setDaoRegistryForUnitTest(myDaoRegistry);
+ mySvc.setForcedIdDaoForUnitTest(myForcedIdDao);
+ mySvc.setReindexJobDaoForUnitTest(myReindexJobDao);
+ mySvc.setResourceTableDaoForUnitTest(myResourceTableDao);
+ mySvc.setTxManagerForUnitTest(myTxManager);
+ mySvc.start();
+ }
+
+ @Test
+ public void testMarkJobsPastThresholdAsDeleted() {
+ mockNothingToExpunge();
+ mockSingleReindexingJob(null);
+ mockFourResourcesNeedReindexing();
+ mockFetchFourResources();
+
+ mySingleJob.setThresholdHigh(DateUtils.addMinutes(new Date(), -1));
+
+ mySvc.forceReindexingPass();
+
+ verify(myResourceTableDao, never()).findIdsOfResourcesWithinUpdatedRangeOrderedFromOldest(any(), any(), any());
+ verify(myResourceTableDao, never()).findIdsOfResourcesWithinUpdatedRangeOrderedFromOldest(any(), any(), any(), any());
+ verify(myReindexJobDao, times(1)).markAsDeletedById(myIdCaptor.capture());
+
+ assertEquals(123L, myIdCaptor.getValue().longValue());
+ }
+
+ @Test
+ public void testExpungeDeletedJobs() {
+ ResourceReindexJobEntity job = new ResourceReindexJobEntity();
+ job.setIdForUnitTest(123L);
+ job.setDeleted(true);
+ when(myReindexJobDao.findAll(any(), eq(true))).thenReturn(Arrays.asList(job));
+
+ mySvc.forceReindexingPass();
+
+ verify(myReindexJobDao, times(1)).deleteById(eq(123L));
+ }
+
+ @Test
+ public void testReindexPassAllResources() {
+ mockNothingToExpunge();
+ mockSingleReindexingJob(null);
+ mockFourResourcesNeedReindexing();
+ mockFetchFourResources();
+
+ int count = mySvc.forceReindexingPass();
+ assertEquals(4, count);
+
+ // Make sure we reindexed all 4 resources
+ verify(myResourceDao, times(4)).reindex(any(), any());
+
+ // Make sure we updated the low threshold
+ verify(myReindexJobDao, times(1)).setThresholdLow(myIdCaptor.capture(), myLowCaptor.capture());
+ assertEquals(123L, myIdCaptor.getValue().longValue());
+ assertEquals(40 * DateUtils.MILLIS_PER_DAY, myLowCaptor.getValue().getTime());
+
+ // Make sure we didn't do anything unexpected
+ verify(myReindexJobDao, times(1)).findAll(any(), eq(false));
+ verify(myReindexJobDao, times(1)).findAll(any(), eq(true));
+ verifyNoMoreInteractions(myReindexJobDao);
+ }
+
+ @Test
+ public void testReindexPassPatients() {
+ mockNothingToExpunge();
+ mockSingleReindexingJob("Patient");
+ // Mock resource fetch
+ List<Long> values = Arrays.asList(0L, 1L, 2L, 3L);
+ when(myResourceTableDao.findIdsOfResourcesWithinUpdatedRangeOrderedFromOldest(myPageRequestCaptor.capture(), myTypeCaptor.capture(), myLowCaptor.capture(), myHighCaptor.capture())).thenReturn(new SliceImpl<>(values));
+ // Mock fetching resources
+ long[] updatedTimes = new long[]{
+ 10 * DateUtils.MILLIS_PER_DAY,
+ 20 * DateUtils.MILLIS_PER_DAY,
+ 40 * DateUtils.MILLIS_PER_DAY,
+ 30 * DateUtils.MILLIS_PER_DAY,
+ };
+ String[] resourceTypes = new String[]{
+ "Patient",
+ "Patient",
+ "Patient",
+ "Patient"
+ };
+ List<IBaseResource> resources = Arrays.asList(
+ new Patient().setId("Patient/0"),
+ new Patient().setId("Patient/1"),
+ new Patient().setId("Patient/2"),
+ new Patient().setId("Patient/3")
+ );
+ mockWhenResourceTableFindById(updatedTimes, resourceTypes);
+ when(myDaoRegistry.getResourceDao(eq("Patient"))).thenReturn(myResourceDao);
+ when(myDaoRegistry.getResourceDao(eq(Patient.class))).thenReturn(myResourceDao);
+ when(myDaoRegistry.getResourceDao(eq("Observation"))).thenReturn(myResourceDao);
+ when(myDaoRegistry.getResourceDao(eq(Observation.class))).thenReturn(myResourceDao);
+ when(myResourceDao.toResource(any(), anyBoolean())).thenAnswer(t -> {
+ ResourceTable table = (ResourceTable) t.getArguments()[0];
+ Long id = table.getId();
+ return resources.get(id.intValue());
+ });
+
+ int count = mySvc.forceReindexingPass();
+ assertEquals(4, count);
+
+ // Make sure we reindexed all 4 resources
+ verify(myResourceDao, times(4)).reindex(any(), any());
+
+ // Make sure we updated the low threshold
+ verify(myReindexJobDao, times(1)).setThresholdLow(myIdCaptor.capture(), myLowCaptor.capture());
+ assertEquals(123L, myIdCaptor.getValue().longValue());
+ assertEquals(40 * DateUtils.MILLIS_PER_DAY, myLowCaptor.getValue().getTime());
+
+ // Make sure we didn't do anything unexpected
+ verify(myReindexJobDao, times(1)).findAll(any(), eq(false));
+ verify(myReindexJobDao, times(1)).findAll(any(), eq(true));
+ verifyNoMoreInteractions(myReindexJobDao);
+ }
+
+ private void mockWhenResourceTableFindById(long[] theUpdatedTimes, String[] theResourceTypes) {
+ when(myResourceTableDao.findById(any())).thenAnswer(t -> {
+ ResourceTable retVal = new ResourceTable();
+ Long id = (Long) t.getArguments()[0];
+ retVal.setId(id);
+ retVal.setResourceType(theResourceTypes[id.intValue()]);
+ retVal.setUpdated(new Date(theUpdatedTimes[id.intValue()]));
+ return Optional.of(retVal);
+ });
+ }
+
+ private void mockFetchFourResources() {
+ // Mock fetching resources
+ long[] updatedTimes = new long[]{
+ 10 * DateUtils.MILLIS_PER_DAY,
+ 20 * DateUtils.MILLIS_PER_DAY,
+ 40 * DateUtils.MILLIS_PER_DAY,
+ 30 * DateUtils.MILLIS_PER_DAY,
+ };
+ String[] resourceTypes = new String[]{
+ "Patient",
+ "Patient",
+ "Observation",
+ "Observation"
+ };
+ List<IBaseResource> resources = Arrays.asList(
+ new Patient().setId("Patient/0"),
+ new Patient().setId("Patient/1"),
+ new Observation().setId("Observation/2"),
+ new Observation().setId("Observation/3")
+ );
+ mockWhenResourceTableFindById(updatedTimes, resourceTypes);
+ when(myDaoRegistry.getResourceDao(eq("Patient"))).thenReturn(myResourceDao);
+ when(myDaoRegistry.getResourceDao(eq(Patient.class))).thenReturn(myResourceDao);
+ when(myDaoRegistry.getResourceDao(eq("Observation"))).thenReturn(myResourceDao);
+ when(myDaoRegistry.getResourceDao(eq(Observation.class))).thenReturn(myResourceDao);
+ when(myResourceDao.toResource(any(), anyBoolean())).thenAnswer(t -> {
+ ResourceTable table = (ResourceTable) t.getArguments()[0];
+ Long id = table.getId();
+ return resources.get(id.intValue());
+ });
+ }
+
+ private void mockFourResourcesNeedReindexing() {
+ // Mock resource fetch
+ List<Long> values = Arrays.asList(0L, 1L, 2L, 3L);
+ when(myResourceTableDao.findIdsOfResourcesWithinUpdatedRangeOrderedFromOldest(myPageRequestCaptor.capture(), myLowCaptor.capture(), myHighCaptor.capture())).thenReturn(new SliceImpl<>(values));
+ }
+
+ private void mockSingleReindexingJob(String theResourceType) {
+ // Mock the reindexing job
+ mySingleJob = new ResourceReindexJobEntity();
+ mySingleJob.setIdForUnitTest(123L);
+ mySingleJob.setThresholdHigh(DateUtils.addMinutes(new Date(), 1));
+ mySingleJob.setResourceType(theResourceType);
+ when(myReindexJobDao.findAll(any(), eq(false))).thenReturn(Arrays.asList(mySingleJob));
+ }
+
+ private void mockNothingToExpunge() {
+ // Nothing to expunge
+ when(myReindexJobDao.findAll(any(), eq(true))).thenReturn(new ArrayList<>());
+ }
+}
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologySvcImplDstu3Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologySvcImplDstu3Test.java
index 86908943ce1..f46a7001a8f 100644
--- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologySvcImplDstu3Test.java
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologySvcImplDstu3Test.java
@@ -550,14 +550,6 @@ public class TerminologySvcImplDstu3Test extends BaseJpaDstu3Test {
assertEquals("D1V", concept.getDesignation().get(0).getValue());
}
- @Test
- public void testReindexTerminology() {
- IIdType id = createCodeSystem();
-
- assertThat(mySystemDao.markAllResourcesForReindexing(), greaterThan(0));
-
- assertThat(mySystemDao.performReindexingPass(100), greaterThan(0));
- }
@Test
public void testStoreCodeSystemInvalidCyclicLoop() {
diff --git a/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/DriverTypeEnum.java b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/DriverTypeEnum.java
index 98e4cd3a5b7..d15e4bc6027 100644
--- a/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/DriverTypeEnum.java
+++ b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/DriverTypeEnum.java
@@ -77,7 +77,7 @@ public enum DriverTypeEnum {
BasicDataSource dataSource = new BasicDataSource(){
@Override
public Connection getConnection() throws SQLException {
- ourLog.info("Creating new DB connection");
+ ourLog.debug("Creating new DB connection");
return super.getConnection();
}
};
diff --git a/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/Migrator.java b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/Migrator.java
index c12d773dfa7..60792b73811 100644
--- a/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/Migrator.java
+++ b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/Migrator.java
@@ -92,4 +92,5 @@ public class Migrator {
ourLog.info("Finished migration of {} tasks", myTasks.size());
}
+
}
diff --git a/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java
index ef93f2e8f02..d25f3c496f5 100644
--- a/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java
+++ b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java
@@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.migrate.tasks;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -30,13 +30,26 @@ import ca.uhn.fhir.jpa.migrate.taskdef.CalculateHashesTask;
import ca.uhn.fhir.jpa.migrate.tasks.api.BaseMigrationTasks;
import ca.uhn.fhir.util.VersionEnum;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Optional;
+import java.util.Set;
+import java.util.stream.Collectors;
+
@SuppressWarnings({"UnstableApiUsage", "SqlNoDataSourceInspection", "SpellCheckingInspection"})
public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
+ private final Set<FlagEnum> myFlags;
+
/**
* Constructor
*/
- public HapiFhirJpaMigrationTasks() {
+ public HapiFhirJpaMigrationTasks(Set<String> theFlags) {
+ myFlags = theFlags
+ .stream()
+ .map(FlagEnum::fromCommandLineValue)
+ .collect(Collectors.toSet());
+
init340();
init350();
init360();
@@ -60,6 +73,15 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks {
.addColumn("OPTLOCK_VERSION")
.nullable()
.type(BaseTableColumnTypeTask.ColumnTypeEnum.INT);
+
+ version.addTable("HFJ_RES_REINDEX_JOB")
+ .addSql(DriverTypeEnum.MSSQL_2012, "create table HFJ_RES_REINDEX_JOB (PID bigint not null, JOB_DELETED bit not null, RES_TYPE varchar(255), SUSPENDED_UNTIL datetime2, UPDATE_THRESHOLD_HIGH datetime2 not null, UPDATE_THRESHOLD_LOW datetime2, primary key (PID))")
+ .addSql(DriverTypeEnum.DERBY_EMBEDDED, "create table HFJ_RES_REINDEX_JOB (PID bigint not null, JOB_DELETED boolean not null, RES_TYPE varchar(255), SUSPENDED_UNTIL timestamp, UPDATE_THRESHOLD_HIGH timestamp not null, UPDATE_THRESHOLD_LOW timestamp, primary key (PID))")
+ .addSql(DriverTypeEnum.MARIADB_10_1, "create table HFJ_RES_REINDEX_JOB (PID bigint not null, JOB_DELETED bit not null, RES_TYPE varchar(255), SUSPENDED_UNTIL datetime(6), UPDATE_THRESHOLD_HIGH datetime(6) not null, UPDATE_THRESHOLD_LOW datetime(6), primary key (PID))")
+ .addSql(DriverTypeEnum.POSTGRES_9_4, "persistence_create_postgres94.sql:create table HFJ_RES_REINDEX_JOB (PID int8 not null, JOB_DELETED boolean not null, RES_TYPE varchar(255), SUSPENDED_UNTIL timestamp, UPDATE_THRESHOLD_HIGH timestamp not null, UPDATE_THRESHOLD_LOW timestamp, primary key (PID))")
+ .addSql(DriverTypeEnum.MYSQL_5_7, " create table HFJ_RES_REINDEX_JOB (PID bigint not null, JOB_DELETED bit not null, RES_TYPE varchar(255), SUSPENDED_UNTIL datetime(6), UPDATE_THRESHOLD_HIGH datetime(6) not null, UPDATE_THRESHOLD_LOW datetime(6), primary key (PID))")
+ .addSql(DriverTypeEnum.ORACLE_12C, "create table HFJ_RES_REINDEX_JOB (PID number(19,0) not null, JOB_DELETED number(1,0) not null, RES_TYPE varchar2(255 char), SUSPENDED_UNTIL timestamp, UPDATE_THRESHOLD_HIGH timestamp not null, UPDATE_THRESHOLD_LOW timestamp, primary key (PID))");
+
}
private void init350() {
@@ -80,65 +102,69 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks {
// Indexes - Coords
Builder.BuilderWithTableName spidxCoords = version.onTable("HFJ_SPIDX_COORDS");
version.startSectionWithMessage("Starting work on table: " + spidxCoords.getTableName());
- spidxCoords
- .dropIndex("IDX_SP_COORDS");
spidxCoords
.addColumn("HASH_IDENTITY")
.nullable()
.type(AddColumnTask.ColumnTypeEnum.LONG);
- spidxCoords
- .addIndex("IDX_SP_COORDS_HASH")
- .unique(false)
- .withColumns("HASH_IDENTITY", "SP_LATITUDE", "SP_LONGITUDE");
- spidxCoords
- .addTask(new CalculateHashesTask()
- .setColumnName("HASH_IDENTITY")
- .addCalculator("HASH_IDENTITY", t -> BaseResourceIndexedSearchParam.calculateHashIdentity(t.getResourceType(), t.getString("SP_NAME")))
- );
+ if (!myFlags.contains(FlagEnum.NO_MIGRATE_HASHES)) {
+ spidxCoords
+ .dropIndex("IDX_SP_COORDS");
+ spidxCoords
+ .addIndex("IDX_SP_COORDS_HASH")
+ .unique(false)
+ .withColumns("HASH_IDENTITY", "SP_LATITUDE", "SP_LONGITUDE");
+ spidxCoords
+ .addTask(new CalculateHashesTask()
+ .setColumnName("HASH_IDENTITY")
+ .addCalculator("HASH_IDENTITY", t -> BaseResourceIndexedSearchParam.calculateHashIdentity(t.getResourceType(), t.getString("SP_NAME")))
+ );
+ }
// Indexes - Date
Builder.BuilderWithTableName spidxDate = version.onTable("HFJ_SPIDX_DATE");
version.startSectionWithMessage("Starting work on table: " + spidxDate.getTableName());
- spidxDate
- .dropIndex("IDX_SP_TOKEN");
spidxDate
.addColumn("HASH_IDENTITY")
.nullable()
.type(AddColumnTask.ColumnTypeEnum.LONG);
- spidxDate
- .addIndex("IDX_SP_DATE_HASH")
- .unique(false)
- .withColumns("HASH_IDENTITY", "SP_VALUE_LOW", "SP_VALUE_HIGH");
- spidxDate
- .addTask(new CalculateHashesTask()
- .setColumnName("HASH_IDENTITY")
- .addCalculator("HASH_IDENTITY", t -> BaseResourceIndexedSearchParam.calculateHashIdentity(t.getResourceType(), t.getString("SP_NAME")))
- );
+ if (!myFlags.contains(FlagEnum.NO_MIGRATE_HASHES)) {
+ spidxDate
+ .dropIndex("IDX_SP_TOKEN");
+ spidxDate
+ .addIndex("IDX_SP_DATE_HASH")
+ .unique(false)
+ .withColumns("HASH_IDENTITY", "SP_VALUE_LOW", "SP_VALUE_HIGH");
+ spidxDate
+ .addTask(new CalculateHashesTask()
+ .setColumnName("HASH_IDENTITY")
+ .addCalculator("HASH_IDENTITY", t -> BaseResourceIndexedSearchParam.calculateHashIdentity(t.getResourceType(), t.getString("SP_NAME")))
+ );
+ }
// Indexes - Number
Builder.BuilderWithTableName spidxNumber = version.onTable("HFJ_SPIDX_NUMBER");
version.startSectionWithMessage("Starting work on table: " + spidxNumber.getTableName());
- spidxNumber
- .dropIndex("IDX_SP_NUMBER");
spidxNumber
.addColumn("HASH_IDENTITY")
.nullable()
.type(AddColumnTask.ColumnTypeEnum.LONG);
- spidxNumber
- .addIndex("IDX_SP_NUMBER_HASH_VAL")
- .unique(false)
- .withColumns("HASH_IDENTITY", "SP_VALUE");
- spidxNumber
- .addTask(new CalculateHashesTask()
- .setColumnName("HASH_IDENTITY")
- .addCalculator("HASH_IDENTITY", t -> BaseResourceIndexedSearchParam.calculateHashIdentity(t.getResourceType(), t.getString("SP_NAME")))
- );
+ if (!myFlags.contains(FlagEnum.NO_MIGRATE_HASHES)) {
+ spidxNumber
+ .dropIndex("IDX_SP_NUMBER");
+ spidxNumber
+ .addIndex("IDX_SP_NUMBER_HASH_VAL")
+ .unique(false)
+ .withColumns("HASH_IDENTITY", "SP_VALUE");
+ spidxNumber
+ .addTask(new CalculateHashesTask()
+ .setColumnName("HASH_IDENTITY")
+ .addCalculator("HASH_IDENTITY", t -> BaseResourceIndexedSearchParam.calculateHashIdentity(t.getResourceType(), t.getString("SP_NAME")))
+ );
+ }
// Indexes - Quantity
Builder.BuilderWithTableName spidxQuantity = version.onTable("HFJ_SPIDX_QUANTITY");
version.startSectionWithMessage("Starting work on table: " + spidxQuantity.getTableName());
- spidxQuantity
- .dropIndex("IDX_SP_QUANTITY");
spidxQuantity
.addColumn("HASH_IDENTITY")
.nullable()
@@ -151,61 +177,63 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks {
.addColumn("HASH_IDENTITY_AND_UNITS")
.nullable()
.type(AddColumnTask.ColumnTypeEnum.LONG);
- spidxQuantity
- .addIndex("IDX_SP_QUANTITY_HASH")
- .unique(false)
- .withColumns("HASH_IDENTITY", "SP_VALUE");
- spidxQuantity
- .addIndex("IDX_SP_QUANTITY_HASH_UN")
- .unique(false)
- .withColumns("HASH_IDENTITY_AND_UNITS", "SP_VALUE");
- spidxQuantity
- .addIndex("IDX_SP_QUANTITY_HASH_SYSUN")
- .unique(false)
- .withColumns("HASH_IDENTITY_SYS_UNITS", "SP_VALUE");
- spidxQuantity
- .addTask(new CalculateHashesTask()
- .setColumnName("HASH_IDENTITY")
- .addCalculator("HASH_IDENTITY", t -> BaseResourceIndexedSearchParam.calculateHashIdentity(t.getResourceType(), t.getString("SP_NAME")))
- .addCalculator("HASH_IDENTITY_AND_UNITS", t -> ResourceIndexedSearchParamQuantity.calculateHashUnits(t.getResourceType(), t.getString("SP_NAME"), t.getString("SP_UNITS")))
- .addCalculator("HASH_IDENTITY_SYS_UNITS", t -> ResourceIndexedSearchParamQuantity.calculateHashSystemAndUnits(t.getResourceType(), t.getString("SP_NAME"), t.getString("SP_SYSTEM"), t.getString("SP_UNITS")))
- );
+ if (!myFlags.contains(FlagEnum.NO_MIGRATE_HASHES)) {
+ spidxQuantity
+ .dropIndex("IDX_SP_QUANTITY");
+ spidxQuantity
+ .addIndex("IDX_SP_QUANTITY_HASH")
+ .unique(false)
+ .withColumns("HASH_IDENTITY", "SP_VALUE");
+ spidxQuantity
+ .addIndex("IDX_SP_QUANTITY_HASH_UN")
+ .unique(false)
+ .withColumns("HASH_IDENTITY_AND_UNITS", "SP_VALUE");
+ spidxQuantity
+ .addIndex("IDX_SP_QUANTITY_HASH_SYSUN")
+ .unique(false)
+ .withColumns("HASH_IDENTITY_SYS_UNITS", "SP_VALUE");
+ spidxQuantity
+ .addTask(new CalculateHashesTask()
+ .setColumnName("HASH_IDENTITY")
+ .addCalculator("HASH_IDENTITY", t -> BaseResourceIndexedSearchParam.calculateHashIdentity(t.getResourceType(), t.getString("SP_NAME")))
+ .addCalculator("HASH_IDENTITY_AND_UNITS", t -> ResourceIndexedSearchParamQuantity.calculateHashUnits(t.getResourceType(), t.getString("SP_NAME"), t.getString("SP_UNITS")))
+ .addCalculator("HASH_IDENTITY_SYS_UNITS", t -> ResourceIndexedSearchParamQuantity.calculateHashSystemAndUnits(t.getResourceType(), t.getString("SP_NAME"), t.getString("SP_SYSTEM"), t.getString("SP_UNITS")))
+ );
+ }
// Indexes - String
Builder.BuilderWithTableName spidxString = version.onTable("HFJ_SPIDX_STRING");
version.startSectionWithMessage("Starting work on table: " + spidxString.getTableName());
- spidxString
- .dropIndex("IDX_SP_STRING");
spidxString
.addColumn("HASH_NORM_PREFIX")
.nullable()
.type(AddColumnTask.ColumnTypeEnum.LONG);
- spidxString
- .addIndex("IDX_SP_STRING_HASH_NRM")
- .unique(false)
- .withColumns("HASH_NORM_PREFIX", "SP_VALUE_NORMALIZED");
- spidxString
- .addColumn("HASH_EXACT")
- .nullable()
- .type(AddColumnTask.ColumnTypeEnum.LONG);
- spidxString
- .addIndex("IDX_SP_STRING_HASH_EXCT")
- .unique(false)
- .withColumns("HASH_EXACT");
- spidxString
- .addTask(new CalculateHashesTask()
- .setColumnName("HASH_NORM_PREFIX")
- .addCalculator("HASH_NORM_PREFIX", t -> ResourceIndexedSearchParamString.calculateHashNormalized(new DaoConfig(), t.getResourceType(), t.getString("SP_NAME"), t.getString("SP_VALUE_NORMALIZED")))
- .addCalculator("HASH_EXACT", t -> ResourceIndexedSearchParamString.calculateHashExact(t.getResourceType(), t.getParamName(), t.getString("SP_VALUE_EXACT")))
- );
+ if (!myFlags.contains(FlagEnum.NO_MIGRATE_HASHES)) {
+ spidxString
+ .dropIndex("IDX_SP_STRING");
+ spidxString
+ .addIndex("IDX_SP_STRING_HASH_NRM")
+ .unique(false)
+ .withColumns("HASH_NORM_PREFIX", "SP_VALUE_NORMALIZED");
+ spidxString
+ .addColumn("HASH_EXACT")
+ .nullable()
+ .type(AddColumnTask.ColumnTypeEnum.LONG);
+ spidxString
+ .addIndex("IDX_SP_STRING_HASH_EXCT")
+ .unique(false)
+ .withColumns("HASH_EXACT");
+ spidxString
+ .addTask(new CalculateHashesTask()
+ .setColumnName("HASH_NORM_PREFIX")
+ .addCalculator("HASH_NORM_PREFIX", t -> ResourceIndexedSearchParamString.calculateHashNormalized(new DaoConfig(), t.getResourceType(), t.getString("SP_NAME"), t.getString("SP_VALUE_NORMALIZED")))
+ .addCalculator("HASH_EXACT", t -> ResourceIndexedSearchParamString.calculateHashExact(t.getResourceType(), t.getParamName(), t.getString("SP_VALUE_EXACT")))
+ );
+ }
// Indexes - Token
Builder.BuilderWithTableName spidxToken = version.onTable("HFJ_SPIDX_TOKEN");
version.startSectionWithMessage("Starting work on table: " + spidxToken.getTableName());
- spidxToken
- .dropIndex("IDX_SP_TOKEN");
- spidxToken
- .dropIndex("IDX_SP_TOKEN_UNQUAL");
spidxToken
.addColumn("HASH_IDENTITY")
.nullable()
@@ -222,30 +250,36 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks {
.addColumn("HASH_VALUE")
.nullable()
.type(AddColumnTask.ColumnTypeEnum.LONG);
- spidxToken
- .addIndex("IDX_SP_TOKEN_HASH")
- .unique(false)
- .withColumns("HASH_IDENTITY");
- spidxToken
- .addIndex("IDX_SP_TOKEN_HASH_S")
- .unique(false)
- .withColumns("HASH_SYS");
- spidxToken
- .addIndex("IDX_SP_TOKEN_HASH_SV")
- .unique(false)
- .withColumns("HASH_SYS_AND_VALUE");
- spidxToken
- .addIndex("IDX_SP_TOKEN_HASH_V")
- .unique(false)
- .withColumns("HASH_VALUE");
- spidxToken
- .addTask(new CalculateHashesTask()
- .setColumnName("HASH_IDENTITY")
- .addCalculator("HASH_IDENTITY", t -> BaseResourceIndexedSearchParam.calculateHashIdentity(t.getResourceType(), t.getString("SP_NAME")))
- .addCalculator("HASH_SYS", t -> ResourceIndexedSearchParamToken.calculateHashSystem(t.getResourceType(), t.getParamName(), t.getString("SP_SYSTEM")))
- .addCalculator("HASH_SYS_AND_VALUE", t -> ResourceIndexedSearchParamToken.calculateHashSystemAndValue(t.getResourceType(), t.getParamName(), t.getString("SP_SYSTEM"), t.getString("SP_VALUE")))
- .addCalculator("HASH_VALUE", t -> ResourceIndexedSearchParamToken.calculateHashValue(t.getResourceType(), t.getParamName(), t.getString("SP_VALUE")))
- );
+ if (!myFlags.contains(FlagEnum.NO_MIGRATE_HASHES)) {
+ spidxToken
+ .dropIndex("IDX_SP_TOKEN");
+ spidxToken
+ .dropIndex("IDX_SP_TOKEN_UNQUAL");
+ spidxToken
+ .addIndex("IDX_SP_TOKEN_HASH")
+ .unique(false)
+ .withColumns("HASH_IDENTITY");
+ spidxToken
+ .addIndex("IDX_SP_TOKEN_HASH_S")
+ .unique(false)
+ .withColumns("HASH_SYS");
+ spidxToken
+ .addIndex("IDX_SP_TOKEN_HASH_SV")
+ .unique(false)
+ .withColumns("HASH_SYS_AND_VALUE");
+ spidxToken
+ .addIndex("IDX_SP_TOKEN_HASH_V")
+ .unique(false)
+ .withColumns("HASH_VALUE");
+ spidxToken
+ .addTask(new CalculateHashesTask()
+ .setColumnName("HASH_IDENTITY")
+ .addCalculator("HASH_IDENTITY", t -> BaseResourceIndexedSearchParam.calculateHashIdentity(t.getResourceType(), t.getString("SP_NAME")))
+ .addCalculator("HASH_SYS", t -> ResourceIndexedSearchParamToken.calculateHashSystem(t.getResourceType(), t.getParamName(), t.getString("SP_SYSTEM")))
+ .addCalculator("HASH_SYS_AND_VALUE", t -> ResourceIndexedSearchParamToken.calculateHashSystemAndValue(t.getResourceType(), t.getParamName(), t.getString("SP_SYSTEM"), t.getString("SP_VALUE")))
+ .addCalculator("HASH_VALUE", t -> ResourceIndexedSearchParamToken.calculateHashValue(t.getResourceType(), t.getParamName(), t.getString("SP_VALUE")))
+ );
+ }
// Indexes - URI
Builder.BuilderWithTableName spidxUri = version.onTable("HFJ_SPIDX_URI");
@@ -254,24 +288,26 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks {
.addColumn("HASH_IDENTITY")
.nullable()
.type(AddColumnTask.ColumnTypeEnum.LONG);
- spidxUri
- .addIndex("IDX_SP_URI_HASH_IDENTITY")
- .unique(false)
- .withColumns("HASH_IDENTITY", "SP_URI");
- spidxUri
- .addColumn("HASH_URI")
- .nullable()
- .type(AddColumnTask.ColumnTypeEnum.LONG);
- spidxUri
- .addIndex("IDX_SP_URI_HASH_URI")
- .unique(false)
- .withColumns("HASH_URI");
- spidxUri
- .addTask(new CalculateHashesTask()
- .setColumnName("HASH_IDENTITY")
- .addCalculator("HASH_IDENTITY", t -> BaseResourceIndexedSearchParam.calculateHashIdentity(t.getResourceType(), t.getString("SP_NAME")))
- .addCalculator("HASH_URI", t -> ResourceIndexedSearchParamUri.calculateHashUri(t.getResourceType(), t.getString("SP_NAME"), t.getString("SP_URI")))
- );
+ if (!myFlags.contains(FlagEnum.NO_MIGRATE_HASHES)) {
+ spidxUri
+ .addIndex("IDX_SP_URI_HASH_IDENTITY")
+ .unique(false)
+ .withColumns("HASH_IDENTITY", "SP_URI");
+ spidxUri
+ .addColumn("HASH_URI")
+ .nullable()
+ .type(AddColumnTask.ColumnTypeEnum.LONG);
+ spidxUri
+ .addIndex("IDX_SP_URI_HASH_URI")
+ .unique(false)
+ .withColumns("HASH_URI");
+ spidxUri
+ .addTask(new CalculateHashesTask()
+ .setColumnName("HASH_IDENTITY")
+ .addCalculator("HASH_IDENTITY", t -> BaseResourceIndexedSearchParam.calculateHashIdentity(t.getResourceType(), t.getString("SP_NAME")))
+ .addCalculator("HASH_URI", t -> ResourceIndexedSearchParamUri.calculateHashUri(t.getResourceType(), t.getString("SP_NAME"), t.getString("SP_URI")))
+ );
+ }
// Search Parameter Presence
Builder.BuilderWithTableName spp = version.onTable("HFJ_RES_PARAM_PRESENT");
@@ -492,5 +528,27 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks {
}
+ public enum FlagEnum {
+ NO_MIGRATE_HASHES("no-migrate-350-hashes");
+
+ private final String myCommandLineValue;
+
+ FlagEnum(String theCommandLineValue) {
+ myCommandLineValue = theCommandLineValue;
+ }
+
+ public String getCommandLineValue() {
+ return myCommandLineValue;
+ }
+
+ public static FlagEnum fromCommandLineValue(String theCommandLineValue) {
+ Optional retVal = Arrays.stream(values()).filter(t -> t.myCommandLineValue.equals(theCommandLineValue)).findFirst();
+ return retVal.orElseThrow(() -> {
+ List<String> validValues = Arrays.stream(values()).map(t -> t.myCommandLineValue).sorted().collect(Collectors.toList());
+ return new IllegalArgumentException("Invalid flag \"" + theCommandLineValue + "\". Valid values: " + validValues);
+ });
+ }
+ }
+
}
diff --git a/hapi-fhir-jpaserver-migrate/src/test/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasksTest.java b/hapi-fhir-jpaserver-migrate/src/test/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasksTest.java
index 4ff02d95a2d..4d29e9588a1 100644
--- a/hapi-fhir-jpaserver-migrate/src/test/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasksTest.java
+++ b/hapi-fhir-jpaserver-migrate/src/test/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasksTest.java
@@ -2,11 +2,13 @@ package ca.uhn.fhir.jpa.migrate.tasks;
import org.junit.Test;
+import java.util.Collections;
+
public class HapiFhirJpaMigrationTasksTest {
@Test
public void testCreate() {
- new HapiFhirJpaMigrationTasks();
+ new HapiFhirJpaMigrationTasks(Collections.emptySet());
}
diff --git a/pom.xml b/pom.xml
index d5ff60b05fb..fcb53fa4161 100644
--- a/pom.xml
+++ b/pom.xml
@@ -2066,7 +2066,7 @@
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-project-info-reports-plugin</artifactId>
- <version>2.8.1</version>
+ <version>3.0.0</version>
false
diff --git a/src/site/xdoc/doc_cli.xml b/src/site/xdoc/doc_cli.xml
index 8f65b8732d8..5598f02a7fa 100644
--- a/src/site/xdoc/doc_cli.xml
+++ b/src/site/xdoc/doc_cli.xml
@@ -142,42 +142,16 @@ Java HotSpot(TM) 64-Bit Server VM (build 25.60-b23, mixed mode)]]>
- When upgrading the JPA server from one version of HAPI FHIR to a newer version,
- often there will be changes to the database schema. The Migrate Database
- command can be used to perform a migration from one version to the next.
-
-
- Note that this feature was added in HAPI FHIR 3.5.0. It is not able to migrate
- from versions prior to HAPI FHIR 3.4.0. Please make a backup of your
- database before running this command!
-
-
- The following example shows how to use the migrator utility to migrate between two versions.
-
- ./hapi-fhir-cli migrate-database -d DERBY_EMBEDDED -u "jdbc:derby:directory:target/jpaserver_derby_files;create=true" -n "" -p "" -f V3_4_0 -t V3_5_0
-
-
- You may use the following command to get detailed help on the options:
-
- ./hapi-fhir-cli help migrate-database
-
-
- Note the arguments:
-
- -d [dialect]
- This indicates the database dialect to use. See the detailed help for a list of options
- -f [version]
- The version to migrate from
- -t [version]
- The version to migrate to
-
+ The migrate-database
command may be used to Migrate a database
+ schema when upgrading a
+ HAPI FHIR JPA project from one version of HAPI
+ FHIR to another version.
-
-
- Note that the Oracle JDBC drivers are not distributed in the Maven Central repository,
- so they are not included in HAPI FHIR. In order to use this command with an Oracle database,
- you will need to invoke the CLI as follows:
-
- java -cp hapi-fhir-cli.jar ca.uhn.fhir.cli.App migrate-database -d ORACLE_12C -u "[url]" -n "[username]" -p "[password]" -f V3_4_0 -t V3_5_0
-
+
+ See Upgrading HAPI FHIR JPA
+ for information on how to use this command.
+
diff --git a/src/site/xdoc/doc_jpa.xml b/src/site/xdoc/doc_jpa.xml
index a79c2d83689..7719d4c592a 100644
--- a/src/site/xdoc/doc_jpa.xml
+++ b/src/site/xdoc/doc_jpa.xml
@@ -366,7 +366,143 @@ delete from hfj_res_ver where res_id in (select res_id from hfj_resource where s
-->
-
+
+
+
+
+
+ HAPI FHIR JPA is a constantly evolving product, with new features being added to each
+ new version of the library. As a result, it is generally necessary to execute a database
+ migration as a part of an upgrade to HAPI FHIR.
+
+
+
+ When upgrading the JPA server from one version of HAPI FHIR to a newer version,
+ often there will be changes to the database schema. The Migrate Database
+ command can be used to perform a migration from one version to the next.
+
+
+
+ Note that this feature was added in HAPI FHIR 3.5.0. It is not able to migrate
+ from versions prior to HAPI FHIR 3.4.0. Please make a backup of your
+ database before running this command!
+
+
+ The following example shows how to use the migrator utility to migrate between two versions.
+
+ ./hapi-fhir-cli migrate-database -d DERBY_EMBEDDED -u "jdbc:derby:directory:target/jpaserver_derby_files;create=true" -n "" -p "" -f V3_4_0 -t V3_5_0
+
+
+ You may use the following command to get detailed help on the options:
+
+ ./hapi-fhir-cli help migrate-database
+
+
+ Note the arguments:
+
+ -d [dialect]
- This indicates the database dialect to use. See the detailed help for a list of options
+ -f [version]
- The version to migrate from
+ -t [version]
- The version to migrate to
+
+
+
+
+
+ Note that the Oracle JDBC drivers are not distributed in the Maven Central repository,
+ so they are not included in HAPI FHIR. In order to use this command with an Oracle database,
+ you will need to invoke the CLI as follows:
+
+ java -cp hapi-fhir-cli.jar ca.uhn.fhir.cli.App migrate-database -d ORACLE_12C -u "[url]" -n "[username]" -p "[password]" -f V3_4_0 -t V3_5_0
+
+
+
+
+ As of HAPI FHIR 3.5.0 a new mechanism for creating the JPA index tables (HFJ_SPIDX_xxx)
+ has been implemented. This new mechanism uses hashes in place of large multi-column
+ indexes. This improves both lookup times as well as required storage space. This change
+ also paves the way for future ability to provide efficient multi-tenant searches (which
+ is not yet implemented but is planned as an incremental improvement).
+
+
+ This change is not a lightweight change however, as it requires a rebuild of the
+ index tables in order to generate the hashes. This can take a long time on databases
+ that already have a large amount of data.
+
+
+ As a result, in HAPI FHIR JPA 3.6.0, an efficient way of upgrading existing databases
+ was added. Under this new scheme, columns for the hashes are added but values are not
+ calculated initially, database indexes are not modified on the HFJ_SPIDX_xxx tables,
+ and the previous columns are still used for searching as was the case in HAPI FHIR
+ JPA 3.4.0.
+
+
+ In order to perform a migration using this functionality, the following steps should
+ be followed:
+
+
+
+ Stop your running HAPI FHIR JPA instance (and remember to make a backup of your
+ database before proceeding with any changes!)
+
+
+ Modify your DaoConfig
to specify that hash-based searches should not be used, using
+ the following setting:
+ myDaoConfig.setDisableHashBasedSearches(true);
+
+
+ Make sure that you have your JPA settings configured to not automatically
+ create database indexes and columns using the following setting
+ in your JPA Properties:
+ extraProperties.put("hibernate.hbm2ddl.auto", "none");
+
+
+ Run the database migrator command, including the entry -x no-migrate-350-hashes
+ on the command line. For example:
+ ./hapi-fhir-cli migrate-database -d DERBY_EMBEDDED -u "jdbc:derby:directory:target/jpaserver_derby_files;create=true" -n "" -p "" -f V3_4_0 -t V3_6_0 -x no-migrate-350-hashes
+
+
+ Rebuild and start your HAPI FHIR JPA server. At this point you should have a working
+ HAPI FHIR JPA 3.6.0 server that is still using HAPI FHIR 3.4.0 search indexes. Search hashes
+ will be generated for any newly created or updated data but existing data will have null
+ hashes.
+
+
+ With the system running, request a complete reindex of the data in the database using
+ an HTTP request such as the following:
+ GET /$mark-all-resources-for-reindexing
+ Note that this is a custom operation built into the HAPI FHIR JPA server. It should
+ be secured in a real deployment, so authentication is likely required for this
+ call.
+
+
+ You can track the reindexing process by watching your server logs,
+ but also by using the following SQL executed directly against your database:
+
+ SELECT * FROM HFJ_RES_REINDEX_JOB
+ When this query no longer returns any rows, the reindexing process is complete.
+
+
+ At this time, HAPI FHIR should be stopped once again in order to convert it
+ to using the hash-based indexes.
+
+
+ Modify your DaoConfig
to specify that hash-based searches are used, using
+ the following setting (this is the default setting, so it could also simply
+ be omitted):
+ myDaoConfig.setDisableHashBasedSearches(false);
+
+
+ Execute the migrator tool again, this time omitting the flag option, e.g.
+ ./hapi-fhir-cli migrate-database -d DERBY_EMBEDDED -u "jdbc:derby:directory:target/jpaserver_derby_files;create=true" -n "" -p "" -f V3_4_0 -t V3_6_0
+
+
+ Rebuild, and start HAPI FHIR JPA again.
+
+
+
+
+
+