Merge remote-tracking branch 'origin/master' into mm-20231121-pass-properties-through-remote-terminology-service-codesystem-lookup

This commit is contained in:
Martha Mitran 2023-12-01 16:26:40 -08:00
commit c36aa49a0c
127 changed files with 2631 additions and 294 deletions

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>6.11.2-SNAPSHOT</version>
<version>6.11.4-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.11.2-SNAPSHOT</version>
<version>6.11.4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.11.2-SNAPSHOT</version>
<version>6.11.4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -93,6 +93,10 @@ public enum Pointcut implements IPointcut {
* <li>
* ca.uhn.fhir.rest.client.api.IRestfulClient - The client object making the request
* </li>
* <li>
* ca.uhn.fhir.rest.client.api.ClientResponseContext - Contains an IHttpRequest, an IHttpResponse, and an IRestfulClient
* and also allows the client to mutate the contained IHttpResponse
* </li>
* </ul>
* </p>
* Hook methods must return <code>void</code>.
@ -101,7 +105,8 @@ public enum Pointcut implements IPointcut {
void.class,
"ca.uhn.fhir.rest.client.api.IHttpRequest",
"ca.uhn.fhir.rest.client.api.IHttpResponse",
"ca.uhn.fhir.rest.client.api.IRestfulClient"),
"ca.uhn.fhir.rest.client.api.IRestfulClient",
"ca.uhn.fhir.rest.client.api.ClientResponseContext"),
/**
* <b>Server Hook:</b>

View File

@ -0,0 +1,103 @@
/*-
* #%L
* HAPI FHIR - Core Library
* %%
* Copyright (C) 2014 - 2023 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package ca.uhn.fhir.rest.client.api;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.interceptor.api.Pointcut;
import org.hl7.fhir.instance.model.api.IBaseResource;
import java.util.Objects;
import java.util.StringJoiner;
/**
 * Used to pass context to {@link Pointcut#CLIENT_RESPONSE}, including a mutable {@link IHttpResponse}.
 * <p>
 * Interceptors registered against {@code CLIENT_RESPONSE} receive an instance of this class and may
 * replace the response seen by downstream processing via {@link #setHttpResponse(IHttpResponse)}.
 * All other members are immutable for the lifetime of the context.
 */
public class ClientResponseContext {
	private final IHttpRequest myHttpRequest;
	// Deliberately mutable: interceptors may swap in a replacement response
	private IHttpResponse myHttpResponse;
	private final IRestfulClient myRestfulClient;
	private final FhirContext myFhirContext;
	// Expected resource type of the response body, or null if not known at call time
	private final Class<? extends IBaseResource> myReturnType;

	/**
	 * Constructor
	 *
	 * @param theHttpRequest   the request that produced the response
	 * @param theHttpResponse  the (possibly later replaced) HTTP response
	 * @param theRestfulClient the client that issued the request
	 * @param theFhirContext   the FHIR context in use
	 * @param theReturnType    the expected resource return type, may be {@code null}
	 */
	public ClientResponseContext(
			IHttpRequest theHttpRequest,
			IHttpResponse theHttpResponse,
			IRestfulClient theRestfulClient,
			FhirContext theFhirContext,
			Class<? extends IBaseResource> theReturnType) {
		this.myHttpRequest = theHttpRequest;
		this.myHttpResponse = theHttpResponse;
		this.myRestfulClient = theRestfulClient;
		this.myFhirContext = theFhirContext;
		this.myReturnType = theReturnType;
	}

	public IHttpRequest getHttpRequest() {
		return myHttpRequest;
	}

	/**
	 * @return the current response — either the original, or whatever a hook last set
	 */
	public IHttpResponse getHttpResponse() {
		return myHttpResponse;
	}

	public IRestfulClient getRestfulClient() {
		return myRestfulClient;
	}

	public FhirContext getFhirContext() {
		return myFhirContext;
	}

	public Class<? extends IBaseResource> getReturnType() {
		return myReturnType;
	}

	/**
	 * Replaces the response that downstream processing will consume.
	 *
	 * @param theHttpResponse the new response to substitute
	 */
	public void setHttpResponse(IHttpResponse theHttpResponse) {
		this.myHttpResponse = theHttpResponse;
	}

	@Override
	public boolean equals(Object o) {
		if (this == o) return true;
		if (o == null || getClass() != o.getClass()) return false;
		ClientResponseContext that = (ClientResponseContext) o;
		return Objects.equals(myHttpRequest, that.myHttpRequest)
				&& Objects.equals(myHttpResponse, that.myHttpResponse)
				&& Objects.equals(myRestfulClient, that.myRestfulClient)
				&& Objects.equals(myFhirContext, that.myFhirContext)
				&& Objects.equals(myReturnType, that.myReturnType);
	}

	@Override
	public int hashCode() {
		return Objects.hash(myHttpRequest, myHttpResponse, myRestfulClient, myFhirContext, myReturnType);
	}

	@Override
	public String toString() {
		return new StringJoiner(", ", ClientResponseContext.class.getSimpleName() + "[", "]")
				.add("myHttpRequest=" + myHttpRequest)
				.add("myHttpResponse=" + myHttpResponse)
				.add("myRestfulClient=" + myRestfulClient)
				.add("myFhirContext=" + myFhirContext)
				.add("myReturnType=" + myReturnType)
				.toString();
	}
}

View File

@ -4,7 +4,7 @@
<modelVersion>4.0.0</modelVersion>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-bom</artifactId>
<version>6.11.2-SNAPSHOT</version>
<version>6.11.4-SNAPSHOT</version>
<packaging>pom</packaging>
<name>HAPI FHIR BOM</name>
@ -12,7 +12,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.11.2-SNAPSHOT</version>
<version>6.11.4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>6.11.2-SNAPSHOT</version>
<version>6.11.4-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.11.2-SNAPSHOT</version>
<version>6.11.4-SNAPSHOT</version>
<relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -8,6 +8,8 @@ import ca.uhn.fhir.batch2.model.JobInstanceStartRequest;
import ca.uhn.fhir.batch2.model.StatusEnum;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.batch.models.Batch2JobStartResponse;
import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.interceptor.LoggingInterceptor;
import ca.uhn.fhir.system.HapiSystemProperties;
@ -66,6 +68,8 @@ public class BulkImportCommandIT {
private IJobCoordinator myJobCoordinator;
private final BulkDataImportProvider myProvider = new BulkDataImportProvider();
private final FhirContext myCtx = FhirContext.forR4Cached();
@Mock
private IRequestPartitionHelperSvc myRequestPartitionHelperSvc;
@RegisterExtension
public RestfulServerExtension myRestfulServerExtension = new RestfulServerExtension(myCtx, myProvider)
.registerInterceptor(new LoggingInterceptor());
@ -77,6 +81,7 @@ public class BulkImportCommandIT {
public void beforeEach() throws IOException {
myProvider.setFhirContext(myCtx);
myProvider.setJobCoordinator(myJobCoordinator);
myProvider.setRequestPartitionHelperService(myRequestPartitionHelperSvc);
myTempDir = Files.createTempDirectory("hapifhir");
ourLog.info("Created temp directory: {}", myTempDir);
}
@ -123,7 +128,7 @@ public class BulkImportCommandIT {
await().until(() -> myRestfulServerExtension.getRequestContentTypes().size(), equalTo(2));
ourLog.info("Initiation requests complete");
verify(myJobCoordinator, timeout(10000).times(1)).startInstance(myStartCaptor.capture());
verify(myJobCoordinator, timeout(10000).times(1)).startInstance(any(RequestDetails.class), myStartCaptor.capture());
JobInstanceStartRequest startRequest = myStartCaptor.getValue();
BulkImportJobParameters jobParameters = startRequest.getParameters(BulkImportJobParameters.class);
@ -165,7 +170,7 @@ public class BulkImportCommandIT {
await().until(() -> myRestfulServerExtension.getRequestContentTypes().size(), equalTo(2));
ourLog.info("Initiation requests complete");
verify(myJobCoordinator, timeout(10000).times(1)).startInstance(myStartCaptor.capture());
verify(myJobCoordinator, timeout(10000).times(1)).startInstance(any(RequestDetails.class), myStartCaptor.capture());
JobInstanceStartRequest startRequest = myStartCaptor.getValue();
BulkImportJobParameters jobParameters = startRequest.getParameters(BulkImportJobParameters.class);
@ -206,7 +211,7 @@ public class BulkImportCommandIT {
await().until(() -> myRestfulServerExtension.getRequestContentTypes().size(), equalTo(2));
ourLog.info("Initiation requests complete");
verify(myJobCoordinator, timeout(10000).times(1)).startInstance(myStartCaptor.capture());
verify(myJobCoordinator, timeout(10000).times(1)).startInstance(any(RequestDetails.class), myStartCaptor.capture());
try{
JobInstanceStartRequest startRequest = myStartCaptor.getValue();

View File

@ -5,11 +5,14 @@ import ca.uhn.fhir.jpa.migrate.JdbcUtils;
import ca.uhn.fhir.jpa.migrate.SchemaMigrator;
import ca.uhn.fhir.jpa.migrate.dao.HapiMigrationDao;
import ca.uhn.fhir.jpa.migrate.entity.HapiMigrationEntity;
import ca.uhn.fhir.jpa.util.RandomTextUtils;
import ca.uhn.fhir.system.HapiSystemProperties;
import com.google.common.base.Charsets;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.junit.jupiter.api.MethodOrderer;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestMethodOrder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.jdbc.core.JdbcTemplate;
@ -35,10 +38,11 @@ import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
@TestMethodOrder(MethodOrderer.MethodName.class)
public class HapiFlywayMigrateDatabaseCommandTest {
private static final Logger ourLog = LoggerFactory.getLogger(HapiFlywayMigrateDatabaseCommandTest.class);
public static final String DB_DIRECTORY = "target/h2_test";
private final String myDbDirectory = "target/h2_test/" + RandomTextUtils.newSecureRandomAlphaNumericString(5);
static {
HapiSystemProperties.enableTestMode();
@ -252,12 +256,12 @@ public class HapiFlywayMigrateDatabaseCommandTest {
@Nonnull
private File getLocation(String theDatabaseName) throws IOException {
File directory = new File(DB_DIRECTORY);
File directory = new File(myDbDirectory);
if (directory.exists()) {
FileUtils.deleteDirectory(directory);
}
return new File(DB_DIRECTORY + "/" + theDatabaseName);
return new File(myDbDirectory + "/" + theDatabaseName);
}
private void seedDatabase340(DriverTypeEnum.ConnectionProperties theConnectionProperties) {

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-cli</artifactId>
<version>6.11.2-SNAPSHOT</version>
<version>6.11.4-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>6.11.2-SNAPSHOT</version>
<version>6.11.4-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.11.2-SNAPSHOT</version>
<version>6.11.4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.11.2-SNAPSHOT</version>
<version>6.11.4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -0,0 +1,135 @@
/*
* #%L
* HAPI FHIR - Client Framework
* %%
* Copyright (C) 2014 - 2023 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package ca.uhn.fhir.rest.client.apache;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.rest.client.api.IHttpResponse;
import ca.uhn.fhir.rest.client.impl.BaseHttpResponse;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.util.StopWatch;
import org.apache.commons.io.IOUtils;
import org.apache.http.client.methods.CloseableHttpResponse;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.Map;
/**
 * Process a modified copy of an existing {@link IHttpResponse} with a String containing new content.
 * <p/>
 * Meant to be used with custom interceptors that need to hijack an existing IHttpResponse with new content.
 * Status, headers, mime type and the stop watch are all delegated to the original response; only the
 * entity body is replaced by {@code theNewContent} (encoded as UTF-8, matching {@link #createReader()}).
 */
public class ModifiedStringApacheHttpResponse extends BaseHttpResponse implements IHttpResponse {
	private static final org.slf4j.Logger ourLog =
			org.slf4j.LoggerFactory.getLogger(ModifiedStringApacheHttpResponse.class);
	private boolean myEntityBuffered = false;
	private final String myNewContent;
	private final IHttpResponse myOrigHttpResponse;
	private byte[] myEntityBytes = null;

	public ModifiedStringApacheHttpResponse(
			IHttpResponse theOrigHttpResponse, String theNewContent, StopWatch theResponseStopWatch) {
		super(theResponseStopWatch);
		myOrigHttpResponse = theOrigHttpResponse;
		myNewContent = theNewContent;
	}

	/**
	 * Buffers the (replacement) entity so it can be re-read. Safe to call more than once.
	 */
	@Override
	public void bufferEntity() throws IOException {
		if (myEntityBuffered) {
			return;
		}
		try (InputStream respEntity = readEntity()) {
			if (respEntity != null) {
				try {
					myEntityBytes = IOUtils.toByteArray(respEntity);
				} catch (IllegalStateException exception) {
					// Preserve the cause so the original stack trace is not lost
					throw new InternalErrorException(Msg.code(2447) + exception, exception);
				}
				myEntityBuffered = true;
			}
		}
	}

	/**
	 * Closes the underlying original response if it is closeable; close failures are
	 * logged at debug level and otherwise ignored.
	 */
	@Override
	public void close() {
		if (myOrigHttpResponse instanceof CloseableHttpResponse) {
			try {
				((CloseableHttpResponse) myOrigHttpResponse).close();
			} catch (IOException exception) {
				ourLog.debug("Failed to close response", exception);
			}
		}
	}

	@Override
	public Reader createReader() throws IOException {
		return new InputStreamReader(readEntity(), StandardCharsets.UTF_8);
	}

	@Override
	public Map<String, List<String>> getAllHeaders() {
		return myOrigHttpResponse.getAllHeaders();
	}

	@Override
	public List<String> getHeaders(String theName) {
		return myOrigHttpResponse.getHeaders(theName);
	}

	@Override
	public String getMimeType() {
		return myOrigHttpResponse.getMimeType();
	}

	@Override
	public StopWatch getRequestStopWatch() {
		return myOrigHttpResponse.getRequestStopWatch();
	}

	/**
	 * @return always {@code null} — there is no single native response object backing this wrapper
	 */
	@Override
	public Object getResponse() {
		return null;
	}

	@Override
	public int getStatus() {
		return myOrigHttpResponse.getStatus();
	}

	@Override
	public String getStatusInfo() {
		return myOrigHttpResponse.getStatusInfo();
	}

	/**
	 * @return a fresh stream over the replacement content; buffered bytes are used once
	 * {@link #bufferEntity()} has been called
	 */
	@Override
	public InputStream readEntity() {
		if (myEntityBuffered) {
			return new ByteArrayInputStream(myEntityBytes);
		} else {
			// Encode with UTF-8 explicitly: createReader() decodes with UTF-8, and the
			// platform default charset is not guaranteed to match
			return new ByteArrayInputStream(myNewContent.getBytes(StandardCharsets.UTF_8));
		}
	}
}

View File

@ -36,6 +36,7 @@ import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.EncodingEnum;
import ca.uhn.fhir.rest.api.RequestFormatParamStyleEnum;
import ca.uhn.fhir.rest.api.SummaryEnum;
import ca.uhn.fhir.rest.client.api.ClientResponseContext;
import ca.uhn.fhir.rest.client.api.IHttpClient;
import ca.uhn.fhir.rest.client.api.IHttpRequest;
import ca.uhn.fhir.rest.client.api.IHttpResponse;
@ -352,12 +353,24 @@ public abstract class BaseClient implements IRestfulClient {
response = httpRequest.execute();
final Class<? extends IBaseResource> returnType = (binding instanceof ResourceResponseHandler)
? ((ResourceResponseHandler<? extends IBaseResource>) binding).getReturnType()
: null;
final ClientResponseContext clientResponseContext =
new ClientResponseContext(httpRequest, response, this, getFhirContext(), returnType);
HookParams responseParams = new HookParams();
responseParams.add(IHttpRequest.class, httpRequest);
responseParams.add(IHttpResponse.class, response);
responseParams.add(IRestfulClient.class, this);
responseParams.add(ClientResponseContext.class, clientResponseContext);
getInterceptorService().callHooks(Pointcut.CLIENT_RESPONSE, responseParams);
// Replace the contents of the response with whatever the hook returned, or the same response as before if
// it no-op'd
response = clientResponseContext.getHttpResponse();
String mimeType;
if (Constants.STATUS_HTTP_204_NO_CONTENT == response.getStatus()) {
mimeType = null;
@ -645,6 +658,10 @@ public abstract class BaseClient implements IRestfulClient {
myAllowHtmlResponse = theAllowHtmlResponse;
}
public Class<T> getReturnType() {
return myReturnType;
}
@Override
public T invokeClient(
String theResponseMimeType,

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.11.2-SNAPSHOT</version>
<version>6.11.4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>6.11.2-SNAPSHOT</version>
<version>6.11.4-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.11.2-SNAPSHOT</version>
<version>6.11.4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -0,0 +1,7 @@
---
type: fix
issue: 5192
title: "Fixed a bug where search Bundles with `include` entries from an _include query parameter might
trigger a 'next' link to blank pages when
no more `match` results are available.
"

View File

@ -0,0 +1,4 @@
---
type: add
issue: 5442
title: "The ValidatorResourceFetcher will now resolve canonical URL references as well as simple local references."

View File

@ -0,0 +1,6 @@
---
type: add
issue: 5502
jira: SMILE-7262
title: "It is now possible to mutate an HTTP response from the CLIENT_RESPONSE Pointcut, and pass this mutated response
to downstream processing."

View File

@ -0,0 +1,6 @@
---
type: fix
issue: 5511
title: "Previously, when creating an index as a part of a migration, if the index already existed with a different name
on Oracle, the migration would fail. This has been fixed so that the create index migration task now recovers with
a warning message if the index already exists with a different name."

View File

@ -0,0 +1,4 @@
---
type: fix
issue: 5529
title: "When using a chained SearchParameter to search within a Bundle as [described here](https://smilecdr.com/docs/fhir_storage_relational/chained_searches_and_sorts.html#document-and-message-search-parameters), if the `Bundle.entry.fullUrl` was fully qualified but the reference was not, the search did not work. This has been corrected."

View File

@ -11,7 +11,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.11.2-SNAPSHOT</version>
<version>6.11.4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.11.2-SNAPSHOT</version>
<version>6.11.4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.11.2-SNAPSHOT</version>
<version>6.11.4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.11.2-SNAPSHOT</version>
<version>6.11.4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -977,27 +977,34 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
// Ugh. Only oracle supports using IDX_TAG_DEF_TP_CD_SYS to enforce this constraint. The others will
// create another index.
// For Sql Server, should change the index to be unique with include columns. Do this in 6.1
tagTable.dropIndex("20220429.8", "IDX_TAGDEF_TYPESYSCODE");
Map<DriverTypeEnum, String> addTagDefConstraint = new HashMap<>();
addTagDefConstraint.put(
DriverTypeEnum.H2_EMBEDDED,
"ALTER TABLE HFJ_TAG_DEF ADD CONSTRAINT IDX_TAGDEF_TYPESYSCODE UNIQUE (TAG_TYPE, TAG_CODE, TAG_SYSTEM)");
addTagDefConstraint.put(
DriverTypeEnum.MARIADB_10_1,
"ALTER TABLE HFJ_TAG_DEF ADD CONSTRAINT IDX_TAGDEF_TYPESYSCODE UNIQUE (TAG_TYPE, TAG_CODE, TAG_SYSTEM)");
addTagDefConstraint.put(
DriverTypeEnum.MSSQL_2012,
"ALTER TABLE HFJ_TAG_DEF ADD CONSTRAINT IDX_TAGDEF_TYPESYSCODE UNIQUE (TAG_TYPE, TAG_CODE, TAG_SYSTEM)");
addTagDefConstraint.put(
DriverTypeEnum.MYSQL_5_7,
"ALTER TABLE HFJ_TAG_DEF ADD CONSTRAINT IDX_TAGDEF_TYPESYSCODE UNIQUE (TAG_TYPE, TAG_CODE, TAG_SYSTEM)");
addTagDefConstraint.put(
DriverTypeEnum.ORACLE_12C,
"ALTER TABLE HFJ_TAG_DEF ADD CONSTRAINT IDX_TAGDEF_TYPESYSCODE UNIQUE (TAG_TYPE, TAG_CODE, TAG_SYSTEM)");
addTagDefConstraint.put(
DriverTypeEnum.POSTGRES_9_4,
"ALTER TABLE HFJ_TAG_DEF ADD CONSTRAINT IDX_TAGDEF_TYPESYSCODE UNIQUE (TAG_TYPE, TAG_CODE, TAG_SYSTEM)");
version.executeRawSql("20220429.9", addTagDefConstraint);
// tagTable.dropIndex("20220429.8", "IDX_TAGDEF_TYPESYSCODE");
// Map<DriverTypeEnum, String> addTagDefConstraint = new HashMap<>();
// addTagDefConstraint.put(
// DriverTypeEnum.H2_EMBEDDED,
// "ALTER TABLE HFJ_TAG_DEF ADD CONSTRAINT IDX_TAGDEF_TYPESYSCODE UNIQUE (TAG_TYPE, TAG_CODE,
// TAG_SYSTEM)");
// addTagDefConstraint.put(
// DriverTypeEnum.MARIADB_10_1,
// "ALTER TABLE HFJ_TAG_DEF ADD CONSTRAINT IDX_TAGDEF_TYPESYSCODE UNIQUE (TAG_TYPE, TAG_CODE,
// TAG_SYSTEM)");
// addTagDefConstraint.put(
// DriverTypeEnum.MSSQL_2012,
// "ALTER TABLE HFJ_TAG_DEF ADD CONSTRAINT IDX_TAGDEF_TYPESYSCODE UNIQUE (TAG_TYPE, TAG_CODE,
// TAG_SYSTEM)");
// addTagDefConstraint.put(
// DriverTypeEnum.MYSQL_5_7,
// "ALTER TABLE HFJ_TAG_DEF ADD CONSTRAINT IDX_TAGDEF_TYPESYSCODE UNIQUE (TAG_TYPE, TAG_CODE,
// TAG_SYSTEM)");
// addTagDefConstraint.put(
// DriverTypeEnum.ORACLE_12C,
// "ALTER TABLE HFJ_TAG_DEF ADD CONSTRAINT IDX_TAGDEF_TYPESYSCODE UNIQUE (TAG_TYPE, TAG_CODE,
// TAG_SYSTEM)");
// addTagDefConstraint.put(
// DriverTypeEnum.POSTGRES_9_4,
// "ALTER TABLE HFJ_TAG_DEF ADD CONSTRAINT IDX_TAGDEF_TYPESYSCODE UNIQUE (TAG_TYPE, TAG_CODE,
// TAG_SYSTEM)");
// version.executeRawSql("20220429.9", addTagDefConstraint);
version.addNop("20220429.9");
}
// Fix for https://github.com/hapifhir/hapi-fhir-jpaserver-starter/issues/328

View File

@ -125,7 +125,7 @@ public class PersistedJpaBundleProvider implements IBundleProvider {
* of this class, since it's a prototype
*/
private Search mySearchEntity;
private String myUuid;
private final String myUuid;
private SearchCacheStatusEnum myCacheStatus;
private RequestPartitionId myRequestPartitionId;
@ -259,13 +259,21 @@ public class PersistedJpaBundleProvider implements IBundleProvider {
final ISearchBuilder sb = mySearchBuilderFactory.newSearchBuilder(dao, resourceName, resourceType);
RequestPartitionId requestPartitionId = getRequestPartitionId();
final List<JpaPid> pidsSubList =
mySearchCoordinatorSvc.getResources(myUuid, theFromIndex, theToIndex, myRequest, requestPartitionId);
// we request 1 more resource than we need
// this is so we can be sure of when we hit the last page
// (when doing offset searches)
final List<JpaPid> pidsSubList = mySearchCoordinatorSvc.getResources(
myUuid, theFromIndex, theToIndex + 1, myRequest, requestPartitionId);
// max list size should be either the entire list, or from - to length
int maxSize = Math.min(theToIndex - theFromIndex, pidsSubList.size());
theResponsePageBuilder.setTotalRequestedResourcesFetched(pidsSubList.size());
List<JpaPid> firstBatchOfPids = pidsSubList.subList(0, maxSize);
List<IBaseResource> resources = myTxService
.withRequest(myRequest)
.withRequestPartitionId(requestPartitionId)
.execute(() -> {
return toResourceList(sb, pidsSubList, theResponsePageBuilder);
return toResourceList(sb, firstBatchOfPids, theResponsePageBuilder);
});
return resources;
@ -541,8 +549,8 @@ public class PersistedJpaBundleProvider implements IBundleProvider {
// this can (potentially) change the results being returned.
int precount = resources.size();
resources = ServerInterceptorUtil.fireStoragePreshowResource(resources, myRequest, myInterceptorBroadcaster);
// we only care about omitted results from *this* page
theResponsePageBuilder.setToOmittedResourceCount(precount - resources.size());
// we only care about omitted results from this page
theResponsePageBuilder.setOmittedResourceCount(precount - resources.size());
theResponsePageBuilder.setResources(resources);
theResponsePageBuilder.setIncludedResourceCount(includedPidList.size());

View File

@ -73,16 +73,23 @@ public class PersistedJpaSearchFirstPageBundleProvider extends PersistedJpaBundl
mySearchTask.awaitInitialSync();
// request 1 more than we need to, in order to know if there are extra values
ourLog.trace("Fetching search resource PIDs from task: {}", mySearchTask.getClass());
final List<JpaPid> pids = mySearchTask.getResourcePids(theFromIndex, theToIndex);
final List<JpaPid> pids = mySearchTask.getResourcePids(theFromIndex, theToIndex + 1);
ourLog.trace("Done fetching search resource PIDs");
int countOfPids = pids.size();
;
int maxSize = Math.min(theToIndex - theFromIndex, countOfPids);
thePageBuilder.setTotalRequestedResourcesFetched(countOfPids);
RequestPartitionId requestPartitionId = getRequestPartitionId();
List<JpaPid> firstBatch = pids.subList(0, maxSize);
List<IBaseResource> retVal = myTxService
.withRequest(myRequest)
.withRequestPartitionId(requestPartitionId)
.execute(() -> toResourceList(mySearchBuilder, pids, thePageBuilder));
.execute(() -> toResourceList(mySearchBuilder, firstBatch, thePageBuilder));
long totalCountWanted = theToIndex - theFromIndex;
long totalCountMatch = (int) retVal.stream().filter(t -> !isInclude(t)).count();
@ -103,12 +110,15 @@ public class PersistedJpaSearchFirstPageBundleProvider extends PersistedJpaBundl
long remainingWanted = totalCountWanted - totalCountMatch;
long fromIndex = theToIndex - remainingWanted;
List<IBaseResource> remaining = super.getResources((int) fromIndex, theToIndex, thePageBuilder);
ResponsePage.ResponsePageBuilder pageBuilder = new ResponsePage.ResponsePageBuilder();
pageBuilder.setBundleProvider(this);
List<IBaseResource> remaining = super.getResources((int) fromIndex, theToIndex, pageBuilder);
remaining.forEach(t -> {
if (!existingIds.contains(t.getIdElement().getValue())) {
retVal.add(t);
}
});
thePageBuilder.combineWith(pageBuilder);
}
}
ourLog.trace("Loaded resources to return");

View File

@ -115,7 +115,7 @@ public class SynchronousSearchSvcImpl implements ISynchronousSearchSvc {
.execute(() -> {
// Load the results synchronously
final List<JpaPid> pids = new ArrayList<>();
List<JpaPid> pids = new ArrayList<>();
Long count = 0L;
if (wantCount) {
@ -145,8 +145,17 @@ public class SynchronousSearchSvcImpl implements ISynchronousSearchSvc {
return bundleProvider;
}
// if we have a count, we'll want to request
// additional resources
SearchParameterMap clonedParams = theParams.clone();
Integer requestedCount = clonedParams.getCount();
boolean hasACount = requestedCount != null;
if (hasACount) {
clonedParams.setCount(requestedCount.intValue() + 1);
}
try (IResultIterator<JpaPid> resultIter = theSb.createQuery(
theParams, searchRuntimeDetails, theRequestDetails, theRequestPartitionId)) {
clonedParams, searchRuntimeDetails, theRequestDetails, theRequestPartitionId)) {
while (resultIter.hasNext()) {
pids.add(resultIter.next());
if (theLoadSynchronousUpTo != null && pids.size() >= theLoadSynchronousUpTo) {
@ -162,6 +171,15 @@ public class SynchronousSearchSvcImpl implements ISynchronousSearchSvc {
throw new InternalErrorException(Msg.code(1164) + e);
}
// truncate the list we retrieved - if needed
int receivedResourceCount = -1;
if (hasACount) {
// we want the accurate received resource count
receivedResourceCount = pids.size();
int resourcesToReturn = Math.min(theParams.getCount(), pids.size());
pids = pids.subList(0, resourcesToReturn);
}
JpaPreResourceAccessDetails accessDetails = new JpaPreResourceAccessDetails(pids, () -> theSb);
HookParams params = new HookParams()
.add(IPreResourceAccessDetails.class, accessDetails)
@ -228,6 +246,9 @@ public class SynchronousSearchSvcImpl implements ISynchronousSearchSvc {
resources, theRequestDetails, myInterceptorBroadcaster);
SimpleBundleProvider bundleProvider = new SimpleBundleProvider(resources);
if (hasACount) {
bundleProvider.setTotalResourcesRequestedReturned(receivedResourceCount);
}
if (theParams.isOffsetQuery()) {
bundleProvider.setCurrentPageOffset(theParams.getOffset());
bundleProvider.setCurrentPageSize(theParams.getCount());

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.11.2-SNAPSHOT</version>
<version>6.11.4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -3,7 +3,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.11.2-SNAPSHOT</version>
<version>6.11.4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -3,7 +3,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.11.2-SNAPSHOT</version>
<version>6.11.4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.11.2-SNAPSHOT</version>
<version>6.11.4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.11.2-SNAPSHOT</version>
<version>6.11.4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.11.2-SNAPSHOT</version>
<version>6.11.4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -63,6 +63,7 @@ import org.hl7.fhir.instance.model.api.IBaseReference;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
import org.hl7.fhir.r4.model.IdType;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
@ -2010,17 +2011,31 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor
* references within a Bundle
*/
if (theAppContext instanceof IBaseBundle && isNotBlank(theUrl) && !theUrl.startsWith("#")) {
String unqualifiedVersionlessReference;
boolean isPlaceholderReference;
if (theUrl.startsWith("urn:")) {
isPlaceholderReference = true;
unqualifiedVersionlessReference = null;
} else {
isPlaceholderReference = false;
unqualifiedVersionlessReference =
new IdType(theUrl).toUnqualifiedVersionless().getValue();
}
List<BundleEntryParts> entries = BundleUtil.toListOfEntries(getContext(), (IBaseBundle) theAppContext);
for (BundleEntryParts next : entries) {
if (next.getResource() != null) {
if (theUrl.startsWith("urn:uuid:")) {
if (isPlaceholderReference) {
if (theUrl.equals(next.getUrl())
|| theUrl.equals(
next.getResource().getIdElement().getValue())) {
return (T) next.getResource();
}
} else {
if (theUrl.equals(next.getResource().getIdElement().getValue())) {
if (unqualifiedVersionlessReference.equals(next.getResource()
.getIdElement()
.toUnqualifiedVersionless()
.getValue())) {
return (T) next.getResource();
}
}

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.11.2-SNAPSHOT</version>
<version>6.11.4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.11.2-SNAPSHOT</version>
<version>6.11.4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -42,14 +42,17 @@ public class SynchronousSearchSvcImplTest extends BaseSearchSvc {
@Test
public void testSynchronousSearch() {
when(mySearchBuilderFactory.newSearchBuilder(any(), any(), any())).thenReturn(mySearchBuilder);
when(mySearchBuilderFactory.newSearchBuilder(any(), any(), any()))
.thenReturn(mySearchBuilder);
SearchParameterMap params = new SearchParameterMap();
List<JpaPid> pids = createPidSequence(800);
when(mySearchBuilder.createQuery(same(params), any(), any(), nullable(RequestPartitionId.class))).thenReturn(new BaseSearchSvc.ResultIterator(pids.iterator()));
when(mySearchBuilder.createQuery(any(SearchParameterMap.class), any(), any(), nullable(RequestPartitionId.class)))
.thenReturn(new BaseSearchSvc.ResultIterator(pids.iterator()));
doAnswer(loadPids()).when(mySearchBuilder).loadResourcesByPid(any(Collection.class), any(Collection.class), any(List.class), anyBoolean(), any());
doAnswer(loadPids()).when(mySearchBuilder)
.loadResourcesByPid(any(Collection.class), any(Collection.class), any(List.class), anyBoolean(), any());
IBundleProvider result = mySynchronousSearchSvc.executeQuery( "Patient", params, RequestPartitionId.allPartitions());
assertNull(result.getUuid());
@ -71,8 +74,8 @@ public class SynchronousSearchSvcImplTest extends BaseSearchSvc {
params.setSearchTotalMode(SearchTotalModeEnum.ACCURATE);
List<JpaPid> pids = createPidSequence(30);
when(mySearchBuilder.createCountQuery(same(params), any(String.class),nullable(RequestDetails.class), nullable(RequestPartitionId.class))).thenReturn(20L);
when(mySearchBuilder.createQuery(same(params), any(), nullable(RequestDetails.class), nullable(RequestPartitionId.class))).thenReturn(new BaseSearchSvc.ResultIterator(pids.subList(10, 20).iterator()));
when(mySearchBuilder.createCountQuery(any(SearchParameterMap.class), any(String.class),nullable(RequestDetails.class), nullable(RequestPartitionId.class))).thenReturn(20L);
when(mySearchBuilder.createQuery(any(SearchParameterMap.class), any(), nullable(RequestDetails.class), nullable(RequestPartitionId.class))).thenReturn(new BaseSearchSvc.ResultIterator(pids.subList(10, 20).iterator()));
doAnswer(loadPids()).when(mySearchBuilder).loadResourcesByPid(any(Collection.class), any(Collection.class), any(List.class), anyBoolean(), any());
@ -92,7 +95,8 @@ public class SynchronousSearchSvcImplTest extends BaseSearchSvc {
params.setLoadSynchronousUpTo(100);
List<JpaPid> pids = createPidSequence(800);
when(mySearchBuilder.createQuery(same(params), any(), nullable(RequestDetails.class), nullable(RequestPartitionId.class))).thenReturn(new BaseSearchSvc.ResultIterator(pids.iterator()));
when(mySearchBuilder.createQuery(any(SearchParameterMap.class), any(), nullable(RequestDetails.class), nullable(RequestPartitionId.class)))
.thenReturn(new BaseSearchSvc.ResultIterator(pids.iterator()));
pids = createPidSequence(110);
List<JpaPid> finalPids = pids;

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.11.2-SNAPSHOT</version>
<version>6.11.4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.11.2-SNAPSHOT</version>
<version>6.11.4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -29,11 +29,11 @@ import org.slf4j.LoggerFactory;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit.jupiter.SpringExtension;
import javax.servlet.ServletException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Function;
import java.util.stream.Collectors;
import static org.apache.commons.lang3.StringUtils.leftPad;
@ -54,7 +54,7 @@ public class ConsentEventsDaoR4Test extends BaseJpaR4SystemTest {
private List<String> myPatientIds;
private List<String> myObservationIdsOddOnly;
private List<String> myObservationIdsEvenOnly;
private List<String> myObservationIdsWithVersions;
private List<String> myObservationIdsWithoutVersions;
private List<String> myPatientIdsEvenOnly;
@AfterEach
@ -64,13 +64,16 @@ public class ConsentEventsDaoR4Test extends BaseJpaR4SystemTest {
}
@BeforeEach
public void before() throws ServletException {
@Override
public void beforeInitMocks() throws Exception {
super.beforeInitMocks();
RestfulServer restfulServer = new RestfulServer();
restfulServer.setPagingProvider(myPagingProvider);
when(mySrd.getServer()).thenReturn(restfulServer);
myStorageSettings.setSearchPreFetchThresholds(Arrays.asList(20, 50, 190));
restfulServer.setDefaultPageSize(null);
}
@Test
@ -147,6 +150,7 @@ public class ConsentEventsDaoR4Test extends BaseJpaR4SystemTest {
@Test
public void testSearchAndBlockSome_LoadSynchronous() {
// setup
create50Observations();
AtomicInteger hitCount = new AtomicInteger(0);
@ -281,6 +285,7 @@ public class ConsentEventsDaoR4Test extends BaseJpaR4SystemTest {
@Test
public void testSearchAndBlockSomeOnIncludes_LoadSynchronous() {
// setup
create50Observations();
AtomicInteger hitCount = new AtomicInteger(0);
@ -328,9 +333,8 @@ public class ConsentEventsDaoR4Test extends BaseJpaR4SystemTest {
* returned results because we create it then update it in create50Observations()
*/
assertEquals(1, hitCount.get());
assertEquals(myObservationIdsWithVersions.subList(90, myObservationIdsWithVersions.size()), sort(interceptedResourceIds));
assertEquals(sort(myObservationIdsWithoutVersions.subList(90, myObservationIdsWithoutVersions.size())), sort(interceptedResourceIds));
returnedIdValues.forEach(t -> assertTrue(new IdType(t).getIdPartAsLong() % 2 == 0));
}
@Test
@ -363,7 +367,7 @@ public class ConsentEventsDaoR4Test extends BaseJpaR4SystemTest {
private void create50Observations() {
myPatientIds = new ArrayList<>();
myObservationIds = new ArrayList<>();
myObservationIdsWithVersions = new ArrayList<>();
myObservationIdsWithoutVersions = new ArrayList<>();
Patient p = new Patient();
p.setActive(true);
@ -383,9 +387,9 @@ public class ConsentEventsDaoR4Test extends BaseJpaR4SystemTest {
final Observation obs1 = new Observation();
obs1.setStatus(Observation.ObservationStatus.FINAL);
obs1.addIdentifier().setSystem("urn:system").setValue("I" + leftPad("" + i, 5, '0'));
IIdType obs1id = myObservationDao.create(obs1).getId().toUnqualifiedVersionless();
IIdType obs1id = myObservationDao.create(obs1).getId();
myObservationIds.add(obs1id.toUnqualifiedVersionless().getValue());
myObservationIdsWithVersions.add(obs1id.toUnqualifiedVersionless().getValue());
myObservationIdsWithoutVersions.add(obs1id.toUnqualifiedVersionless().getValue());
obs1.setId(obs1id);
if (obs1id.getIdPartAsLong() % 2 == 0) {
@ -394,7 +398,7 @@ public class ConsentEventsDaoR4Test extends BaseJpaR4SystemTest {
obs1.getSubject().setReference(oddPid);
}
myObservationDao.update(obs1);
myObservationIdsWithVersions.add(obs1id.toUnqualifiedVersionless().getValue());
myObservationIdsWithoutVersions.add(obs1id.toUnqualifiedVersionless().getValue());
}
@ -483,14 +487,24 @@ public class ConsentEventsDaoR4Test extends BaseJpaR4SystemTest {
}
}
private static List<String> sort(List<String>... theLists) {
private List<String> sort(List<String>... theLists) {
return sort(id -> {
String idParsed = id.substring(id.indexOf("/") + 1);
if (idParsed.contains("/_history")) {
idParsed = idParsed.substring(0, idParsed.indexOf("/"));
}
return Long.parseLong(idParsed);
}, theLists);
}
private List<String> sort(Function<String, Long> theParser, List<String>... theLists) {
ArrayList<String> retVal = new ArrayList<>();
for (List<String> next : theLists) {
retVal.addAll(next);
}
retVal.sort((o0, o1) -> {
long i0 = Long.parseLong(o0.substring(o0.indexOf('/') + 1));
long i1 = Long.parseLong(o1.substring(o1.indexOf('/') + 1));
long i0 = theParser.apply(o0);
long i1 = theParser.apply(o1);
return (int) (i0 - i1);
});
return retVal;

View File

@ -114,6 +114,7 @@ import static org.hamcrest.Matchers.not;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
import static org.mockito.ArgumentMatchers.eq;
@ -1229,6 +1230,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
nextChunk.forEach(t -> foundIds.add(t.getIdElement().toUnqualifiedVersionless().getValue()));
}
assertEquals(ids.size(), foundIds.size());
ids.sort(new ComparableComparator<>());
foundIds.sort(new ComparableComparator<>());
assertEquals(ids, foundIds);
@ -1327,7 +1329,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
myCaptureQueriesListener.logSelectQueries();
assertEquals(2, myCaptureQueriesListener.countSelectQueries());
assertThat(myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false), containsString("SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0"));
assertThat(myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false), containsString("limit '5'"));
assertThat(myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false), containsString("limit '6'"));
assertEquals(0, myCaptureQueriesListener.countInsertQueries());
assertEquals(0, myCaptureQueriesListener.countUpdateQueries());
assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
@ -1343,7 +1345,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
myCaptureQueriesListener.logSelectQueries();
assertEquals(2, myCaptureQueriesListener.countSelectQueries());
assertThat(myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false), containsString("SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0"));
assertThat(myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false), containsString("limit '5'"));
assertThat(myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false), containsString("limit '6'"));
assertThat(myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false), containsString("offset '5'"));
assertEquals(0, myCaptureQueriesListener.countInsertQueries());
assertEquals(0, myCaptureQueriesListener.countUpdateQueries());
@ -1351,22 +1353,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
assertEquals(1, myCaptureQueriesListener.countCommits());
assertEquals(0, myCaptureQueriesListener.countRollbacks());
assertThat(outcome.getLink("next").getUrl(), containsString("Patient?_count=5&_offset=10&active=true"));
// Third page (no results)
myCaptureQueriesListener.clear();
outcome = myClient.search().forResource("Patient").where(Patient.ACTIVE.exactly().code("true")).offset(10).count(5).returnBundle(Bundle.class).execute();
assertThat(toUnqualifiedVersionlessIdValues(outcome).toString(), toUnqualifiedVersionlessIdValues(outcome), empty());
myCaptureQueriesListener.logSelectQueries();
assertEquals(1, myCaptureQueriesListener.countSelectQueries());
assertThat(myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false), containsString("SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0"));
assertThat(myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false), containsString("limit '5'"));
assertThat(myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false), containsString("offset '10'"));
assertEquals(0, myCaptureQueriesListener.countInsertQueries());
assertEquals(0, myCaptureQueriesListener.countUpdateQueries());
assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
assertNull(outcome.getLink("next"));
}

View File

@ -5793,8 +5793,15 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
* [base]/Bundle?composition.patient.identifier=foo
*/
@ParameterizedTest
@CsvSource({"urn:uuid:5c34dc2c-9b5d-4ec1-b30b-3e2d4371508b", "Patient/ABC"})
public void testCreateAndSearchForFullyChainedSearchParameter(String thePatientId) {
@CsvSource({
"true , urn:uuid:5c34dc2c-9b5d-4ec1-b30b-3e2d4371508b , urn:uuid:5c34dc2c-9b5d-4ec1-b30b-3e2d4371508b",
"false, urn:uuid:5c34dc2c-9b5d-4ec1-b30b-3e2d4371508b , urn:uuid:5c34dc2c-9b5d-4ec1-b30b-3e2d4371508b",
"true , Patient/ABC , Patient/ABC ",
"false, Patient/ABC , Patient/ABC ",
"true , Patient/ABC , http://example.com/fhir/Patient/ABC ",
"false, Patient/ABC , http://example.com/fhir/Patient/ABC ",
})
public void testCreateAndSearchForFullyChainedSearchParameter(boolean theUseFullChainInName, String thePatientId, String theFullUrl) {
// Setup 1
myStorageSettings.setIndexMissingFields(JpaStorageSettings.IndexEnabledEnum.DISABLED);
@ -5819,13 +5826,18 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
composition.setSubject(new Reference(thePatientId));
Patient patient = new Patient();
patient.setId(new IdType(thePatientId));
patient.setId(new IdType(theFullUrl));
patient.addIdentifier().setSystem("http://foo").setValue("bar");
Bundle bundle = new Bundle();
bundle.setType(Bundle.BundleType.DOCUMENT);
bundle.addEntry().setResource(composition);
bundle.addEntry().setResource(patient);
bundle
.addEntry()
.setResource(composition);
bundle
.addEntry()
.setFullUrl(theFullUrl)
.setResource(patient);
myBundleDao.create(bundle, mySrd);
@ -5833,35 +5845,40 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
bundle2.setType(Bundle.BundleType.DOCUMENT);
myBundleDao.create(bundle2, mySrd);
// Verify 1
runInTransaction(() -> {
// Test
SearchParameterMap map;
if (theUseFullChainInName) {
map = SearchParameterMap.newSynchronous("composition.patient.identifier", new TokenParam("http://foo", "bar"));
} else {
map = SearchParameterMap.newSynchronous("composition", new ReferenceParam("patient.identifier", "http://foo|bar"));
}
IBundleProvider outcome = myBundleDao.search(map, mySrd);
// Verify
List<String> params = extractAllTokenIndexes();
assertThat(params.toString(), params, containsInAnyOrder(
"composition.patient.identifier http://foo|bar"
));
assertEquals(1, outcome.size());
}
private List<String> extractAllTokenIndexes() {
List<String> params = runInTransaction(() -> {
logAllTokenIndexes();
List<String> params = myResourceIndexedSearchParamTokenDao
return myResourceIndexedSearchParamTokenDao
.findAll()
.stream()
.filter(t -> t.getParamName().contains("."))
.map(t -> t.getParamName() + " " + t.getSystem() + "|" + t.getValue())
.toList();
assertThat(params.toString(), params, containsInAnyOrder(
"composition.patient.identifier http://foo|bar"
));
});
// Test 2
IBundleProvider outcome;
SearchParameterMap map = SearchParameterMap
.newSynchronous("composition.patient.identifier", new TokenParam("http://foo", "bar"));
outcome = myBundleDao.search(map, mySrd);
assertEquals(1, outcome.size());
map = SearchParameterMap
.newSynchronous("composition", new ReferenceParam("patient.identifier", "http://foo|bar"));
outcome = myBundleDao.search(map, mySrd);
assertEquals(1, outcome.size());
return params;
}
@Nested
public class TagBelowTests {

View File

@ -13,10 +13,7 @@ import org.junit.jupiter.api.Test;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.emptyOrNullString;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNull;
@ -66,7 +63,7 @@ public class ForceOffsetSearchModeInterceptorTest extends BaseResourceProviderR4
myCaptureQueriesListener.logSelectQueries();
assertEquals(2, myCaptureQueriesListener.countSelectQueries());
assertThat(myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false), containsString("SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0"));
assertThat(myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false), containsString("limit '5'"));
assertThat(myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false), containsString("limit '6'"));
assertEquals(0, myCaptureQueriesListener.countInsertQueries());
assertEquals(0, myCaptureQueriesListener.countUpdateQueries());
assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
@ -91,7 +88,7 @@ public class ForceOffsetSearchModeInterceptorTest extends BaseResourceProviderR4
myCaptureQueriesListener.logSelectQueries();
assertEquals(2, myCaptureQueriesListener.countSelectQueries());
assertThat(myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false), containsString("SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0"));
assertThat(myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false), containsString("limit '5'"));
assertThat(myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false), containsString("limit '6'"));
assertThat(myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false), containsString("offset '5'"));
assertEquals(0, myCaptureQueriesListener.countInsertQueries());
assertEquals(0, myCaptureQueriesListener.countUpdateQueries());
@ -99,31 +96,7 @@ public class ForceOffsetSearchModeInterceptorTest extends BaseResourceProviderR4
assertEquals(1, myCaptureQueriesListener.countCommits());
assertEquals(0, myCaptureQueriesListener.countRollbacks());
assertThat(outcome.getLink("next").getUrl(), containsString("Patient?_count=5&_offset=10&active=true"));
// Third page (no results)
myCaptureQueriesListener.clear();
Bundle outcome3 = myClient
.search()
.forResource("Patient")
.where(Patient.ACTIVE.exactly().code("true"))
.offset(10)
.count(5)
.returnBundle(Bundle.class)
.execute();
assertThat(toUnqualifiedVersionlessIdValues(outcome3).toString(), toUnqualifiedVersionlessIdValues(outcome3), empty());
myCaptureQueriesListener.logSelectQueries();
assertEquals(1, myCaptureQueriesListener.countSelectQueries());
assertThat(myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false), containsString("SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0"));
assertThat(myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false), containsString("limit '5'"));
assertThat(myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false), containsString("offset '10'"));
assertEquals(0, myCaptureQueriesListener.countInsertQueries());
assertEquals(0, myCaptureQueriesListener.countUpdateQueries());
assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
assertNull(outcome3.getLink("next"), () -> outcome3.getLink("next").getUrl());
assertNull(outcome.getLink("next"));
}
@Test
@ -148,11 +121,7 @@ public class ForceOffsetSearchModeInterceptorTest extends BaseResourceProviderR4
assertThat(secondPageBundle.getEntry(), hasSize(5));
Bundle thirdPageBundle = myClient.loadPage().next(secondPageBundle).execute();
assertThat(thirdPageBundle.getEntry(), hasSize(0));
assertNull(thirdPageBundle.getLink("next"), () -> thirdPageBundle.getLink("next").getUrl());
assertNull(secondPageBundle.getLink("next"));
}
@ -180,7 +149,7 @@ public class ForceOffsetSearchModeInterceptorTest extends BaseResourceProviderR4
myCaptureQueriesListener.logSelectQueries();
assertEquals(2, myCaptureQueriesListener.countSelectQueries());
assertThat(myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false), containsString("SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0"));
assertThat(myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false), containsString("limit '7'"));
assertThat(myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false), containsString("limit '8'"));
assertEquals(0, myCaptureQueriesListener.countInsertQueries());
assertEquals(0, myCaptureQueriesListener.countUpdateQueries());
assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
@ -203,7 +172,7 @@ public class ForceOffsetSearchModeInterceptorTest extends BaseResourceProviderR4
myCaptureQueriesListener.logSelectQueries();
assertEquals(2, myCaptureQueriesListener.countSelectQueries());
assertThat(myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false), containsString("SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0"));
assertThat(myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false), containsString("limit '7'"));
assertThat(myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false), containsString("limit '8'"));
assertEquals(0, myCaptureQueriesListener.countInsertQueries());
assertEquals(0, myCaptureQueriesListener.countUpdateQueries());
assertEquals(0, myCaptureQueriesListener.countDeleteQueries());

View File

@ -484,7 +484,6 @@ public class ResourceProviderCustomSearchParamR4Test extends BaseResourceProvide
*/
@Test
public void testCustomParameterMatchingManyValues() {
List<String> found = new ArrayList<>();
class Interceptor {
@ -496,7 +495,6 @@ public class ResourceProviderCustomSearchParamR4Test extends BaseResourceProvide
Interceptor interceptor = new Interceptor();
myInterceptorRegistry.registerInterceptor(interceptor);
try {
int textIndex = 0;
List<Long> ids = new ArrayList<>();
for (int i = 0; i < 200; i++) {
@ -549,9 +547,8 @@ public class ResourceProviderCustomSearchParamR4Test extends BaseResourceProvide
ourLog.info("Found: {}", found);
runInTransaction(() -> {
List currentResults = myEntityManager.createNativeQuery("select distinct resourceta0_.RES_ID as col_0_0_ from HFJ_RESOURCE resourceta0_ left outer join HFJ_SPIDX_STRING myparamsst1_ on resourceta0_.RES_ID=myparamsst1_.RES_ID where myparamsst1_.HASH_NORM_PREFIX='5901791607832193956' and (myparamsst1_.SP_VALUE_NORMALIZED like 'SECTION%') limit '500'").getResultList();
List currentResources = myEntityManager.createNativeQuery("select resourceta0_.RES_ID as col_0_0_ from HFJ_RESOURCE resourceta0_").getResultList();
List<?> currentResults = myEntityManager.createNativeQuery("select distinct resourceta0_.RES_ID as col_0_0_ from HFJ_RESOURCE resourceta0_ left outer join HFJ_SPIDX_STRING myparamsst1_ on resourceta0_.RES_ID=myparamsst1_.RES_ID where myparamsst1_.HASH_NORM_PREFIX='5901791607832193956' and (myparamsst1_.SP_VALUE_NORMALIZED like 'SECTION%') limit '500'").getResultList();
List<?> currentResources = myEntityManager.createNativeQuery("select resourceta0_.RES_ID as col_0_0_ from HFJ_RESOURCE resourceta0_").getResultList();
List<Search> searches = mySearchEntityDao.findAll();
assertEquals(1, searches.size());

View File

@ -1012,7 +1012,6 @@ public class ResourceProviderR4EverythingTest extends BaseResourceProviderR4Test
assertThat(ids, containsInAnyOrder("Patient/FOO", "Observation/BAZ"));
}
@Test
public void testPagingOverEverythingSet() throws InterruptedException {
Patient p = new Patient();

View File

@ -25,11 +25,13 @@ import ca.uhn.fhir.model.primitive.InstantDt;
import ca.uhn.fhir.model.primitive.UriDt;
import ca.uhn.fhir.parser.IParser;
import ca.uhn.fhir.parser.StrictErrorHandler;
import ca.uhn.fhir.rest.api.CacheControlDirective;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.MethodOutcome;
import ca.uhn.fhir.rest.api.PreferReturnEnum;
import ca.uhn.fhir.rest.api.SearchTotalModeEnum;
import ca.uhn.fhir.rest.api.SummaryEnum;
import ca.uhn.fhir.rest.api.server.IRestfulServer;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.client.apache.ResourceEntity;
import ca.uhn.fhir.rest.client.api.IClientInterceptor;
@ -42,6 +44,7 @@ import ca.uhn.fhir.rest.gclient.NumberClientParam;
import ca.uhn.fhir.rest.gclient.StringClientParam;
import ca.uhn.fhir.rest.param.DateRangeParam;
import ca.uhn.fhir.rest.param.ParamPrefixEnum;
import ca.uhn.fhir.rest.server.IPagingProvider;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException;
@ -159,8 +162,10 @@ import org.junit.jupiter.api.Nested;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.CsvSource;
import org.junit.jupiter.params.provider.MethodSource;
import org.junit.jupiter.params.provider.ValueSource;
import org.mockito.Spy;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.util.AopTestUtils;
import org.springframework.transaction.TransactionStatus;
@ -220,6 +225,9 @@ import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.when;
@SuppressWarnings("Duplicates")
public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
@ -255,6 +263,8 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
myStorageSettings.setUpdateWithHistoryRewriteEnabled(false);
myStorageSettings.setPreserveRequestIdInResourceBody(false);
when(myPagingProvider.canStoreSearchResults())
.thenCallRealMethod();
}
@BeforeEach
@ -2718,6 +2728,90 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
assertEquals(total + 1, ids.size());
}
@ParameterizedTest
@CsvSource({
"true,19,10",
"false,19,10",
"true,20,0",
"false,20,0"
})
public void testPagingWithIncludesReturnsConsistentValues(
boolean theAllowStoringSearchResults,
int theResourceCount,
int theOrgCount
) {
// setup
// create resources
{
Coding tagCode = new Coding();
tagCode.setCode("test");
tagCode.setSystem("http://example.com");
int orgCount = theOrgCount;
for (int i = 0; i < theResourceCount; i++) {
Task t = new Task();
t.getMeta()
.addTag(tagCode);
t.setStatus(Task.TaskStatus.REQUESTED);
if (orgCount > 0) {
Organization org = new Organization();
org.setName("ORG");
IIdType orgId = myOrganizationDao.create(org).getId().toUnqualifiedVersionless();
orgCount--;
t.getOwner().setReference(orgId.getValue());
}
myTaskDao.create(t);
}
}
// when
if (!theAllowStoringSearchResults) {
// we don't actually allow this in our current
// pagingProvider implementations (except for history).
// But we will test with it because our ResponsePage
// is what's under test here
when(myPagingProvider.canStoreSearchResults())
.thenReturn(false);
}
int requestedAmount = 10;
Bundle bundle = myClient
.search()
.byUrl("Task?_count=10&_tag=test&status=requested&_include=Task%3Aowner&_sort=status")
.returnBundle(Bundle.class)
.execute();
int count = bundle.getEntry().size();
assertFalse(bundle.getEntry().isEmpty());
String nextUrl = null;
do {
Bundle.BundleLinkComponent nextLink = bundle.getLink("next");
if (nextLink != null) {
nextUrl = nextLink.getUrl();
// make sure we're always requesting 10
assertTrue(nextUrl.contains(String.format("_count=%d", requestedAmount)));
// get next batch
bundle = myClient.fetchResourceFromUrl(Bundle.class, nextUrl);
int received = bundle.getEntry().size();
// every next result should produce results
assertFalse(bundle.getEntry().isEmpty());
count += received;
} else {
nextUrl = null;
}
} while (nextUrl != null);
// verify
// we should receive all resources and linked resources
assertEquals(theResourceCount + theOrgCount, count);
}
@Test
public void testPagingWithIncludesReturnsConsistentValues() {
// setup
@ -3204,7 +3298,11 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
});
myCaptureQueriesListener.logAllQueriesForCurrentThread();
Bundle bundle = myClient.search().forResource("Patient").returnBundle(Bundle.class).execute();
Bundle bundle = myClient
.search()
.forResource("Patient")
.returnBundle(Bundle.class)
.execute();
ourLog.debug("Result: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(bundle));
assertEquals(2, bundle.getTotal());
assertEquals(1, bundle.getEntry().size());

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.11.2-SNAPSHOT</version>
<version>6.11.4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.11.2-SNAPSHOT</version>
<version>6.11.4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.11.2-SNAPSHOT</version>
<version>6.11.4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -215,7 +215,9 @@ import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.fail;
@ExtendWith(SpringExtension.class)
@ContextConfiguration(classes = {TestR4Config.class})
@ContextConfiguration(classes = {
TestR4Config.class
})
public abstract class BaseJpaR4Test extends BaseJpaTest implements ITestDataBuilder {
public static final String MY_VALUE_SET = "my-value-set";
public static final String URL_MY_VALUE_SET = "http://example.com/my_value_set";
@ -398,6 +400,7 @@ public abstract class BaseJpaR4Test extends BaseJpaTest implements ITestDataBuil
@Autowired
@Qualifier("myOrganizationAffiliationDaoR4")
protected IFhirResourceDao<OrganizationAffiliation> myOrganizationAffiliationDao;
@Autowired
protected DatabaseBackedPagingProvider myPagingProvider;
@Autowired

View File

@ -0,0 +1,21 @@
package ca.uhn.fhir.jpa.test.config;
import ca.uhn.fhir.jpa.config.HapiJpaConfig;
import ca.uhn.fhir.jpa.search.DatabaseBackedPagingProvider;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import static org.mockito.Mockito.spy;
/**
* This is a Test configuration class that allows spying underlying JpaConfigs beans
*/
@Configuration
public class TestHapiJpaConfig extends HapiJpaConfig {
@Override
@Bean
public DatabaseBackedPagingProvider databaseBackedPagingProvider() {
return spy(super.databaseBackedPagingProvider());
}
}

View File

@ -24,7 +24,6 @@ import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.batch2.JpaBatch2Config;
import ca.uhn.fhir.jpa.binary.api.IBinaryStorageSvc;
import ca.uhn.fhir.jpa.binstore.MemoryBinaryStorageSvcImpl;
import ca.uhn.fhir.jpa.config.HapiJpaConfig;
import ca.uhn.fhir.jpa.config.PackageLoaderConfig;
import ca.uhn.fhir.jpa.config.r4.JpaR4Config;
import ca.uhn.fhir.jpa.config.util.HapiEntityManagerFactoryUtil;
@ -65,7 +64,7 @@ import static org.junit.jupiter.api.Assertions.fail;
@Import({
JpaR4Config.class,
PackageLoaderConfig.class,
HapiJpaConfig.class,
TestHapiJpaConfig.class,
TestJPAConfig.class,
TestHSearchAddInConfig.DefaultLuceneHeap.class,
JpaBatch2Config.class,

View File

@ -0,0 +1,70 @@
package ca.uhn.fhir.jpa.migrate.taskdef;
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.test.util.LogbackCaptureTestExtension;
import ch.qos.logback.classic.Level;
import ch.qos.logback.classic.Logger;
import ch.qos.logback.classic.spi.ILoggingEvent;
import ch.qos.logback.classic.spi.LoggingEvent;
import oracle.jdbc.OracleDatabaseException;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.junit.jupiter.api.extension.RegisterExtension;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import org.springframework.jdbc.UncategorizedSQLException;
import org.springframework.transaction.TransactionException;
import org.springframework.transaction.support.TransactionTemplate;
import javax.sql.DataSource;
import java.sql.SQLException;
import java.util.Collections;
import java.util.List;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.hasSize;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.when;
@ExtendWith(MockitoExtension.class)
class AddIndexTaskTest {
	@Mock
	DriverTypeEnum.ConnectionProperties myConnectionProperties;

	@Mock
	DataSource myDataSource;

	@Mock
	TransactionTemplate myTransactionTemplate;

	// Captures WARN-and-above events emitted by AddIndexTask's logger so the
	// test can assert on what was logged instead of what was thrown.
	@RegisterExtension
	LogbackCaptureTestExtension myLogCapture = new LogbackCaptureTestExtension((Logger) AddIndexTask.ourLog, Level.WARN);

	/**
	 * Verifies that when the database reports the Oracle "column list already
	 * indexed" error (ORA-01408) during index creation, the task logs a
	 * warning containing that message rather than propagating the exception.
	 */
	@Test
	void testOracleException() throws SQLException {
		final AddIndexTask task = new AddIndexTask("1", "1");
		task.setColumns(Collections.singletonList("COLUMN_NAME"));
		task.setUnique(true);
		task.setIndexName("INDEX_NAME");
		task.setConnectionProperties(myConnectionProperties);

		when(myConnectionProperties.getDataSource()).thenReturn(myDataSource);
		when(myConnectionProperties.getTxTemplate()).thenReturn(myTransactionTemplate);
		final String sql = "create index INDEX_NAME on TABLE_NAME (COLUMN_NAME)";
		// Order matters: the first execute() call succeeds (returns an empty
		// set), the second throws the wrapped ORA-01408 SQLException.
		when(myTransactionTemplate.execute(any()))
			.thenReturn(Collections.emptySet())
			.thenThrow(new UncategorizedSQLException("ORA-01408: such column list already indexed", sql, new SQLException("ORA-01408: such column list already indexed", "72000", 1408)));

		myLogCapture.clearEvents();

		// Red-green: this used to throw an exception. Now it logs a warning.
		task.execute();

		// Exactly one WARN event is expected, and it must mention the
		// original Oracle error message.
		List<ILoggingEvent> events = myLogCapture.getLogEvents();
		assertThat(events, hasSize(1));
		LoggingEvent event = (LoggingEvent) events.get(0);
		assertThat(event.getFormattedMessage(), containsString("ORA-01408: such column list already indexed"));
	}
}

View File

@ -0,0 +1,115 @@
package ca.uhn.fhir.jpa.util;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
import ca.uhn.fhir.jpa.test.BaseJpaTest;
import ca.uhn.fhir.jpa.test.config.TestR4Config;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.Observation;
import org.hl7.fhir.r4.model.Patient;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit.jupiter.SpringExtension;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.annotation.Propagation;
import java.util.concurrent.atomic.AtomicReference;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertThrows;
@ExtendWith(SpringExtension.class)
@ContextConfiguration(classes = {
	TestR4Config.class
})
public class JpaHapiTransactionServiceTest extends BaseJpaTest {
	@Autowired PlatformTransactionManager myTxManager;
	@Autowired IFhirResourceDao<Patient> myPatientDao;
	@Autowired IFhirResourceDao<Observation> myObservationDao;
	SystemRequestDetails myRequestDetails = new SystemRequestDetails();

	// Hold ids assigned inside the transaction callbacks so the assertions
	// after rollback can still reference them.
	final AtomicReference<IIdType> myObservationId = new AtomicReference<>();
	final AtomicReference<IIdType> myPatientId = new AtomicReference<>();

	@Override
	protected FhirContext getFhirContext() {
		return myFhirContext;
	}

	@Override
	protected PlatformTransactionManager getTxManager() {
		return myTxManager;
	}

	@Autowired
	HapiTransactionService myHapiTransactionService;

	/**
	 * A REQUIRES_NEW inner transaction commits independently: rolling back the
	 * outer REQUIRED transaction undoes the Observation but not the Patient
	 * created in the inner transaction.
	 */
	@Test
	void testNewTransactionCommitInsideOldTransactionRollback() {
		try {
			myHapiTransactionService.withSystemRequest().withPropagation(Propagation.REQUIRED).execute(()->{
				myObservationId.set(myObservationDao.create(new Observation(), myRequestDetails).getId());
				myHapiTransactionService.withSystemRequest().withPropagation(Propagation.REQUIRES_NEW)
					.execute(()-> myPatientId.set(myPatientDao.create(new Patient(), myRequestDetails).getId()));
				// roll back the Observation. The Patient has committed
				throw new RuntimeException("roll back the Observation.");
			});
		} catch (RuntimeException e) {
			// expected
		}
		assertNotFound(myObservationDao, myObservationId.get());
		assertFound(myPatientDao, myPatientId.get());
	}

	/**
	 * A nested REQUIRED transaction joins the outer one: rolling back the
	 * outer transaction undoes both the Observation and the Patient.
	 */
	@Test
	void testRequiredTransactionCommitInsideExistingTx_rollsBackWithMainTx() {
		// given
		try {
			myHapiTransactionService.withSystemRequest().withPropagation(Propagation.REQUIRED).execute(()->{
				myObservationId.set(myObservationDao.create(new Observation(), myRequestDetails).getId());
				myHapiTransactionService.withSystemRequest().withPropagation(Propagation.REQUIRED).execute(()-> myPatientId.set(myPatientDao.create(new Patient(), myRequestDetails).getId()));
				throw new RuntimeException("roll back both.");
			});
		} catch (RuntimeException e) {
			// expected
		}
		assertNotFound(myObservationDao, myObservationId.get());
		assertNotFound(myPatientDao, myPatientId.get());
	}

	/**
	 * Marking the transaction status rollback-only must prevent the commit
	 * even though the callback returns normally.
	 */
	@Test
	void testTransactionCommitRespectsRollbackOnly() {
		try {
			myHapiTransactionService.withSystemRequest().withPropagation(Propagation.REQUIRED).execute((theTransactionStatus)->{
				myObservationId.set(myObservationDao.create(new Observation(), myRequestDetails).getId());
				theTransactionStatus.setRollbackOnly();
				return null;
			});
		} catch (RuntimeException e) {
			// expected
		}
		assertNotFound(myObservationDao, myObservationId.get());
	}

	// Asserts the resource was rolled back (reading it now fails).
	void assertNotFound(IFhirResourceDao<?> theDao, IIdType id) {
		assertThrows(ResourceNotFoundException.class, ()-> theDao.read(id, myRequestDetails));
	}

	// Asserts the resource was committed (reading it now succeeds).
	void assertFound(IFhirResourceDao<?> theDao, IIdType theId) {
		assertNotNull(theDao.read(theId, myRequestDetails));
	}
}

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>6.11.2-SNAPSHOT</version>
<version>6.11.4-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -23,6 +23,7 @@ import ca.uhn.fhir.storage.interceptor.balp.IBalpAuditEventSink;
import ca.uhn.fhirtest.ScheduledSubscriptionDeleter;
import ca.uhn.fhirtest.interceptor.AnalyticsInterceptor;
import ca.uhn.fhirtest.joke.HolyFooCowInterceptor;
import ca.uhn.fhirtest.migrate.FhirTestAutoMigrator;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
@ -131,6 +132,11 @@ public class CommonConfig {
return new FhirTestBalpAuditContextServices();
}
@Bean
public FhirTestAutoMigrator migrator() {
return new FhirTestAutoMigrator();
}
public static boolean isLocalTestMode() {
return "true".equalsIgnoreCase(System.getProperty("testmode.local"));
}

View File

@ -120,7 +120,7 @@ public class TestAuditConfig {
}
extraProperties.put("hibernate.format_sql", "false");
extraProperties.put("hibernate.show_sql", "false");
extraProperties.put("hibernate.hbm2ddl.auto", "update");
extraProperties.put("hibernate.hbm2ddl.auto", "none");
extraProperties.put("hibernate.jdbc.batch_size", "20");
extraProperties.put("hibernate.cache.use_query_cache", "false");
extraProperties.put("hibernate.cache.use_second_level_cache", "false");

View File

@ -138,7 +138,7 @@ public class TestDstu2Config {
}
extraProperties.put("hibernate.format_sql", "false");
extraProperties.put("hibernate.show_sql", "false");
extraProperties.put("hibernate.hbm2ddl.auto", "update");
extraProperties.put("hibernate.hbm2ddl.auto", "none");
extraProperties.put("hibernate.jdbc.batch_size", "20");
extraProperties.put("hibernate.cache.use_query_cache", "false");
extraProperties.put("hibernate.cache.use_second_level_cache", "false");

View File

@ -138,7 +138,7 @@ public class TestDstu3Config {
}
extraProperties.put("hibernate.format_sql", "false");
extraProperties.put("hibernate.show_sql", "false");
extraProperties.put("hibernate.hbm2ddl.auto", "update");
extraProperties.put("hibernate.hbm2ddl.auto", "none");
extraProperties.put("hibernate.jdbc.batch_size", "20");
extraProperties.put("hibernate.cache.use_query_cache", "false");
extraProperties.put("hibernate.cache.use_second_level_cache", "false");

View File

@ -136,7 +136,7 @@ public class TestR4BConfig {
}
extraProperties.put("hibernate.format_sql", "false");
extraProperties.put("hibernate.show_sql", "false");
extraProperties.put("hibernate.hbm2ddl.auto", "update");
extraProperties.put("hibernate.hbm2ddl.auto", "none");
extraProperties.put("hibernate.jdbc.batch_size", "20");
extraProperties.put("hibernate.cache.use_query_cache", "false");
extraProperties.put("hibernate.cache.use_second_level_cache", "false");

View File

@ -139,7 +139,7 @@ public class TestR4Config {
}
extraProperties.put("hibernate.format_sql", "false");
extraProperties.put("hibernate.show_sql", "false");
extraProperties.put("hibernate.hbm2ddl.auto", "update");
extraProperties.put("hibernate.hbm2ddl.auto", "none");
extraProperties.put("hibernate.jdbc.batch_size", "20");
extraProperties.put("hibernate.cache.use_query_cache", "false");
extraProperties.put("hibernate.cache.use_second_level_cache", "false");

View File

@ -148,7 +148,7 @@ public class TestR5Config {
}
extraProperties.put("hibernate.format_sql", "false");
extraProperties.put("hibernate.show_sql", "false");
extraProperties.put("hibernate.hbm2ddl.auto", "update");
extraProperties.put("hibernate.hbm2ddl.auto", "none");
extraProperties.put("hibernate.jdbc.batch_size", "20");
extraProperties.put("hibernate.cache.use_query_cache", "false");
extraProperties.put("hibernate.cache.use_second_level_cache", "false");

View File

@ -0,0 +1,51 @@
package ca.uhn.fhirtest.migrate;
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.HapiMigrationStorageSvc;
import ca.uhn.fhir.jpa.migrate.MigrationTaskList;
import ca.uhn.fhir.jpa.migrate.SchemaMigrator;
import ca.uhn.fhir.jpa.migrate.dao.HapiMigrationDao;
import ca.uhn.fhir.jpa.migrate.tasks.HapiFhirJpaMigrationTasks;
import ca.uhn.fhir.util.VersionEnum;
import ca.uhn.fhirtest.config.CommonConfig;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import java.util.Properties;
import java.util.Set;
import javax.annotation.PostConstruct;
import javax.sql.DataSource;
public class FhirTestAutoMigrator {
	private static final Logger ourLog = LoggerFactory.getLogger(FhirTestAutoMigrator.class);

	// Name of the bookkeeping table that records which migration tasks have already been applied
	public static final String MIGRATION_TABLENAME = "MIGRATIONS";

	@Autowired
	private DataSource myDataSource;

	/**
	 * Runs the full set of HAPI FHIR JPA schema migrations against the
	 * configured datasource as soon as this bean is constructed, creating the
	 * migration bookkeeping table first if it does not yet exist.
	 */
	@PostConstruct
	public void run() {
		DriverTypeEnum driverType = selectDriverType();

		HapiMigrationDao migrationDao = new HapiMigrationDao(myDataSource, driverType, MIGRATION_TABLENAME);
		HapiMigrationStorageSvc storageSvc = new HapiMigrationStorageSvc(migrationDao);
		MigrationTaskList taskList = new HapiFhirJpaMigrationTasks(Set.of()).getAllTasks(VersionEnum.values());

		SchemaMigrator migrator = new SchemaMigrator(
				"HAPI FHIR", MIGRATION_TABLENAME, myDataSource, new Properties(), taskList, storageSvc);
		migrator.setDriverType(driverType);

		ourLog.info("About to run migration...");
		migrator.createMigrationTableIfRequired();
		migrator.migrate();
		ourLog.info("Migration complete");
	}

	// H2 when running in local test mode, otherwise the Postgres driver used by the public test server
	private static DriverTypeEnum selectDriverType() {
		return CommonConfig.isLocalTestMode() ? DriverTypeEnum.H2_EMBEDDED : DriverTypeEnum.POSTGRES_9_4;
	}
}

View File

@ -7,7 +7,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.11.2-SNAPSHOT</version>
<version>6.11.4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -7,7 +7,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.11.2-SNAPSHOT</version>
<version>6.11.4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.11.2-SNAPSHOT</version>
<version>6.11.4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.11.2-SNAPSHOT</version>
<version>6.11.4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -119,6 +119,7 @@ public interface IBundleProvider {
* server's processing rules (e.g. _include'd resources, OperationOutcome, etc.). For example,
* if the method is invoked with index 0,10 the method might return 10 search results, plus an
* additional 20 resources which matched a client's _include specification.
* </p>
* <p>
* Note that if this bundle provider was loaded using a
* page ID (i.e. via {@link ca.uhn.fhir.rest.server.IPagingProvider#retrieveResultList(RequestDetails, String, String)}

View File

@ -43,6 +43,15 @@ public class SimpleBundleProvider implements IBundleProvider {
private Integer myCurrentPageSize;
private ResponsePage.ResponsePageBuilder myPageBuilder;
/**
* The actual number of resources we have tried to fetch.
* This value will only be populated if there is a
* _count query parameter provided.
* In which case, it will be the total number of resources
* we tried to fetch (should be _count + 1 for accurate paging)
*/
private int myTotalResourcesRequestedReturned = -1;
/**
* Constructor
*/
@ -144,6 +153,7 @@ public class SimpleBundleProvider implements IBundleProvider {
@Override
public List<IBaseResource> getResources(
int theFromIndex, int theToIndex, @Nonnull ResponsePage.ResponsePageBuilder theResponsePageBuilder) {
theResponsePageBuilder.setTotalRequestedResourcesFetched(myTotalResourcesRequestedReturned);
return (List<IBaseResource>)
myList.subList(Math.min(theFromIndex, myList.size()), Math.min(theToIndex, myList.size()));
}
@ -153,6 +163,10 @@ public class SimpleBundleProvider implements IBundleProvider {
return myUuid;
}
public void setTotalResourcesRequestedReturned(int theAmount) {
myTotalResourcesRequestedReturned = theAmount;
}
/**
* Defaults to null
*/

View File

@ -105,8 +105,11 @@ public class ResponseBundleBuilder {
pageSize = pagingCalculatePageSize(requestedPage, server.getPagingProvider());
Integer size = bundleProvider.size();
numToReturn =
(size == null) ? pageSize : Math.min(pageSize, size.intValue() - theResponseBundleRequest.offset);
if (size == null) {
numToReturn = pageSize;
} else {
numToReturn = Math.min(pageSize, size.intValue() - theResponseBundleRequest.offset);
}
resourceList =
pagingBuildResourceList(theResponseBundleRequest, bundleProvider, numToReturn, responsePageBuilder);
@ -252,6 +255,7 @@ public class ResponseBundleBuilder {
RestfulServerUtils.prettyPrintResponse(server, theResponseBundleRequest.requestDetails),
theResponseBundleRequest.bundleType);
// set self link
retval.setSelf(theResponseBundleRequest.linkSelf);
// determine if we are using offset / uncached pages

View File

@ -71,6 +71,16 @@ public class ResponsePage {
* even though it will change number of resources returned.
*/
private final int myOmittedResourceCount;
/**
* This is the total count of requested resources
* (ie, non-omitted, non-_include'd resource count).
* We typically fetch (for offset queries) 1 more than
* we need so we know if there is an additional page
* to fetch.
* But this is determined by the implementers of
* IBundleProvider.
*/
private final int myTotalRequestedResourcesFetched;
/**
* The bundle provider.
@ -109,6 +119,7 @@ public class ResponsePage {
int theNumToReturn,
int theIncludedResourceCount,
int theOmittedResourceCount,
int theTotalRequestedResourcesFetched,
IBundleProvider theBundleProvider) {
mySearchId = theSearchId;
myResourceList = theResourceList;
@ -116,6 +127,7 @@ public class ResponsePage {
myNumToReturn = theNumToReturn;
myIncludedResourceCount = theIncludedResourceCount;
myOmittedResourceCount = theOmittedResourceCount;
myTotalRequestedResourcesFetched = theTotalRequestedResourcesFetched;
myBundleProvider = theBundleProvider;
myNumTotalResults = myBundleProvider.size();
@ -190,24 +202,16 @@ public class ResponsePage {
return StringUtils.isNotBlank(myBundleProvider.getNextPageId());
case NONCACHED_OFFSET:
if (myNumTotalResults == null) {
/*
* Having a null total results is synonymous with
* having a next link. Once our results are exhausted,
* we will always have a myNumTotalResults value.
*
* Alternatively, if _total=accurate is provided,
* we'll also have a myNumTotalResults value.
*/
return true;
if (hasNextPageWithoutKnowingTotal()) {
return true;
}
} else if (myNumTotalResults > myNumToReturn + ObjectUtils.defaultIfNull(myRequestedPage.offset, 0)) {
return true;
}
break;
case SAVED_SEARCH:
if (myNumTotalResults == null) {
if (myPageSize == myResourceList.size() + myOmittedResourceCount - myIncludedResourceCount) {
// if the size of the resource list - included resources + omitted resources == pagesize
// we have more pages
if (hasNextPageWithoutKnowingTotal()) {
return true;
}
} else if (myResponseBundleRequest.offset + myNumToReturn < myNumTotalResults) {
@ -220,6 +224,53 @@ public class ResponsePage {
return false;
}
/**
* If myNumTotalResults is null, it typically means we don't
* have an accurate total.
*
* Ie, we're in the middle of a set of pages (of non-named page results),
* and _total=accurate was not passed.
*
* This typically always means that a
* 'next' link definitely exists.
*
* But there are cases where this might not be true:
* * the last page of a search that also has an _include
* query parameter where the total of resources + _include'd
* resources is > the page size expected to be returned.
* * the last page of a search that returns the exact number
* of resources requested
*
* In these case, we must check to see if the returned
* number of *requested* resources.
* If our bundleprovider has fetched > requested,
* we'll know that there are more resources already.
* But if it hasn't, we'll have to check pagesize compared to
* _include'd count, omitted count, and resource count.
*/
private boolean hasNextPageWithoutKnowingTotal() {
// if we have totalRequestedResource count, and it's not equal to pagesize,
// then we can use this, alone, to determine if there are more pages
if (myTotalRequestedResourcesFetched >= 0) {
if (myPageSize < myTotalRequestedResourcesFetched) {
return true;
}
} else {
// otherwise we'll try and determine if there are next links based on the following
// calculation:
// resourceList.size - included resources + omitted resources == pagesize
// -> we (most likely) have more resources
if (myPageSize == myResourceList.size() - myIncludedResourceCount + myOmittedResourceCount) {
ourLog.warn(
"Returning a next page based on calculated resource count."
+ " This could be inaccurate if the exact number of resources were fetched is equal to the pagesize requested. "
+ " Consider setting ResponseBundleBuilder.setTotalResourcesFetchedRequest after fetching resources.");
return true;
}
}
return false;
}
public void setNextPageIfNecessary(BundleLinks theLinks) {
if (hasNextPage()) {
String next;
@ -356,9 +407,10 @@ public class ResponsePage {
private int myIncludedResourceCount;
private int myOmittedResourceCount;
private IBundleProvider myBundleProvider;
private int myTotalRequestedResourcesFetched = -1;
public ResponsePageBuilder setToOmittedResourceCount(int theOmittedResourcesCountToAdd) {
myOmittedResourceCount = theOmittedResourcesCountToAdd;
public ResponsePageBuilder setOmittedResourceCount(int theOmittedResourceCount) {
myOmittedResourceCount = theOmittedResourceCount;
return this;
}
@ -392,6 +444,36 @@ public class ResponsePage {
return this;
}
public ResponsePageBuilder setTotalRequestedResourcesFetched(int theTotalRequestedResourcesFetched) {
myTotalRequestedResourcesFetched = theTotalRequestedResourcesFetched;
return this;
}
/**
* Combine this builder with a second buider.
* Useful if a second page is requested, but you do not wish to
* overwrite the current values.
*
* Will not replace searchId, nor IBundleProvider (which should be
* the exact same for any subsequent searches anyways).
*
* Will also not copy pageSize nor numToReturn, as these should be
* the same for any single search result set.
*
* @param theSecondBuilder - a second builder (cannot be this one)
*/
public void combineWith(ResponsePageBuilder theSecondBuilder) {
assert theSecondBuilder != this; // don't want to combine with itself
if (myTotalRequestedResourcesFetched != -1 && theSecondBuilder.myTotalRequestedResourcesFetched != -1) {
myTotalRequestedResourcesFetched += theSecondBuilder.myTotalRequestedResourcesFetched;
}
// primitives can always be added
myIncludedResourceCount += theSecondBuilder.myIncludedResourceCount;
myOmittedResourceCount += theSecondBuilder.myOmittedResourceCount;
}
public ResponsePage build() {
return new ResponsePage(
mySearchId, // search id
@ -400,6 +482,7 @@ public class ResponsePage {
myNumToReturn, // num to return
myIncludedResourceCount, // included count
myOmittedResourceCount, // omitted resources
myTotalRequestedResourcesFetched, // total count of requested resources
myBundleProvider // the bundle provider
);
}

View File

@ -161,17 +161,24 @@ public class ResponsePageTest {
*/
@ParameterizedTest
@CsvSource({
"true,false,true",
"true,true,true",
"false,false,false",
"false,true,false",
"false,false,true",
"false,true,true"
"true,false,true,true",
"true,true,true,true",
"false,false,false,true",
"false,true,false,true",
"false,false,true,true",
"false,true,true,true",
"true,false,true,false",
"true,true,true,false",
"false,false,false,false",
"false,true,false,false",
"false,false,true,false",
"false,true,true,false"
})
public void nonCachedOffsetPaging_setsNextPreviousLinks_test(
boolean theNumTotalResultsIsNull,
boolean theHasPreviousBoolean,
boolean theHasNextBoolean
boolean theHasNextBoolean,
boolean theHasTotalRequestedCountBool
) {
// setup
myBundleBuilder
@ -193,6 +200,11 @@ public class ResponsePageTest {
} else {
when(myBundleProvider.size())
.thenReturn(null);
if (theHasTotalRequestedCountBool) {
myBundleBuilder.setTotalRequestedResourcesFetched(11); // 1 more than pagesize
} else {
myBundleBuilder.setPageSize(10);
}
}
RequestedPage requestedPage = new RequestedPage(
@ -215,19 +227,28 @@ public class ResponsePageTest {
@ParameterizedTest
@CsvSource({
"true,false,false",
"true,true,false",
"true,false,true",
"true,true,true",
"false,false,false",
"false,true,false",
"false,false,true",
"false,true,true"
"true,false,false,true",
"true,true,false,true",
"true,false,true,true",
"true,true,true,true",
"false,false,false,true",
"false,true,false,true",
"false,false,true,true",
"false,true,true,true",
"true,false,false,false",
"true,true,false,false",
"true,false,true,false",
"true,true,true,false",
"false,false,false,false",
"false,true,false,false",
"false,false,true,false",
"false,true,true,false"
})
public void savedSearch_setsNextPreviousLinks_test(
boolean theNumTotalResultsIsNull,
boolean theHasPreviousBoolean,
boolean theHasNextBoolean
boolean theHasNextBoolean,
boolean theHasTotalRequestedFetched
) {
// setup
int pageSize = myList.size();
@ -255,6 +276,12 @@ public class ResponsePageTest {
if (!theHasNextBoolean) {
myBundleBuilder.setNumToReturn(pageSize + offset + includeResourceCount);
}
} else if (theHasTotalRequestedFetched) {
if (theHasNextBoolean) {
myBundleBuilder.setTotalRequestedResourcesFetched(pageSize + 1); // 1 more than page size
} else {
myBundleBuilder.setTotalRequestedResourcesFetched(pageSize);
}
}
// when

View File

@ -7,7 +7,7 @@
<parent>
<artifactId>hapi-fhir-serviceloaders</artifactId>
<groupId>ca.uhn.hapi.fhir</groupId>
<version>6.11.2-SNAPSHOT</version>
<version>6.11.4-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -7,7 +7,7 @@
<parent>
<artifactId>hapi-fhir-serviceloaders</artifactId>
<groupId>ca.uhn.hapi.fhir</groupId>
<version>6.11.2-SNAPSHOT</version>
<version>6.11.4-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>
@ -21,7 +21,7 @@
<dependency>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-caching-api</artifactId>
<version>6.11.2-SNAPSHOT</version>
<version>6.11.4-SNAPSHOT</version>
</dependency>
<dependency>

View File

@ -7,7 +7,7 @@
<parent>
<artifactId>hapi-fhir-serviceloaders</artifactId>
<groupId>ca.uhn.hapi.fhir</groupId>
<version>6.11.2-SNAPSHOT</version>
<version>6.11.4-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -7,7 +7,7 @@
<parent>
<artifactId>hapi-fhir</artifactId>
<groupId>ca.uhn.hapi.fhir</groupId>
<version>6.11.2-SNAPSHOT</version>
<version>6.11.4-SNAPSHOT</version>
<relativePath>../../pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<artifactId>hapi-deployable-pom</artifactId>
<groupId>ca.uhn.hapi.fhir</groupId>
<version>6.11.2-SNAPSHOT</version>
<version>6.11.4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.11.2-SNAPSHOT</version>
<version>6.11.4-SNAPSHOT</version>
<relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-spring-boot-samples</artifactId>
<version>6.11.2-SNAPSHOT</version>
<version>6.11.4-SNAPSHOT</version>
</parent>
<artifactId>hapi-fhir-spring-boot-sample-client-apache</artifactId>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-spring-boot-samples</artifactId>
<version>6.11.2-SNAPSHOT</version>
<version>6.11.4-SNAPSHOT</version>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-spring-boot-samples</artifactId>
<version>6.11.2-SNAPSHOT</version>
<version>6.11.4-SNAPSHOT</version>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-spring-boot</artifactId>
<version>6.11.2-SNAPSHOT</version>
<version>6.11.4-SNAPSHOT</version>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.11.2-SNAPSHOT</version>
<version>6.11.4-SNAPSHOT</version>
<relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>6.11.2-SNAPSHOT</version>
<version>6.11.4-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.11.2-SNAPSHOT</version>
<version>6.11.4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -37,7 +37,7 @@ import javax.annotation.Nonnull;
public class AddIndexTask extends BaseTableTask {
private static final Logger ourLog = LoggerFactory.getLogger(AddIndexTask.class);
static final Logger ourLog = LoggerFactory.getLogger(AddIndexTask.class);
private String myIndexName;
private List<String> myColumns;
@ -97,8 +97,15 @@ public class AddIndexTask extends BaseTableTask {
try {
executeSql(tableName, sql);
} catch (Exception e) {
if (e.toString().contains("already exists")) {
ourLog.warn("Index {} already exists", myIndexName);
String message = e.toString();
if (message.contains("already exists")
||
// The Oracle message is ORA-01408: such column list already indexed
// TODO KHS consider db-specific handling here that uses the error code instead of the message so
// this is language independent
// e.g. if the db is Oracle than checking e.getErrorCode() == 1408 should detect this case
message.contains("already indexed")) {
ourLog.warn("Index {} already exists: {}", myIndexName, e.getMessage());
} else {
throw e;
}

View File

@ -44,7 +44,7 @@ public abstract class BaseTableColumnTask extends BaseTableTask {
}
public BaseTableColumnTask setColumnName(String theColumnName) {
myColumnName = theColumnName.toUpperCase();
myColumnName = theColumnName.toLowerCase();
return this;
}

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.11.2-SNAPSHOT</version>
<version>6.11.4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -76,13 +76,10 @@ public class BulkDataImportProvider {
public static final String PARAM_INPUT_TYPE = "type";
private static final Logger ourLog = LoggerFactory.getLogger(BulkDataImportProvider.class);
@Autowired
private IJobCoordinator myJobCoordinator;
@Autowired
private FhirContext myFhirCtx;
@Autowired
private IRequestPartitionHelperSvc myRequestPartitionHelperService;
private volatile List<String> myResourceTypeOrder;
@ -94,14 +91,17 @@ public class BulkDataImportProvider {
super();
}
@Autowired
public void setJobCoordinator(IJobCoordinator theJobCoordinator) {
myJobCoordinator = theJobCoordinator;
}
@Autowired
public void setFhirContext(FhirContext theCtx) {
myFhirCtx = theCtx;
}
@Autowired
public void setRequestPartitionHelperService(IRequestPartitionHelperSvc theRequestPartitionHelperSvc) {
myRequestPartitionHelperService = theRequestPartitionHelperSvc;
}

View File

@ -7,7 +7,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.11.2-SNAPSHOT</version>
<version>6.11.4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -7,7 +7,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.11.2-SNAPSHOT</version>
<version>6.11.4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -7,7 +7,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.11.2-SNAPSHOT</version>
<version>6.11.4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.11.2-SNAPSHOT</version>
<version>6.11.4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.11.2-SNAPSHOT</version>
<version>6.11.4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.11.2-SNAPSHOT</version>
<version>6.11.4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

Some files were not shown because too many files have changed in this diff Show More