Dependency bumps (#3264)

* Bump a number of versions

* Fixes

* Work on version bump

* Bump

* Test fix

* More version bumps

* Build fixes

* Test fixes

* Fixes

* Fixes

* Test fix

* Bump jacoco

* Test fix

* Test fix

* Test fix

* Build fix

* Build fix

* Remove versioned H2

* Build fix

* Avoid memory issue in validation tests

* JDK bump

* Restore compile with errorprone

* Force coloured output

* Force jansi

* Try again to force jansi

* Fix for hardcoded date that just passed

* Fixes to JPA

* Improve changelog

* Work on failing test

* Test fix

* Compile fix

* One more version bump

* Test fixes
This commit is contained in:
James Agnew 2022-01-04 13:21:14 -05:00 committed by GitHub
parent d9820bfb89
commit d610d33ac3
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
158 changed files with 122210 additions and 36268 deletions

View File

@ -15,7 +15,7 @@ pool:
jobs:
- job: Build
timeoutInMinutes: 360
container: maven:3-openjdk-15
container: maven:3.8-openjdk-17
steps:
- task: DockerInstaller@0
displayName: Docker Installer
@ -32,11 +32,11 @@ jobs:
script: mkdir -p $(MAVEN_CACHE_FOLDER); pwd; ls -al $(MAVEN_CACHE_FOLDER)
- task: Maven@3
env:
JAVA_HOME_11_X64: /usr/java/openjdk-15
JAVA_HOME_11_X64: /usr/java/openjdk-17
inputs:
goals: 'clean install'
# These are Maven CLI options (and show up in the build logs) - "-nsu"=Don't update snapshots. We can remove this when Maven OSS is more healthy
options: '-P ALLMODULES,JACOCO,CI,ERRORPRONE -e -B -Dmaven.repo.local=$(MAVEN_CACHE_FOLDER)'
options: '-P ALLMODULES,JACOCO,CI,ERRORPRONE -e -B -Dmaven.repo.local=$(MAVEN_CACHE_FOLDER) -Dmaven.wagon.http.pool=false -Dhttp.keepAlive=false -Dstyle.color=always -Djansi.force=true'
# These are JVM options (and don't show up in the build logs)
mavenOptions: '-Xmx1024m $(MAVEN_OPTS) -Dorg.slf4j.simpleLogger.showDateTime=true -Dorg.slf4j.simpleLogger.dateTimeFormat=HH:mm:ss,SSS -Duser.timezone=America/Toronto'
jdkVersionOption: 1.11

View File

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>5.7.0-PRE8-SNAPSHOT</version>
<version>5.7.0-PRE9-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.7.0-PRE8-SNAPSHOT</version>
<version>5.7.0-PRE9-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.7.0-PRE8-SNAPSHOT</version>
<version>5.7.0-PRE9-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -399,7 +399,13 @@ class ModelScanner {
ourLog.warn(b.toString());
continue;
}
providesMembershipInCompartments.add(next.name());
String name = next.name();
// As of 2021-12-28 the R5 structures incorrectly have this prefix
if (name.startsWith("Base FHIR compartment definition for ")) {
name = name.substring("Base FHIR compartment definition for ".length());
}
providesMembershipInCompartments.add(name);
}
List<RuntimeSearchParam.Component> components = null;

View File

@ -50,7 +50,12 @@ public class ClasspathUtil {
// nothing
}
public static String loadResource(String theClasspath) {
/**
* Load a classpath resource, throw an {@link InternalErrorException} if not found
*
* @throws InternalErrorException If the resource can't be found
*/
public static String loadResource(String theClasspath) throws InternalErrorException {
return loadResource(theClasspath, Function.identity());
}
@ -60,7 +65,7 @@ public class ClasspathUtil {
* @throws InternalErrorException If the resource can't be found
*/
@Nonnull
public static InputStream loadResourceAsStream(String theClasspath) {
public static InputStream loadResourceAsStream(String theClasspath) throws InternalErrorException {
String classpath = theClasspath;
if (classpath.startsWith("classpath:")) {
classpath = classpath.substring("classpath:".length());

View File

@ -41,7 +41,6 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.HashSet;
import java.util.IdentityHashMap;
@ -339,6 +338,10 @@ public class FhirTerser {
@SuppressWarnings("unchecked")
private <T extends IBase> List<T> getValues(BaseRuntimeElementCompositeDefinition<?> theCurrentDef, IBase theCurrentObj, List<String> theSubList, Class<T> theWantedClass, boolean theCreate, boolean theAddExtension) {
if (theSubList.isEmpty()) {
return Collections.emptyList();
}
String name = theSubList.get(0);
List<T> retVal = new ArrayList<>();
@ -683,7 +686,15 @@ public class FhirTerser {
parts.add(thePath.substring(currentStart));
if (parts.size() > 0 && parts.get(0).equals(theElementDef.getName())) {
String firstPart = parts.get(0);
if (Character.isUpperCase(firstPart.charAt(0)) && theElementDef instanceof RuntimeResourceDefinition) {
if (firstPart.equals(theElementDef.getName())) {
parts = parts.subList(1, parts.size());
} else {
parts = Collections.emptyList();
return parts;
}
} else if (firstPart.equals(theElementDef.getName())) {
parts = parts.subList(1, parts.size());
}

View File

@ -7,6 +7,7 @@ import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import static ca.uhn.fhir.util.TestUtil.sleepAtLeast;
import static org.awaitility.Awaitility.await;
public class AsyncUtilTest {
@ -24,6 +25,7 @@ public class AsyncUtilTest {
outcomeHolder.set(outcome);
});
thread.start();
sleepAtLeast(1000);
thread.interrupt();
await().until(()-> outcomeHolder.get() == false);
}

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.7.0-PRE8-SNAPSHOT</version>
<version>5.7.0-PRE9-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -10,7 +10,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.7.0-PRE8-SNAPSHOT</version>
<version>5.7.0-PRE9-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.7.0-PRE8-SNAPSHOT</version>
<version>5.7.0-PRE9-SNAPSHOT</version>
<relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -44,8 +44,8 @@ public class HapiFlywayMigrateDatabaseCommandTest {
File location = getLocation("migrator_h2_test_340_current");
String url = "jdbc:h2:" + location.getAbsolutePath() + ";create=true";
DriverTypeEnum.ConnectionProperties connectionProperties = DriverTypeEnum.H2_EMBEDDED.newConnectionProperties(url, "", "");
String url = "jdbc:h2:" + location.getAbsolutePath();
DriverTypeEnum.ConnectionProperties connectionProperties = DriverTypeEnum.H2_EMBEDDED.newConnectionProperties(url, "SA", "SA");
String initSql = "/persistence_create_h2_340.sql";
executeSqlStatements(connectionProperties, initSql);
@ -60,8 +60,8 @@ public class HapiFlywayMigrateDatabaseCommandTest {
BaseFlywayMigrateDatabaseCommand.MIGRATE_DATABASE,
"-d", "H2_EMBEDDED",
"-u", url,
"-n", "",
"-p", ""
"-n", "SA",
"-p", "SA"
};
assertFalse(JdbcUtils.getTableNames(connectionProperties).contains("HFJ_RES_REINDEX_JOB"));
@ -119,7 +119,7 @@ public class HapiFlywayMigrateDatabaseCommandTest {
File location = getLocation("migrator_h2_test_340_current_noflyway");
String url = "jdbc:h2:" + location.getAbsolutePath() + ";create=true";
String url = "jdbc:h2:" + location.getAbsolutePath();
DriverTypeEnum.ConnectionProperties connectionProperties = DriverTypeEnum.H2_EMBEDDED.newConnectionProperties(url, "", "");
String initSql = "/persistence_create_h2_340.sql";
@ -160,7 +160,7 @@ public class HapiFlywayMigrateDatabaseCommandTest {
File location = getLocation("migrator_h2_test_340_dryrun");
String url = "jdbc:h2:" + location.getAbsolutePath() + ";create=true";
String url = "jdbc:h2:" + location.getAbsolutePath();
DriverTypeEnum.ConnectionProperties connectionProperties = DriverTypeEnum.H2_EMBEDDED.newConnectionProperties(url, "", "");
String initSql = "/persistence_create_h2_340.sql";
@ -229,8 +229,8 @@ public class HapiFlywayMigrateDatabaseCommandTest {
File location = getLocation("migrator_h2_test_empty_current");
String url = "jdbc:h2:" + location.getAbsolutePath() + ";create=true";
DriverTypeEnum.ConnectionProperties connectionProperties = DriverTypeEnum.H2_EMBEDDED.newConnectionProperties(url, "", "");
String url = "jdbc:h2:" + location.getAbsolutePath();
DriverTypeEnum.ConnectionProperties connectionProperties = DriverTypeEnum.H2_EMBEDDED.newConnectionProperties(url, "SA", "SA");
ourLog.info("**********************************************");
ourLog.info("Starting Migration...");
@ -240,8 +240,8 @@ public class HapiFlywayMigrateDatabaseCommandTest {
BaseFlywayMigrateDatabaseCommand.MIGRATE_DATABASE,
"-d", "H2_EMBEDDED",
"-u", url,
"-n", "",
"-p", ""
"-n", "SA",
"-p", "SA"
};
assertFalse(JdbcUtils.getTableNames(connectionProperties).contains("HFJ_RESOURCE"));
@ -256,7 +256,7 @@ public class HapiFlywayMigrateDatabaseCommandTest {
File location = getLocation("migrator_h2_test_empty_current_noflyway");
String url = "jdbc:h2:" + location.getAbsolutePath() + ";create=true";
String url = "jdbc:h2:" + location.getAbsolutePath();
DriverTypeEnum.ConnectionProperties connectionProperties = DriverTypeEnum.H2_EMBEDDED.newConnectionProperties(url, "", "");
ourLog.info("**********************************************");

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-cli</artifactId>
<version>5.7.0-PRE8-SNAPSHOT</version>
<version>5.7.0-PRE9-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.7.0-PRE8-SNAPSHOT</version>
<version>5.7.0-PRE9-SNAPSHOT</version>
<relativePath>../../hapi-deployable-pom</relativePath>
</parent>

View File

@ -25,7 +25,7 @@ import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.entity.ModelConfig;
import ca.uhn.fhir.jpa.search.HapiLuceneAnalysisConfigurer;
import org.apache.commons.dbcp2.BasicDataSource;
import org.hibernate.dialect.H2Dialect;
import ca.uhn.fhir.jpa.model.dialect.HapiFhirH2Dialect;
import org.hibernate.search.backend.lucene.cfg.LuceneBackendSettings;
import org.hibernate.search.backend.lucene.cfg.LuceneIndexSettings;
import org.hibernate.search.engine.cfg.BackendSettings;
@ -83,7 +83,7 @@ public class CommonConfig {
Properties extraProperties = new Properties();
//Regular Hibernate Settings
extraProperties.put("hibernate.dialect", H2Dialect.class.getName());
extraProperties.put("hibernate.dialect", HapiFhirH2Dialect.class.getName());
extraProperties.put("hibernate.format_sql", "true");
extraProperties.put("hibernate.show_sql", "false");
extraProperties.put("hibernate.hbm2ddl.auto", "update");

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>5.7.0-PRE8-SNAPSHOT</version>
<version>5.7.0-PRE9-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.7.0-PRE8-SNAPSHOT</version>
<version>5.7.0-PRE9-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.7.0-PRE8-SNAPSHOT</version>
<version>5.7.0-PRE9-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.7.0-PRE8-SNAPSHOT</version>
<version>5.7.0-PRE9-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>5.7.0-PRE8-SNAPSHOT</version>
<version>5.7.0-PRE9-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.7.0-PRE8-SNAPSHOT</version>
<version>5.7.0-PRE9-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -4,9 +4,26 @@
title: "The version of a few dependencies have been bumped to the latest versions
(dependent HAPI modules listed in brackets):
<ul>
<li>Spring (JPA): 5.3.7 -> 5.3.13</li>
<li>Thymeleaf (Testpage Overlay): 3.0.12.RELEASE -> 3.0.13.RELEASE (Addresses CVE-2021-43466)</li>
<li>log4j-api (JPA): 2.11.1 -> 2.15.0 (Addresses CVE-2021-44228 - HAPI FHIR was not vulnerable to this issue but this upgrade avoids unnecessary OWASP scan notices)</li>
<li>log4j-api (JPA): 2.11.1 -> 2.17.1 (Addresses CVE-2021-44228 - HAPI FHIR was not vulnerable to this issue but this upgrade avoids unnecessary OWASP scan notices)</li>
<li>SLF4j (All): 1.7.30 -> 1.7.32</li>
<li>Logback (All): 1.2.8 -> 1.2.10</li>
<li>Commons-IO (All): 2.8.0 -> 2.11.0</li>
<li>Jackson (All): 2.13.0 -> 2.13.1</li>
<li>Guava (All): 30.1.1-jre -> 31.0.1-jre</li>
<li>JDOM (XML Patch Support): 2.0.6 -> 2.0.6.1 (Addresses CVE-2021-33813)</li>
<li>Spring (JPA): 5.3.7 -> 5.3.14</li>
<li>Spring-Data (JPA): 2.5.0 -> 2.6.0</li>
<li>Hibernate ORM (JPA): 5.4.30.Final -> 5.4.33</li>
<li>Flyway (JPA): 6.5.4 -> 8.3.0</li>
<li>H2 (JPA): 1.4.200 -> 2.0.204 (Note that this change requires the use of the HapiFhirH2Dialect instead of the built-in Hibernate H2Dialect due to Hibernate issue <a href='https://hibernate.atlassian.net/browse/HHH-15002'>HHH-15002</a>)</li>
<li>Commons-DBCP2 (JPA): 2.8.0 -> 2.9.0</li>
<li>Swagger-Models (OpenAPI Support): 2.1.7 -> 2.1.12</li>
<li>Thymeleaf (Testpage Overlay): 3.0.12.RELEASE -> 3.0.14.RELEASE (Addresses CVE-2021-43466)</li>
<li>Commons-CLI (CLI): 1.4 -> 1.5.0</li>
<li>JANSI (CLI): 2.3.2 -> 2.4.0</li>
<li>Jetty Server (CLI): 9.4.43.v20210629 -> 9.4.44.v20210927</li>
<li>Spring Boot (Boot): 2.5.0 -> 2.6.2</li>
<li>Swagger UI (OpenAPI): 3.46.0 -> 4.1.3</li>
</ul>
"

View File

@ -25,6 +25,36 @@ Note also that after the release of the FHIR DSTU2 specification, the FHIR
</tr>
</thead>
<tbody>
<tr>
<td>HAPI FHIR 5.7.0</td>
<td>JDK8</td>
<td class="versions-table-cell-empty"></td>
<td class="versions-table-cell-draft">1.0.2</td>
<td class="versions-table-cell-release">1.4.0</td>
<td class="versions-table-cell-draft">3.0.2</td>
<td class="versions-table-cell-draft">4.0.1</td>
<td class="versions-table-cell-release">5.0.0-snapshot1<span class="download-version-hash"><br/>0394b96b14</span></td>
</tr>
<tr>
<td>HAPI FHIR 5.6.0</td>
<td>JDK8</td>
<td class="versions-table-cell-empty"></td>
<td class="versions-table-cell-draft">1.0.2</td>
<td class="versions-table-cell-release">1.4.0</td>
<td class="versions-table-cell-draft">3.0.2</td>
<td class="versions-table-cell-draft">4.0.1</td>
<td class="versions-table-cell-release">4.6.0<span class="download-version-hash"><br/>9b829d9714</span></td>
</tr>
<tr>
<td>HAPI FHIR 5.5.0</td>
<td>JDK8</td>
<td class="versions-table-cell-empty"></td>
<td class="versions-table-cell-draft">1.0.2</td>
<td class="versions-table-cell-release">1.4.0</td>
<td class="versions-table-cell-draft">3.0.2</td>
<td class="versions-table-cell-draft">4.0.1</td>
<td class="versions-table-cell-release">4.6.0<span class="download-version-hash"><br/>9b829d9714</span></td>
</tr>
<tr>
<td>HAPI FHIR 5.4.0</td>
<td>JDK8</td>

View File

@ -11,7 +11,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.7.0-PRE8-SNAPSHOT</version>
<version>5.7.0-PRE9-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.7.0-PRE8-SNAPSHOT</version>
<version>5.7.0-PRE9-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -46,7 +46,6 @@ import org.junit.jupiter.api.TestMethodOrder;
import org.mockito.ArgumentCaptor;
import org.mockito.ArgumentMatcher;
import org.mockito.ArgumentMatchers;
import org.mockito.Matchers;
import java.util.ArrayList;
import java.util.Arrays;
@ -178,7 +177,7 @@ public class AbstractJaxRsResourceProviderDstu3Test {
toCreate.getIdentifier().add(new Identifier().setValue("myIdentifier"));
outcome.setResource(toCreate);
when(mock.create(patientCaptor.capture(), isNull(String.class))).thenReturn(outcome);
when(mock.create(patientCaptor.capture(), isNull())).thenReturn(outcome);
client.setEncoding(EncodingEnum.JSON);
final MethodOutcome response = client.create().resource(toCreate).prefer(PreferReturnEnum.REPRESENTATION)
.execute();
@ -295,11 +294,11 @@ public class AbstractJaxRsResourceProviderDstu3Test {
@Test
public void testSearchUsingGenericClientBySearch() {
// Perform a search
when(mock.search(any(StringParam.class), Matchers.isNull(StringAndListParam.class)))
when(mock.search(any(StringParam.class), isNull()))
.thenReturn(Arrays.asList(createPatient(1)));
final Bundle results = client.search().forResource(Patient.class)
.where(Patient.NAME.matchesExactly().value(PATIENT_NAME)).returnBundle(Bundle.class).execute();
verify(mock).search(any(StringParam.class), Matchers.isNull(StringAndListParam.class));
verify(mock).search(any(StringParam.class), isNull());
IBaseResource resource = results.getEntry().get(0).getResource();
compareResultId(1, resource);

View File

@ -43,7 +43,6 @@ import org.junit.jupiter.api.MethodOrderer;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestMethodOrder;
import org.mockito.ArgumentCaptor;
import org.mockito.Matchers;
import java.util.ArrayList;
import java.util.Arrays;
@ -169,7 +168,7 @@ public class AbstractJaxRsResourceProviderTest {
toCreate.getIdentifierFirstRep().setValue("myIdentifier");
outcome.setResource(toCreate);
when(mock.create(patientCaptor.capture(), isNull(String.class))).thenReturn(outcome);
when(mock.create(patientCaptor.capture(), isNull())).thenReturn(outcome);
client.setEncoding(EncodingEnum.JSON);
final MethodOutcome response = client.create().resource(toCreate).prefer(PreferReturnEnum.REPRESENTATION)
.execute();
@ -269,11 +268,11 @@ public class AbstractJaxRsResourceProviderTest {
@Test
public void testSearchUsingGenericClientBySearch() {
// Perform a search
when(mock.search(any(StringParam.class), Matchers.isNull(StringAndListParam.class)))
when(mock.search(any(StringParam.class), isNull()))
.thenReturn(Arrays.asList(createPatient(1)));
Bundle results = client.search().forResource(Patient.class)
.where(Patient.NAME.matchesExactly().value(PATIENT_NAME)).returnBundle(Bundle.class).execute();
verify(mock).search(any(StringParam.class), Matchers.isNull(StringAndListParam.class));
verify(mock).search(any(StringParam.class), isNull());
IResource resource = results.getEntry().get(0).getResource();
compareResultId(1, resource);

View File

@ -1,8 +1,8 @@
package ca.uhn.fhir.jaxrs.server.interceptor;
import static org.junit.jupiter.api.Assertions.*;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.isNull;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.isNull;
import static org.mockito.Mockito.*;
import java.net.URI;
@ -10,7 +10,6 @@ import java.util.HashMap;
import javax.interceptor.InvocationContext;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.core.*;
import org.junit.jupiter.api.BeforeEach;
@ -57,10 +56,10 @@ public class JaxRsExceptionInterceptorTest {
}
@Test
public void testIntercepWithServletError() throws Throwable {
public void testInterceptorWithServletError() throws Throwable {
ExceptionHandlingInterceptor exceptionHandler = mock(ExceptionHandlingInterceptor.class);
when(exceptionHandler.preProcessOutgoingException(any(RequestDetails.class), any(Throwable.class),
isNull(HttpServletRequest.class))).thenThrow(new ServletException("someMessage"));
isNull())).thenThrow(new ServletException("someMessage"));
interceptor = new JaxRsExceptionInterceptor(exceptionHandler);
when(context.proceed()).thenThrow(new ServletException());
try {

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.7.0-PRE8-SNAPSHOT</version>
<version>5.7.0-PRE9-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.7.0-PRE8-SNAPSHOT</version>
<version>5.7.0-PRE9-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -156,6 +156,7 @@ import org.hl7.fhir.common.hapi.validation.support.UnknownCodeSystemWarningValid
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.utilities.graphql.IGraphQLStorageServices;
import org.hl7.fhir.utilities.npm.FilesystemPackageCacheManager;
import org.hl7.fhir.utilities.npm.PackageClient;
import org.springframework.batch.core.configuration.JobRegistry;
import org.springframework.batch.core.configuration.annotation.BatchConfigurer;
import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing;
@ -377,8 +378,8 @@ public abstract class BaseConfig {
public IHapiPackageCacheManager packageCacheManager() {
JpaPackageCache retVal = new JpaPackageCache();
retVal.getPackageServers().clear();
retVal.getPackageServers().add(FilesystemPackageCacheManager.PRIMARY_SERVER);
retVal.getPackageServers().add(FilesystemPackageCacheManager.SECONDARY_SERVER);
retVal.getPackageServers().add(PackageClient.PRIMARY_SERVER);
retVal.getPackageServers().add(PackageClient.SECONDARY_SERVER);
return retVal;
}

View File

@ -1,7 +1,6 @@
package ca.uhn.fhir.jpa.dao.data;
import ca.uhn.fhir.jpa.entity.PartitionEntity;
import org.checkerframework.checker.nullness.Opt;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;

View File

@ -43,7 +43,7 @@ public interface ISearchDao extends JpaRepository<Search, Long>, IHapiFhirJpaRep
@Query("SELECT s.myId FROM Search s WHERE s.myDeleted = TRUE")
Slice<Long> findDeleted(Pageable thePage);
@Query("SELECT s FROM Search s WHERE s.myResourceType = :type AND mySearchQueryStringHash = :hash AND (s.myCreated > :cutoff) AND s.myDeleted = false AND s.myStatus <> 'FAILED'")
@Query("SELECT s FROM Search s WHERE s.myResourceType = :type AND mySearchQueryStringHash = :hash AND (s.myCreated > :cutoff) AND s.myDeleted = FALSE AND s.myStatus <> 'FAILED'")
Collection<Search> findWithCutoffOrExpiry(@Param("type") String theResourceType, @Param("hash") int theHashCode, @Param("cutoff") Date theCreatedCutoff);
@Query("SELECT COUNT(s) FROM Search s WHERE s.myDeleted = TRUE")

View File

@ -23,6 +23,7 @@ package ca.uhn.fhir.jpa.dao.expunge;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
public class ResourceForeignKey {
public final String table;
@ -57,7 +58,7 @@ public class ResourceForeignKey {
@Override
public String toString() {
return new ToStringBuilder(this)
return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE)
.append("table", table)
.append("key", key)
.toString();

View File

@ -74,7 +74,7 @@ public class FhirResourceDaoCodeSystemR5 extends BaseHapiFhirResourceDao<CodeSys
@Override
public List<IIdType> findCodeSystemIdsContainingSystemAndCode(String theCode, String theSystem, RequestDetails theRequest) {
List<IIdType> valueSetIds;
Set<ResourcePersistentId> ids = searchForIds(new SearchParameterMap(CodeSystem.SP_CODE, new TokenParam(theSystem, theCode)), theRequest);
Set<ResourcePersistentId> ids = searchForIds(new SearchParameterMap(org.hl7.fhir.r4.model.CodeSystem.SP_CODE, new TokenParam(theSystem, theCode)), theRequest);
valueSetIds = new ArrayList<>();
for (ResourcePersistentId next : ids) {
IIdType id = myIdHelperService.translatePidIdToForcedId(myFhirContext, "CodeSystem", next);

View File

@ -54,22 +54,22 @@ public class FhirResourceDaoObservationR5 extends BaseHapiFhirResourceDaoObserva
@Override
protected String getEffectiveParamName() {
return Observation.SP_DATE;
return org.hl7.fhir.r4.model.Observation.SP_DATE;
}
@Override
protected String getCodeParamName() {
return Observation.SP_CODE;
return org.hl7.fhir.r4.model.Observation.SP_CODE;
}
@Override
protected String getSubjectParamName() {
return Observation.SP_SUBJECT;
return org.hl7.fhir.r4.model.Observation.SP_SUBJECT;
}
@Override
protected String getPatientParamName() {
return Observation.SP_PATIENT;
return org.hl7.fhir.r4.model.Observation.SP_PATIENT;
}
@Override

View File

@ -81,14 +81,14 @@ public class BaseJpaResourceProviderObservationR5 extends JpaResourceProviderR5<
startRequest(theServletRequest);
try {
SearchParameterMap paramMap = new SearchParameterMap();
paramMap.add(Observation.SP_CATEGORY, theCategory);
paramMap.add(Observation.SP_CODE, theCode);
paramMap.add(Observation.SP_DATE, theDate);
paramMap.add(org.hl7.fhir.r4.model.Observation.SP_CATEGORY, theCategory);
paramMap.add(org.hl7.fhir.r4.model.Observation.SP_CODE, theCode);
paramMap.add(org.hl7.fhir.r4.model.Observation.SP_DATE, theDate);
if (thePatient != null) {
paramMap.add(Observation.SP_PATIENT, thePatient);
paramMap.add(org.hl7.fhir.r4.model.Observation.SP_PATIENT, thePatient);
}
if (theSubject != null) {
paramMap.add(Observation.SP_SUBJECT, theSubject);
paramMap.add(org.hl7.fhir.r4.model.Observation.SP_SUBJECT, theSubject);
}
if (theMax != null) {
paramMap.setLastNMax(theMax.getValue());

View File

@ -60,7 +60,7 @@ public class BaseJpaResourceProviderStructureDefinitionR5 extends JpaResourcePro
} else {
SearchParameterMap map = new SearchParameterMap();
map.setLoadSynchronousUpTo(2);
map.add(StructureDefinition.SP_URL, new UriParam(theUrl.getValue()));
map.add(org.hl7.fhir.r4.model.StructureDefinition.SP_URL, new UriParam(theUrl.getValue()));
IBundleProvider outcome = getDao().search(map, theRequestDetails);
Integer numResults = outcome.size();
assert numResults != null;

View File

@ -22,6 +22,7 @@ package ca.uhn.fhir.jpa.term.job;
import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemDao;
import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemVersionDao;
import ca.uhn.fhir.jpa.entity.TermCodeSystem;
import org.jetbrains.annotations.NotNull;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -41,15 +42,13 @@ public class TermCodeSystemDeleteTasklet implements Tasklet {
@Autowired
private ITermCodeSystemDao myTermCodeSystemDao;
@Autowired
private ITermCodeSystemVersionDao myCodeSystemVersionDao;
@Override
public RepeatStatus execute(@NotNull StepContribution contribution, ChunkContext context) throws Exception {
long codeSystemPid = (Long) context.getStepContext().getJobParameters().get(JOB_PARAM_CODE_SYSTEM_ID);
ourLog.info("Deleting code system {}", codeSystemPid);
myTermCodeSystemDao.findById(codeSystemPid).orElseThrow(IllegalStateException::new);
TermCodeSystem cs = myTermCodeSystemDao.findById(codeSystemPid).orElseThrow(IllegalStateException::new);
ourLog.info("Deleting code system {} / {}", codeSystemPid, cs.getCodeSystemUri());
myTermCodeSystemDao.deleteById(codeSystemPid);
ourLog.info("Code system {} deleted", codeSystemPid);

View File

@ -1,64 +0,0 @@
package ca.uhn.fhir.jpa.term.job;
/*
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemDao;
import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemVersionDao;
import org.jetbrains.annotations.NotNull;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.core.StepContribution;
import org.springframework.batch.core.scope.context.ChunkContext;
import org.springframework.batch.core.step.tasklet.Tasklet;
import org.springframework.batch.repeat.RepeatStatus;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import javax.annotation.Nonnull;
import static ca.uhn.fhir.jpa.batch.config.BatchConstants.JOB_PARAM_CODE_SYSTEM_ID;
/**
* Deletes the TermConcept(s) related to the TermCodeSystemVersion being deleted
* Executes in its own step to be in own transaction because it is a DB-heavy operation
*/
@Component
public class TermConceptDeleteTasklet implements Tasklet {
private static final Logger ourLog = LoggerFactory.getLogger(TermConceptDeleteTasklet.class);
@Autowired
private ITermCodeSystemDao myTermCodeSystemDao;
@Autowired
private ITermCodeSystemVersionDao myCodeSystemVersionDao;
@Override
public RepeatStatus execute(@Nonnull StepContribution contribution, ChunkContext context) throws Exception {
long codeSystemPid = (Long) context.getStepContext().getJobParameters().get(JOB_PARAM_CODE_SYSTEM_ID);
ourLog.info("Deleting code system {}", codeSystemPid);
myTermCodeSystemDao.findById(codeSystemPid).orElseThrow(IllegalStateException::new);
myTermCodeSystemDao.deleteById(codeSystemPid);
return RepeatStatus.FINISHED;
}
}

View File

@ -20,7 +20,7 @@ import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.indices.PutIndexTemplateRequest;
import org.elasticsearch.common.settings.Settings;
import org.hibernate.dialect.H2Dialect;
import ca.uhn.fhir.jpa.model.dialect.HapiFhirH2Dialect;
import org.hibernate.jpa.HibernatePersistenceProvider;
import org.hibernate.search.backend.elasticsearch.cfg.ElasticsearchBackendSettings;
import org.hibernate.search.backend.elasticsearch.cfg.ElasticsearchIndexSettings;
@ -101,7 +101,7 @@ public class ElasticsearchWithPrefixConfig {
extraProperties.put("hibernate.format_sql", "false");
extraProperties.put("hibernate.show_sql", "false");
extraProperties.put("hibernate.hbm2ddl.auto", "update");
extraProperties.put("hibernate.dialect", H2Dialect.class.getName());
extraProperties.put("hibernate.dialect", HapiFhirH2Dialect.class.getName());
//Override default lucene settings
// Force elasticsearch to start first
int httpPort = elasticContainer().getMappedPort(9200);//9200 is the HTTP port

View File

@ -9,7 +9,7 @@ import ca.uhn.fhir.validation.ResultSeverityEnum;
import net.ttddyy.dsproxy.listener.ThreadQueryCountHolder;
import net.ttddyy.dsproxy.support.ProxyDataSourceBuilder;
import org.apache.commons.dbcp2.BasicDataSource;
import org.hibernate.dialect.H2Dialect;
import ca.uhn.fhir.jpa.model.dialect.HapiFhirH2Dialect;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
@ -148,7 +148,7 @@ public class TestDstu2Config extends BaseJavaConfigDstu2 {
extraProperties.put("hibernate.format_sql", "true");
extraProperties.put("hibernate.show_sql", "false");
extraProperties.put("hibernate.hbm2ddl.auto", "update");
extraProperties.put("hibernate.dialect", H2Dialect.class.getName());
extraProperties.put("hibernate.dialect", HapiFhirH2Dialect.class.getName());
boolean enableLucene = myEnv.getProperty(BaseJpaTest.CONFIG_ENABLE_LUCENE, Boolean.TYPE, BaseJpaTest.CONFIG_ENABLE_LUCENE_DEFAULT_VALUE);
Map<String, String> hibernateSearchProperties = BaseJpaTest.buildHibernateSearchProperties(enableLucene);

View File

@ -12,7 +12,7 @@ import ca.uhn.fhir.rest.server.mail.MailSvc;
import ca.uhn.fhir.validation.ResultSeverityEnum;
import net.ttddyy.dsproxy.support.ProxyDataSourceBuilder;
import org.apache.commons.dbcp2.BasicDataSource;
import org.hibernate.dialect.H2Dialect;
import ca.uhn.fhir.jpa.model.dialect.HapiFhirH2Dialect;
import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
@ -154,7 +154,7 @@ public class TestDstu3Config extends BaseJavaConfigDstu3 {
extraProperties.put("hibernate.format_sql", "false");
extraProperties.put("hibernate.show_sql", "false");
extraProperties.put("hibernate.hbm2ddl.auto", "update");
extraProperties.put("hibernate.dialect", H2Dialect.class.getName());
extraProperties.put("hibernate.dialect", HapiFhirH2Dialect.class.getName());
boolean enableLucene = myEnv.getProperty(BaseJpaTest.CONFIG_ENABLE_LUCENE, Boolean.TYPE, BaseJpaTest.CONFIG_ENABLE_LUCENE_DEFAULT_VALUE);
Map<String, String> hibernateSearchProperties = BaseJpaTest.buildHibernateSearchProperties(enableLucene);

View File

@ -14,7 +14,7 @@ import net.ttddyy.dsproxy.listener.SingleQueryCountHolder;
import net.ttddyy.dsproxy.listener.logging.SLF4JLogLevel;
import net.ttddyy.dsproxy.support.ProxyDataSourceBuilder;
import org.apache.commons.dbcp2.BasicDataSource;
import org.hibernate.dialect.H2Dialect;
import ca.uhn.fhir.jpa.model.dialect.HapiFhirH2Dialect;
import org.hibernate.jpa.HibernatePersistenceProvider;
import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
import org.springframework.context.annotation.Bean;
@ -166,7 +166,7 @@ public class TestR4Config extends BaseJavaConfigR4 {
extraProperties.put("hibernate.format_sql", "false");
extraProperties.put("hibernate.show_sql", "false");
extraProperties.put("hibernate.hbm2ddl.auto", "update");
extraProperties.put("hibernate.dialect", H2Dialect.class.getName());
extraProperties.put("hibernate.dialect", HapiFhirH2Dialect.class.getName());
boolean enableLucene = myEnv.getProperty(BaseJpaTest.CONFIG_ENABLE_LUCENE, Boolean.TYPE, BaseJpaTest.CONFIG_ENABLE_LUCENE_DEFAULT_VALUE);
Map<String, String> hibernateSearchProperties = BaseJpaTest.buildHibernateSearchProperties(enableLucene);

View File

@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.config;
import java.util.Properties;
import org.hibernate.dialect.H2Dialect;
import ca.uhn.fhir.jpa.model.dialect.HapiFhirH2Dialect;
import org.hibernate.search.backend.elasticsearch.index.layout.IndexLayoutStrategy;
import org.hibernate.search.backend.lucene.cfg.LuceneBackendSettings;
import org.hibernate.search.engine.cfg.BackendSettings;
@ -42,7 +42,7 @@ public class TestR4WithLuceneDisabledConfig extends TestR4Config {
extraProperties.put("hibernate.format_sql", "false");
extraProperties.put("hibernate.show_sql", "false");
extraProperties.put("hibernate.hbm2ddl.auto", "update");
extraProperties.put("hibernate.dialect", H2Dialect.class.getName());
extraProperties.put("hibernate.dialect", HapiFhirH2Dialect.class.getName());
extraProperties.put(HibernateOrmMapperSettings.ENABLED, "false");
return extraProperties;
}

View File

@ -10,7 +10,7 @@ import ca.uhn.fhir.validation.ResultSeverityEnum;
import net.ttddyy.dsproxy.listener.SingleQueryCountHolder;
import net.ttddyy.dsproxy.support.ProxyDataSourceBuilder;
import org.apache.commons.dbcp2.BasicDataSource;
import org.hibernate.dialect.H2Dialect;
import ca.uhn.fhir.jpa.model.dialect.HapiFhirH2Dialect;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
import org.springframework.context.annotation.Bean;
@ -149,7 +149,7 @@ public class TestR5Config extends BaseJavaConfigR5 {
extraProperties.put("hibernate.format_sql", "false");
extraProperties.put("hibernate.show_sql", "false");
extraProperties.put("hibernate.hbm2ddl.auto", "update");
extraProperties.put("hibernate.dialect", H2Dialect.class.getName());
extraProperties.put("hibernate.dialect", HapiFhirH2Dialect.class.getName());
boolean enableLucene = myEnv.getProperty(BaseJpaTest.CONFIG_ENABLE_LUCENE, Boolean.TYPE, BaseJpaTest.CONFIG_ENABLE_LUCENE_DEFAULT_VALUE);
Map<String, String> hibernateSearchProperties = BaseJpaTest.buildHibernateSearchProperties(enableLucene);

View File

@ -11,7 +11,6 @@ import ca.uhn.fhir.jpa.api.model.ExpungeOptions;
import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc;
import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc;
import ca.uhn.fhir.jpa.config.BaseConfig;
import ca.uhn.fhir.jpa.config.TestDstu2Config;
import ca.uhn.fhir.jpa.dao.data.IForcedIdDao;
import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTableDao;
import ca.uhn.fhir.jpa.dao.data.IResourceIndexedComboTokensNonUniqueDao;
@ -72,7 +71,6 @@ import org.hl7.fhir.dstu3.model.Resource;
import org.hl7.fhir.instance.model.api.IBaseBundle;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.jetbrains.annotations.NotNull;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeAll;

View File

@ -17,7 +17,6 @@ import ca.uhn.fhir.jpa.dao.data.IResourceIndexedSearchParamTokenDao;
import ca.uhn.fhir.jpa.dao.data.IResourceLinkDao;
import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
import ca.uhn.fhir.jpa.model.entity.ModelConfig;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.provider.JpaSystemProviderDstu2;
import ca.uhn.fhir.jpa.search.DatabaseBackedPagingProvider;
import ca.uhn.fhir.jpa.search.reindex.IResourceReindexingSvc;
@ -61,8 +60,6 @@ import ca.uhn.fhir.parser.IParser;
import ca.uhn.fhir.rest.api.EncodingEnum;
import ca.uhn.fhir.rest.server.provider.ResourceProviderFactory;
import org.apache.commons.io.IOUtils;
import org.hibernate.search.mapper.orm.Search;
import org.hibernate.search.mapper.orm.session.SearchSession;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;

View File

@ -33,7 +33,6 @@ import ca.uhn.fhir.jpa.dao.data.ITermValueSetDao;
import ca.uhn.fhir.jpa.dao.dstu2.FhirResourceDaoDstu2SearchNoFtTest;
import ca.uhn.fhir.jpa.dao.r4.BaseJpaR4Test;
import ca.uhn.fhir.jpa.model.entity.ModelConfig;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.provider.dstu3.JpaSystemProviderDstu3;
import ca.uhn.fhir.jpa.search.DatabaseBackedPagingProvider;
import ca.uhn.fhir.jpa.search.IStaleSearchDeletingSvc;
@ -55,8 +54,6 @@ import ca.uhn.fhir.rest.server.provider.ResourceProviderFactory;
import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
import ca.uhn.fhir.util.UrlUtil;
import org.apache.commons.io.IOUtils;
import org.hibernate.search.mapper.orm.Search;
import org.hibernate.search.mapper.orm.session.SearchSession;
import org.hl7.fhir.convertors.advisors.impl.BaseAdvisor_30_40;
import org.hl7.fhir.convertors.factory.VersionConvertorFactory_30_40;
import org.hl7.fhir.dstu3.model.AllergyIntolerance;

View File

@ -199,7 +199,10 @@ public class FhirResourceDaoDstu3ValueSetMultiVersionTest extends BaseJpaDstu3Te
if (optionalTermValueSet.isPresent()) {
fail();
}
myTermValueSetDao.findTermValueSetByUrlAndVersion(URL_MY_VALUE_SET, "v2").orElseThrow(() -> new IllegalArgumentException("No TerValueSet found for " + URL_MY_VALUE_SET + " version v2"));
optionalTermValueSet = myTermValueSetDao.findTermValueSetByUrlAndVersion(URL_MY_VALUE_SET, "v2");
if (!optionalTermValueSet.isPresent()) {
fail("No TerValueSet found for " + URL_MY_VALUE_SET + " version v2");
}
myValueSetDao.delete(myValueSets.get(ValueSetVersions.V2).getResource().getIdElement());
assertEquals(0, myTermValueSetDao.findTermValueSetByUrl(PageRequest.of(0, 10), URL_MY_VALUE_SET).size());

View File

@ -5,13 +5,18 @@ import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.transaction.annotation.Transactional;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.persistence.PersistenceContextType;
import javax.sql.DataSource;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.HashSet;
import java.util.List;
import java.util.stream.Collectors;
import java.util.Set;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.collection.IsIterableContainingInAnyOrder.containsInAnyOrder;
@ -24,21 +29,45 @@ class ResourceTableFKProviderTest extends BaseJpaR4Test {
protected EntityManager myEntityManager;
@Autowired
ResourceTableFKProvider myResourceTableFKProvider;
@Autowired
private DataSource myDataSource;
@Test
public void testWeHaveAllForeignKeys() {
runInTransaction(()-> {
List<Object[]> result = myEntityManager.createNativeQuery("SELECT FKTABLE_NAME, FKCOLUMN_NAME FROM INFORMATION_SCHEMA.CROSS_REFERENCES WHERE PKTABLE_NAME = 'HFJ_RESOURCE'").getResultList();
List<ResourceForeignKey> expected = result.stream().map(a -> new ResourceForeignKey(a[0].toString(), a[1].toString())).collect(Collectors.toList());
public void testWeHaveAllForeignKeys() throws SQLException {
Set<ResourceForeignKey> expected = new HashSet<>();
// Add the extra FKs that are not available in the CROSS_REFERENCES table
expected.add(new ResourceForeignKey("HFJ_HISTORY_TAG", "RES_ID"));
//expected.add(new ResourceForeignKey("TRM_CODESYSTEM_VER", "RES_ID"));
//expected.add(new ResourceForeignKey("HFJ_RES_VER_PROV", "RES_PID"));
// If this assertion fails, it means hapi-fhir has added a new foreign-key dependency to HFJ_RESOURCE. To fix
// the test, add the missing key to myResourceTableFKProvider.getResourceForeignKeys()
assertThat(myResourceTableFKProvider.getResourceForeignKeys(), containsInAnyOrder(expected.toArray()));
});
try (Connection connection = myDataSource.getConnection()) {
DatabaseMetaData metadata = connection.getMetaData();
Set<String> tableNames = new HashSet<>();
ResultSet tables = metadata.getTables(null, null, null, null);
while (tables.next()) {
tableNames.add(tables.getString("TABLE_NAME"));
}
tableNames.remove("HFJ_RESOURCE");
for (String nextTargetTable : tableNames) {
String sourceTable = "HFJ_RESOURCE";
ResultSet crossRefs = metadata.getCrossReference(null, null, sourceTable, null, null, nextTargetTable);
while (crossRefs.next()) {
String fkTableName = crossRefs.getString("FKTABLE_NAME");
String fkColumnName = crossRefs.getString("FKCOLUMN_NAME");
ResourceForeignKey foreignKey = new ResourceForeignKey(fkTableName, fkColumnName);
ourLog.info("Found FK to HFJ_RESOURCE: {}", foreignKey);
expected.add(foreignKey);
}
}
}
// Add the extra FKs that are not available in the CROSS_REFERENCES table
expected.add(new ResourceForeignKey("HFJ_HISTORY_TAG", "RES_ID"));
// If this assertion fails, it means hapi-fhir has added a new foreign-key dependency to HFJ_RESOURCE. To fix
// the test, add the missing key to myResourceTableFKProvider.getResourceForeignKeys()
List<ResourceForeignKey> actual = myResourceTableFKProvider.getResourceForeignKeys();
assertThat(actual, containsInAnyOrder(expected.toArray()));
}
}

View File

@ -31,7 +31,7 @@ import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.mockito.Matchers.any;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.inOrder;
import static org.mockito.Mockito.mock;
@ -463,14 +463,14 @@ public class FhirResourceDaoR4InterceptorTest extends BaseJpaR4Test {
IServerOperationInterceptor interceptor = mock(IServerOperationInterceptor.class);
myInterceptorRegistry.registerInterceptor(interceptor);
verify(interceptor, times(0)).resourceCreated(Mockito.isNull(RequestDetails.class), any());
verify(interceptor, times(0)).resourceCreated(Mockito.isNull(), any());
Patient p = new Patient();
p.addName().setFamily("PATIENT");
IIdType id = myPatientDao.create(p, (RequestDetails) null).getId();
assertEquals(1L, id.getVersionIdPartAsLong().longValue());
verify(interceptor, times(1)).resourceCreated(Mockito.isNull(RequestDetails.class), any());
verify(interceptor, times(1)).resourceCreated(Mockito.isNull(), any());
}
@Test
@ -478,8 +478,8 @@ public class FhirResourceDaoR4InterceptorTest extends BaseJpaR4Test {
IServerOperationInterceptor interceptor = mock(IServerOperationInterceptor.class);
myInterceptorRegistry.registerInterceptor(interceptor);
verify(interceptor, times(0)).resourceCreated(Mockito.isNull(RequestDetails.class), any());
verify(interceptor, times(0)).resourceDeleted(Mockito.isNull(RequestDetails.class), any());
verify(interceptor, times(0)).resourceCreated(Mockito.isNull(), any());
verify(interceptor, times(0)).resourceDeleted(Mockito.isNull(), any());
Patient p = new Patient();
p.addName().setFamily("PATIENT");
@ -489,8 +489,8 @@ public class FhirResourceDaoR4InterceptorTest extends BaseJpaR4Test {
p.addName().setFamily("2");
myPatientDao.delete(p.getIdElement().toUnqualifiedVersionless());
verify(interceptor, times(1)).resourceCreated(Mockito.isNull(RequestDetails.class), any());
verify(interceptor, times(1)).resourceDeleted(Mockito.isNull(RequestDetails.class), any());
verify(interceptor, times(1)).resourceCreated(Mockito.isNull(), any());
verify(interceptor, times(1)).resourceDeleted(Mockito.isNull(), any());
}
/**
@ -602,9 +602,9 @@ public class FhirResourceDaoR4InterceptorTest extends BaseJpaR4Test {
IServerOperationInterceptor interceptor = mock(IServerOperationInterceptor.class);
myInterceptorRegistry.registerInterceptor(interceptor);
verify(interceptor, times(0)).resourceCreated(Mockito.isNull(RequestDetails.class), any());
verify(interceptor, times(0)).resourceUpdated(Mockito.isNull(RequestDetails.class), any());
verify(interceptor, times(0)).resourceUpdated(Mockito.isNull(RequestDetails.class), any(), any());
verify(interceptor, times(0)).resourceCreated(Mockito.isNull(), any());
verify(interceptor, times(0)).resourceUpdated(Mockito.isNull(), any());
verify(interceptor, times(0)).resourceUpdated(Mockito.isNull(), any(), any());
Patient p = new Patient();
p.addName().setFamily("PATIENT");
@ -614,9 +614,9 @@ public class FhirResourceDaoR4InterceptorTest extends BaseJpaR4Test {
p.addName().setFamily("2");
myPatientDao.update(p);
verify(interceptor, times(1)).resourceCreated(Mockito.isNull(RequestDetails.class), any());
verify(interceptor, times(1)).resourceUpdated(Mockito.isNull(RequestDetails.class), any());
verify(interceptor, times(1)).resourceUpdated(Mockito.isNull(RequestDetails.class), any(), any());
verify(interceptor, times(1)).resourceCreated(Mockito.isNull(), any());
verify(interceptor, times(1)).resourceUpdated(Mockito.isNull(), any());
verify(interceptor, times(1)).resourceUpdated(Mockito.isNull(), any(), any());
}
private class MyOneResourceAnswer implements Answer {

View File

@ -1,68 +1,27 @@
package ca.uhn.fhir.jpa.provider.r5;
import ca.uhn.fhir.jpa.interceptor.CascadingDeleteInterceptor;
import ca.uhn.fhir.jpa.provider.r5.BaseResourceProviderR5Test;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.MethodOutcome;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.client.interceptor.SimpleRequestHeaderInterceptor;
import ca.uhn.fhir.rest.server.exceptions.AuthenticationException;
import ca.uhn.fhir.rest.server.exceptions.ForbiddenOperationException;
import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException;
import ca.uhn.fhir.rest.server.interceptor.auth.AuthorizationInterceptor;
import ca.uhn.fhir.rest.server.interceptor.auth.IAuthRule;
import ca.uhn.fhir.rest.server.interceptor.auth.IAuthRuleTester;
import ca.uhn.fhir.rest.server.interceptor.auth.PolicyEnum;
import ca.uhn.fhir.rest.server.interceptor.auth.RuleBuilder;
import ca.uhn.fhir.rest.server.provider.ProviderConstants;
import ca.uhn.fhir.util.UrlUtil;
import org.apache.commons.io.IOUtils;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpDelete;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.StringEntity;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r5.model.Bundle;
import org.hl7.fhir.r5.model.CodeableConcept;
import org.hl7.fhir.r5.model.Coding;
import org.hl7.fhir.r5.model.Condition;
import org.hl7.fhir.r5.model.Encounter;
import org.hl7.fhir.r5.model.Enumerations;
import org.hl7.fhir.r5.model.IdType;
import org.hl7.fhir.r5.model.Identifier;
import org.hl7.fhir.r5.model.Observation;
import org.hl7.fhir.r5.model.Organization;
import org.hl7.fhir.r5.model.Parameters;
import org.hl7.fhir.r5.model.Patient;
import org.hl7.fhir.r5.model.Practitioner;
import org.hl7.fhir.r5.model.Reference;
import org.hl7.fhir.r5.model.StringType;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.List;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.startsWith;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.fail;
public class AuthorizationInterceptorJpaR5Test extends BaseResourceProviderR5Test {
private static final Logger ourLog = LoggerFactory.getLogger(AuthorizationInterceptorJpaR5Test.class);
@BeforeEach
@Override
public void before() throws Exception {

View File

@ -21,11 +21,13 @@ import org.apache.http.entity.StringEntity;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r5.model.Bundle;
import org.hl7.fhir.r5.model.Bundle.BundleEntryComponent;
import org.hl7.fhir.r5.model.CapabilityStatement;
import org.hl7.fhir.r5.model.CodeableConcept;
import org.hl7.fhir.r5.model.DateTimeType;
import org.hl7.fhir.r5.model.IdType;
import org.hl7.fhir.r5.model.Observation;
import org.hl7.fhir.r5.model.Observation.ObservationComponentComponent;
import org.hl7.fhir.r5.model.OperationOutcome;
import org.hl7.fhir.r5.model.Patient;
import org.hl7.fhir.r5.model.Quantity;
import org.junit.jupiter.api.AfterEach;
@ -96,7 +98,7 @@ public class ResourceProviderR5Test extends BaseResourceProviderR5Test {
Bundle output = myClient
.search()
.forResource("Patient")
.where(Patient.NAME.contains().value("ZAB"))
.where(org.hl7.fhir.r4.model.Patient.NAME.contains().value("ZAB"))
.returnBundle(Bundle.class)
.execute();
List<String> ids = output.getEntry().stream().map(t -> t.getResource().getIdElement().toUnqualifiedVersionless().getValue()).collect(Collectors.toList());
@ -105,7 +107,7 @@ public class ResourceProviderR5Test extends BaseResourceProviderR5Test {
output = myClient
.search()
.forResource("Patient")
.where(Patient.NAME.contains().value("zab"))
.where(org.hl7.fhir.r4.model.Patient.NAME.contains().value("zab"))
.returnBundle(Bundle.class)
.execute();
ids = output.getEntry().stream().map(t -> t.getResource().getIdElement().toUnqualifiedVersionless().getValue()).collect(Collectors.toList());
@ -122,7 +124,7 @@ public class ResourceProviderR5Test extends BaseResourceProviderR5Test {
// Perform the search
Bundle response0 = myClient.search()
.forResource("Patient")
.where(Patient.NAME.matches().value("Hello"))
.where(org.hl7.fhir.r4.model.Patient.NAME.matches().value("Hello"))
.returnBundle(Bundle.class)
.execute();
assertEquals(1, response0.getEntry().size());
@ -130,7 +132,7 @@ public class ResourceProviderR5Test extends BaseResourceProviderR5Test {
// Perform the search again (should return the same)
Bundle response1 = myClient.search()
.forResource("Patient")
.where(Patient.NAME.matches().value("Hello"))
.where(org.hl7.fhir.r4.model.Patient.NAME.matches().value("Hello"))
.returnBundle(Bundle.class)
.execute();
assertEquals(1, response1.getEntry().size());
@ -142,7 +144,7 @@ public class ResourceProviderR5Test extends BaseResourceProviderR5Test {
// Perform the search again (shouldn't return the errored out search)
Bundle response3 = myClient.search()
.forResource("Patient")
.where(Patient.NAME.matches().value("Hello"))
.where(org.hl7.fhir.r4.model.Patient.NAME.matches().value("Hello"))
.returnBundle(Bundle.class)
.execute();
assertEquals(1, response3.getEntry().size());
@ -182,7 +184,7 @@ public class ResourceProviderR5Test extends BaseResourceProviderR5Test {
// Perform a search for the first page
Bundle response0 = myClient.search()
.forResource("Patient")
.where(Patient.NAME.matches().value("Hello"))
.where(org.hl7.fhir.r4.model.Patient.NAME.matches().value("Hello"))
.returnBundle(Bundle.class)
.count(1)
.execute();
@ -222,7 +224,15 @@ public class ResourceProviderR5Test extends BaseResourceProviderR5Test {
try (CloseableHttpResponse resp = ourHttpClient.execute(post)) {
String respString = IOUtils.toString(resp.getEntity().getContent(), Charsets.UTF_8);
ourLog.info(respString);
assertEquals(200, resp.getStatusLine().getStatusCode());
// assertEquals(200, resp.getStatusLine().getStatusCode());
// As of 2021-12-28, the R5 structures return a version string that isn't
// actually in the fhirVersion ValueSet. If this stops being the case this
// test will fail and the line above should be restored
OperationOutcome oo = myFhirCtx.newJsonParser().parseResource(OperationOutcome.class, respString);
assertEquals(1, oo.getIssue().size());
assertEquals("The value provided ('5.0.0-snapshot1') is not in the value set 'FHIRVersion' (http://hl7.org/fhir/ValueSet/FHIR-version|4.6.0), and a code is required from this value set) (error message = Unknown code '5.0.0-snapshot1' for in-memory expansion of ValueSet 'http://hl7.org/fhir/ValueSet/FHIR-version')", oo.getIssue().get(0).getDiagnostics());
}
}

View File

@ -1,7 +1,6 @@
package ca.uhn.fhir.jpa.search.reindex;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao;

View File

@ -66,6 +66,7 @@ import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Slice;
import org.springframework.data.domain.Sort;
import org.springframework.data.repository.query.FluentQuery;
import org.springframework.mock.web.MockHttpServletRequest;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.TransactionDefinition;
@ -96,6 +97,7 @@ import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.function.Function;
import java.util.stream.Collectors;
import static org.hamcrest.MatcherAssert.assertThat;
@ -525,6 +527,11 @@ public class GiantTransactionPerfTest {
public <S extends ResourceHistoryTable> boolean exists(Example<S> example) {
throw new UnsupportedOperationException();
}
@Override
public <S extends ResourceHistoryTable, R> R findBy(Example<S> example, Function<FluentQuery.FetchableFluentQuery<S>, R> queryFunction) {
throw new UnsupportedOperationException();
}
}
private class MockEntityManager implements EntityManager {

View File

@ -82,7 +82,7 @@ public class TerminologyLoaderSvcSnomedCtTest extends BaseLoaderTest {
mySvc.loadSnomedCt(myFiles.getFiles(), mySrd);
verify(myTermCodeSystemStorageSvc).storeNewCodeSystemVersion(any(CodeSystem.class), myCsvCaptor.capture(), any(RequestDetails.class), anyList(), anyListOf(ConceptMap.class));
verify(myTermCodeSystemStorageSvc).storeNewCodeSystemVersion(any(CodeSystem.class), myCsvCaptor.capture(), any(RequestDetails.class), anyList(), anyList());
TermCodeSystemVersion csv = myCsvCaptor.getValue();
TreeSet<String> allCodes = toCodes(csv, true);

View File

@ -7,7 +7,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.7.0-PRE8-SNAPSHOT</version>
<version>5.7.0-PRE9-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -17,7 +17,7 @@ import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Matchers;
import org.mockito.ArgumentMatchers;
import org.mockito.Mock;
import org.mockito.MockedStatic;
import org.mockito.MockitoAnnotations;
@ -130,7 +130,7 @@ public class TranslatorHelperTest implements CqlProviderTestBase {
CqlTranslator translator = null;
try {
MockedStatic<CqlTranslator> cqlTranslator = mockStatic(CqlTranslator.class);
when(CqlTranslator.fromStream(any(InputStream.class), any(ModelManager.class), any(LibraryManager.class), Matchers.<CqlTranslator.Options>anyVararg())).thenThrow(IOException.class);
when(CqlTranslator.fromStream(any(InputStream.class), any(ModelManager.class), any(LibraryManager.class), any())).thenThrow(IOException.class);
translator = TranslatorHelper.getTranslator(new ByteArrayInputStream("INVALID-FILENAME".getBytes(StandardCharsets.UTF_8)), libraryManager, modelManager);
fail();
} catch (IllegalArgumentException | IOException e) {

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.7.0-PRE8-SNAPSHOT</version>
<version>5.7.0-PRE9-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -13,7 +13,7 @@ import ca.uhn.fhir.mdm.api.MdmMatchResultEnum;
import ca.uhn.fhir.mdm.model.MdmTransactionContext;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.rest.server.messaging.ResourceOperationMessage;
import org.hibernate.dialect.H2Dialect;
import ca.uhn.fhir.jpa.model.dialect.HapiFhirH2Dialect;
import org.hibernate.search.backend.lucene.cfg.LuceneBackendSettings;
import org.hibernate.search.backend.lucene.cfg.LuceneIndexSettings;
import org.hibernate.search.engine.cfg.BackendSettings;

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.7.0-PRE8-SNAPSHOT</version>
<version>5.7.0-PRE9-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -0,0 +1,18 @@
package ca.uhn.fhir.jpa.model.dialect;
import org.hibernate.dialect.H2Dialect;
/**
 * HAPI FHIR dialect for H2 database
 */
public class HapiFhirH2Dialect extends H2Dialect {

	/**
	 * Workaround until this bug is fixed:
	 * https://hibernate.atlassian.net/browse/HHH-15002
	 *
	 * @param bool the boolean to render
	 * @return the literal SQL text {@code "true"} or {@code "false"}
	 */
	@Override
	public String toBooleanValueString(boolean bool) {
		if (bool) {
			return "true";
		}
		return "false";
	}

}

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.7.0-PRE8-SNAPSHOT</version>
<version>5.7.0-PRE9-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -29,7 +29,6 @@ import com.github.benmanes.caffeine.cache.Caffeine;
import com.google.common.annotations.VisibleForTesting;
import org.hl7.fhir.exceptions.FHIRException;
import org.hl7.fhir.exceptions.PathEngineException;
import org.hl7.fhir.instance.model.api.IBase;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.r4.context.IWorkerContext;
import org.hl7.fhir.r4.hapi.ctx.HapiWorkerContext;
@ -43,7 +42,6 @@ import org.hl7.fhir.r4.model.ValueSet;
import org.hl7.fhir.r4.utils.FHIRPathEngine;
import javax.annotation.PostConstruct;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;

View File

@ -28,7 +28,6 @@ import com.github.benmanes.caffeine.cache.Cache;
import com.github.benmanes.caffeine.cache.Caffeine;
import org.hl7.fhir.exceptions.FHIRException;
import org.hl7.fhir.exceptions.PathEngineException;
import org.hl7.fhir.instance.model.api.IBase;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.r5.context.IWorkerContext;
import org.hl7.fhir.r5.hapi.ctx.HapiWorkerContext;
@ -42,7 +41,6 @@ import org.hl7.fhir.r5.model.ValueSet;
import org.hl7.fhir.r5.utils.FHIRPathEngine;
import javax.annotation.PostConstruct;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;

View File

@ -61,7 +61,6 @@ import org.hl7.fhir.instance.model.api.IIdType;
import org.springframework.beans.factory.annotation.Autowired;
import javax.annotation.Nonnull;
import javax.validation.constraints.NotNull;
import java.util.Collection;
import java.util.Date;
import java.util.HashSet;

View File

@ -50,7 +50,6 @@ import javax.annotation.Nonnull;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.regex.Pattern;
public class InMemoryResourceMatcher {

View File

@ -66,8 +66,8 @@ public class ReadOnlySearchParamCache {
}
protected Map<String, RuntimeSearchParam> getSearchParamMap(String theResourceName) {
Map<String, RuntimeSearchParam> retval = myResourceNameToSpNameToSp.get(theResourceName);
if (retval == null) {
Map<String, RuntimeSearchParam> retVal = myResourceNameToSpNameToSp.get(theResourceName);
if (retVal == null) {
return Collections.emptyMap();
}
return Collections.unmodifiableMap(myResourceNameToSpNameToSp.get(theResourceName));
@ -92,11 +92,36 @@ public class ReadOnlySearchParamCache {
Set<String> resourceNames = theFhirContext.getResourceTypes();
IBaseBundle allSearchParameterBundle = null;
if (theFhirContext.getVersion().getVersion() == FhirVersionEnum.R4) {
IBaseBundle allSearchParameterBundle = (IBaseBundle) theFhirContext.newJsonParser().parseResource(ClasspathUtil.loadResourceAsStream("org/hl7/fhir/r4/model/sp/search-parameters.json"));
allSearchParameterBundle = (IBaseBundle) theFhirContext.newJsonParser().parseResource(ClasspathUtil.loadResourceAsStream("org/hl7/fhir/r4/model/sp/search-parameters.json"));
} else if (theFhirContext.getVersion().getVersion() == FhirVersionEnum.R5) {
allSearchParameterBundle = (IBaseBundle) theFhirContext.newXmlParser().parseResource(ClasspathUtil.loadResourceAsStream("org/hl7/fhir/r5/model/sp/search-parameters.xml"));
}
if (allSearchParameterBundle != null) {
for (IBaseResource next : BundleUtil.toListOfResources(theFhirContext, allSearchParameterBundle)) {
RuntimeSearchParam nextCanonical = theCanonicalizer.canonicalizeSearchParameter(next);
if (nextCanonical != null) {
// Force status to ACTIVE - For whatever reason the R5 draft SPs ship with
// a status of DRAFT which means the server doesn't actually apply them.
// At least this was the case as of 2021-12-24 - JA
nextCanonical = new RuntimeSearchParam(
nextCanonical.getId(),
nextCanonical.getUri(),
nextCanonical.getName(),
nextCanonical.getDescription(),
nextCanonical.getPath(),
nextCanonical.getParamType(),
nextCanonical.getProvidesMembershipInCompartments(),
nextCanonical.getTargets(),
RuntimeSearchParam.RuntimeSearchParamStatusEnum.ACTIVE,
nextCanonical.getComboSearchParamType(),
nextCanonical.getComponents(),
nextCanonical.getBase());
Collection<String> base = nextCanonical.getBase();
if (base.contains("Resource") || base.contains("DomainResource")) {
base = resourceNames;

View File

@ -46,7 +46,7 @@ public class RuntimeSearchParamCache extends ReadOnlySearchParamCache {
// so it may get added more than once by this method
ourLog.trace("Search param was previously registered for url: {}", uri);
} else if (existingForUrl != null) {
ourLog.warn("Multiple search parameters have URL: {}", uri);
ourLog.debug("Multiple search parameters have URL: {}", uri);
} else {
myUrlToParam.put(uri, theSearchParam);
}
@ -69,7 +69,8 @@ public class RuntimeSearchParamCache extends ReadOnlySearchParamCache {
for (Map.Entry<String, Map<String, RuntimeSearchParam>> nextBuiltInEntry : builtInSps) {
for (RuntimeSearchParam nextParam : nextBuiltInEntry.getValue().values()) {
String nextResourceName = nextBuiltInEntry.getKey();
add(nextResourceName, nextParam.getName(), nextParam);
String nextParamName = nextParam.getName();
add(nextResourceName, nextParamName, nextParam);
}
ourLog.trace("Have {} built-in SPs for: {}", nextBuiltInEntry.getValue().size(), nextBuiltInEntry.getKey());
@ -95,8 +96,8 @@ public class RuntimeSearchParamCache extends ReadOnlySearchParamCache {
}
public static RuntimeSearchParamCache fromReadOnlySearchParmCache(ReadOnlySearchParamCache theBuiltInSearchParams) {
RuntimeSearchParamCache retval = new RuntimeSearchParamCache();
retval.putAll(theBuiltInSearchParams);
return retval;
RuntimeSearchParamCache retVal = new RuntimeSearchParamCache();
retVal.putAll(theBuiltInSearchParams);
return retVal;
}
}

View File

@ -88,6 +88,13 @@ public class SearchParamRegistryImpl implements ISearchParamRegistry, IResourceC
private IInterceptorService myInterceptorBroadcaster;
private IResourceChangeListenerCache myResourceChangeListenerCache;
/**
* Constructor
*/
public SearchParamRegistryImpl() {
super();
}
@Override
public RuntimeSearchParam getActiveSearchParam(String theResourceName, String theParamName) {
requiresActiveSearchParams();
@ -156,7 +163,8 @@ public class SearchParamRegistryImpl implements ISearchParamRegistry, IResourceC
private void initializeActiveSearchParams(Collection<IBaseResource> theJpaSearchParams) {
StopWatch sw = new StopWatch();
RuntimeSearchParamCache searchParams = RuntimeSearchParamCache.fromReadOnlySearchParmCache(getBuiltInSearchParams());
ReadOnlySearchParamCache builtInSearchParams = getBuiltInSearchParams();
RuntimeSearchParamCache searchParams = RuntimeSearchParamCache.fromReadOnlySearchParmCache(builtInSearchParams);
long overriddenCount = overrideBuiltinSearchParamsWithActiveJpaSearchParams(searchParams, theJpaSearchParams);
ourLog.trace("Have overridden {} built-in search parameters", overriddenCount);
removeInactiveSearchParams(searchParams);

View File

@ -58,7 +58,7 @@ public class LastNParameterHelper {
private static boolean isLastNParameterDstu3(String theParamName) {
return (theParamName.equals(org.hl7.fhir.dstu3.model.Observation.SP_SUBJECT) || theParamName.equals(org.hl7.fhir.dstu3.model.Observation.SP_PATIENT)
|| theParamName.equals(org.hl7.fhir.dstu3.model.Observation.SP_CATEGORY) || theParamName.equals(org.hl7.fhir.r5.model.Observation.SP_CODE))
|| theParamName.equals(org.hl7.fhir.dstu3.model.Observation.SP_CATEGORY) || theParamName.equals(org.hl7.fhir.r4.model.Observation.SP_CODE))
|| theParamName.equals(org.hl7.fhir.dstu3.model.Observation.SP_DATE);
}
@ -69,14 +69,14 @@ public class LastNParameterHelper {
}
private static boolean isLastNParameterR5(String theParamName) {
return (theParamName.equals(org.hl7.fhir.r5.model.Observation.SP_SUBJECT) || theParamName.equals(org.hl7.fhir.r5.model.Observation.SP_PATIENT)
|| theParamName.equals(org.hl7.fhir.r5.model.Observation.SP_CATEGORY) || theParamName.equals(org.hl7.fhir.r5.model.Observation.SP_CODE))
|| theParamName.equals(org.hl7.fhir.r5.model.Observation.SP_DATE);
return (theParamName.equals(org.hl7.fhir.r4.model.Observation.SP_SUBJECT) || theParamName.equals(org.hl7.fhir.r4.model.Observation.SP_PATIENT)
|| theParamName.equals(org.hl7.fhir.r4.model.Observation.SP_CATEGORY) || theParamName.equals(org.hl7.fhir.r4.model.Observation.SP_CODE))
|| theParamName.equals(org.hl7.fhir.r4.model.Observation.SP_DATE);
}
public static String getSubjectParamName(FhirContext theContext) {
if (theContext.getVersion().getVersion() == FhirVersionEnum.R5) {
return org.hl7.fhir.r5.model.Observation.SP_SUBJECT;
return org.hl7.fhir.r4.model.Observation.SP_SUBJECT;
} else if (theContext.getVersion().getVersion() == FhirVersionEnum.R4) {
return org.hl7.fhir.r4.model.Observation.SP_SUBJECT;
} else if (theContext.getVersion().getVersion() == FhirVersionEnum.DSTU3) {
@ -88,7 +88,7 @@ public class LastNParameterHelper {
public static String getPatientParamName(FhirContext theContext) {
if (theContext.getVersion().getVersion() == FhirVersionEnum.R5) {
return org.hl7.fhir.r5.model.Observation.SP_PATIENT;
return org.hl7.fhir.r4.model.Observation.SP_PATIENT;
} else if (theContext.getVersion().getVersion() == FhirVersionEnum.R4) {
return org.hl7.fhir.r4.model.Observation.SP_PATIENT;
} else if (theContext.getVersion().getVersion() == FhirVersionEnum.DSTU3) {
@ -100,7 +100,7 @@ public class LastNParameterHelper {
public static String getEffectiveParamName(FhirContext theContext) {
if (theContext.getVersion().getVersion() == FhirVersionEnum.R5) {
return org.hl7.fhir.r5.model.Observation.SP_DATE;
return org.hl7.fhir.r4.model.Observation.SP_DATE;
} else if (theContext.getVersion().getVersion() == FhirVersionEnum.R4) {
return org.hl7.fhir.r4.model.Observation.SP_DATE;
} else if (theContext.getVersion().getVersion() == FhirVersionEnum.DSTU3) {
@ -112,7 +112,7 @@ public class LastNParameterHelper {
public static String getCategoryParamName(FhirContext theContext) {
if (theContext.getVersion().getVersion() == FhirVersionEnum.R5) {
return org.hl7.fhir.r5.model.Observation.SP_CATEGORY;
return org.hl7.fhir.r4.model.Observation.SP_CATEGORY;
} else if (theContext.getVersion().getVersion() == FhirVersionEnum.R4) {
return org.hl7.fhir.r4.model.Observation.SP_CATEGORY;
} else if (theContext.getVersion().getVersion() == FhirVersionEnum.DSTU3) {
@ -124,7 +124,7 @@ public class LastNParameterHelper {
public static String getCodeParamName(FhirContext theContext) {
if (theContext.getVersion().getVersion() == FhirVersionEnum.R5) {
return org.hl7.fhir.r5.model.Observation.SP_CODE;
return org.hl7.fhir.r4.model.Observation.SP_CODE;
} else if (theContext.getVersion().getVersion() == FhirVersionEnum.R4) {
return org.hl7.fhir.r4.model.Observation.SP_CODE;
} else if (theContext.getVersion().getVersion() == FhirVersionEnum.DSTU3) {

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.7.0-PRE8-SNAPSHOT</version>
<version>5.7.0-PRE9-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.7.0-PRE8-SNAPSHOT</version>
<version>5.7.0-PRE9-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
@ -36,7 +36,6 @@
<dependency>
<groupId>com.h2database</groupId>
<artifactId>h2</artifactId>
<version>1.4.199</version>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>

View File

@ -31,7 +31,7 @@ import net.ttddyy.dsproxy.listener.SingleQueryCountHolder;
import net.ttddyy.dsproxy.listener.logging.SLF4JLogLevel;
import net.ttddyy.dsproxy.support.ProxyDataSourceBuilder;
import org.apache.commons.dbcp2.BasicDataSource;
import org.hibernate.dialect.H2Dialect;
import ca.uhn.fhir.jpa.model.dialect.HapiFhirH2Dialect;
import org.hibernate.search.backend.lucene.cfg.LuceneBackendSettings;
import org.hibernate.search.backend.lucene.cfg.LuceneIndexSettings;
import org.hibernate.search.engine.cfg.BackendSettings;
@ -107,7 +107,7 @@ public class TestJpaDstu3Config extends BaseJavaConfigDstu3 {
extraProperties.put("hibernate.format_sql", "false");
extraProperties.put("hibernate.show_sql", "false");
extraProperties.put("hibernate.hbm2ddl.auto", "update");
extraProperties.put("hibernate.dialect", H2Dialect.class.getName());
extraProperties.put("hibernate.dialect", HapiFhirH2Dialect.class.getName());
extraProperties.put(BackendSettings.backendKey(BackendSettings.TYPE), "lucene");
extraProperties.put(BackendSettings.backendKey(LuceneBackendSettings.ANALYSIS_CONFIGURER), HapiLuceneAnalysisConfigurer.class.getName());

View File

@ -33,7 +33,7 @@ import net.ttddyy.dsproxy.listener.SingleQueryCountHolder;
import net.ttddyy.dsproxy.listener.logging.SLF4JLogLevel;
import net.ttddyy.dsproxy.support.ProxyDataSourceBuilder;
import org.apache.commons.dbcp2.BasicDataSource;
import org.hibernate.dialect.H2Dialect;
import ca.uhn.fhir.jpa.model.dialect.HapiFhirH2Dialect;
import org.hibernate.search.backend.lucene.cfg.LuceneBackendSettings;
import org.hibernate.search.backend.lucene.cfg.LuceneIndexSettings;
import org.hibernate.search.engine.cfg.BackendSettings;
@ -109,7 +109,7 @@ public class TestJpaR4Config extends BaseJavaConfigR4 {
extraProperties.put("hibernate.format_sql", "false");
extraProperties.put("hibernate.show_sql", "false");
extraProperties.put("hibernate.hbm2ddl.auto", "update");
extraProperties.put("hibernate.dialect", H2Dialect.class.getName());
extraProperties.put("hibernate.dialect", HapiFhirH2Dialect.class.getName());
extraProperties.put(BackendSettings.backendKey(BackendSettings.TYPE), "lucene");
extraProperties.put(BackendSettings.backendKey(LuceneBackendSettings.ANALYSIS_CONFIGURER), HapiLuceneAnalysisConfigurer.class.getName());

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>5.7.0-PRE8-SNAPSHOT</version>
<version>5.7.0-PRE9-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -7,7 +7,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.7.0-PRE8-SNAPSHOT</version>
<version>5.7.0-PRE9-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -35,21 +35,18 @@ import ca.uhn.fhir.mdm.rules.json.MdmSimilarityJson;
import ca.uhn.fhir.parser.DataFormatException;
import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
import ca.uhn.fhir.util.FhirTerser;
import org.apache.commons.lang3.Validate;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import javax.xml.crypto.Data;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
@Service
public class MdmRuleValidator implements IMdmRuleValidator {

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.7.0-PRE8-SNAPSHOT</version>
<version>5.7.0-PRE9-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.7.0-PRE8-SNAPSHOT</version>
<version>5.7.0-PRE9-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -32,7 +32,6 @@ import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
import ca.uhn.fhir.util.VersionUtil;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.Multimap;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.Validate;
import org.hl7.fhir.instance.model.api.IBaseResource;
@ -49,7 +48,6 @@ import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.IdentityHashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;

View File

@ -32,6 +32,7 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import java.util.stream.Collectors;
// TODO: JA remove default methods
public interface ISearchParamRegistry {
@ -52,8 +53,6 @@ public interface ISearchParamRegistry {
default void forceRefresh() {
}
;
/**
* Request that the cache be refreshed at the next convenient time (in a different thread)
*/
@ -103,4 +102,5 @@ public interface ISearchParamRegistry {
*/
@Nullable
RuntimeSearchParam getActiveSearchParamByUrl(String theUrl);
}

View File

@ -10,6 +10,7 @@ import java.util.HashSet;
import java.util.List;
import java.util.Set;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
@ -20,7 +21,7 @@ public class RuleImplOpTest {
@Test
public void testToString() {
new RuleImplOp("").toString();
assertEquals("RuleImplOp[op=<null>,transactionAppliesToOp=<null>,appliesTo=<null>,appliesToTypes=<null>,classifierCompartmentName=<null>,classifierCompartmentOwners=<null>,classifierType=<null>]", new RuleImplOp("").toString());
}
@Test

View File

@ -3,12 +3,14 @@ package ca.uhn.fhir.rest.server.interceptor.auth;
import ca.uhn.fhir.rest.server.interceptor.auth.AuthorizationInterceptor.Verdict;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertEquals;
public class VerdictTest {
@Test
public void testToString() {
Verdict v = new AuthorizationInterceptor.Verdict(PolicyEnum.ALLOW, new RuleImplOp("foo"));
v.toString();
assertEquals("AuthorizationInterceptor.Verdict[rule=foo,decision=ALLOW]", v.toString());
}
}

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.7.0-PRE8-SNAPSHOT</version>
<version>5.7.0-PRE9-SNAPSHOT</version>
<relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-spring-boot-samples</artifactId>
<version>5.7.0-PRE8-SNAPSHOT</version>
<version>5.7.0-PRE9-SNAPSHOT</version>
</parent>
<artifactId>hapi-fhir-spring-boot-sample-client-apache</artifactId>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-spring-boot-samples</artifactId>
<version>5.7.0-PRE8-SNAPSHOT</version>
<version>5.7.0-PRE9-SNAPSHOT</version>
</parent>
<artifactId>hapi-fhir-spring-boot-sample-client-okhttp</artifactId>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-spring-boot-samples</artifactId>
<version>5.7.0-PRE8-SNAPSHOT</version>
<version>5.7.0-PRE9-SNAPSHOT</version>
</parent>
<artifactId>hapi-fhir-spring-boot-sample-server-jersey</artifactId>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-spring-boot</artifactId>
<version>5.7.0-PRE8-SNAPSHOT</version>
<version>5.7.0-PRE9-SNAPSHOT</version>
</parent>
<artifactId>hapi-fhir-spring-boot-samples</artifactId>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.7.0-PRE8-SNAPSHOT</version>
<version>5.7.0-PRE9-SNAPSHOT</version>
<relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>5.7.0-PRE8-SNAPSHOT</version>
<version>5.7.0-PRE9-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.7.0-PRE8-SNAPSHOT</version>
<version>5.7.0-PRE9-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -64,6 +64,11 @@ public class FlywayMigrationTask implements JavaMigration {
return false;
}
@Override
public boolean isBaselineMigration() {
return false;
}
@Override
public boolean canExecuteInTransaction() {
return false;

View File

@ -46,6 +46,7 @@ import org.hibernate.tool.schema.extract.spi.SequenceInformationExtractor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.jdbc.core.ColumnMapRowMapper;
import org.springframework.transaction.support.TransactionTemplate;
import javax.annotation.Nullable;
import javax.sql.DataSource;
@ -234,7 +235,8 @@ public class JdbcUtils {
DataSource dataSource = Objects.requireNonNull(theConnectionProperties.getDataSource());
try (Connection connection = dataSource.getConnection()) {
return theConnectionProperties.getTxTemplate().execute(t -> {
TransactionTemplate txTemplate = theConnectionProperties.getTxTemplate();
return txTemplate.execute(t -> {
DatabaseMetaData metadata;
try {
metadata = connection.getMetaData();

View File

@ -133,7 +133,7 @@ public class DropIndexTask extends BaseTableTask {
*/
if (getDriverType() == DriverTypeEnum.H2_EMBEDDED) {
@Language("SQL") String findConstraintSql = "SELECT DISTINCT constraint_name FROM INFORMATION_SCHEMA.INDEXES WHERE constraint_name = ? AND table_name = ?";
@Language("SQL") String findConstraintSql = "SELECT DISTINCT constraint_name FROM INFORMATION_SCHEMA.TABLE_CONSTRAINTS WHERE constraint_name = ? AND table_name = ?";
@Language("SQL") String dropConstraintSql = "ALTER TABLE " + getTableName() + " DROP CONSTRAINT ?";
findAndDropConstraint(findConstraintSql, dropConstraintSql);
} else if (getDriverType() == DriverTypeEnum.DERBY_EMBEDDED) {

View File

@ -5,6 +5,7 @@ import ca.uhn.fhir.jpa.migrate.taskdef.AddTableRawSqlTask;
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask;
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTest;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import org.flywaydb.core.api.FlywayException;
import org.hamcrest.Matchers;
import org.junit.jupiter.params.ParameterizedTest;
@ -72,7 +73,9 @@ public class SchemaMigratorTest extends BaseTest {
SchemaMigrator schemaMigrator = createSchemaMigrator("SOMETABLE", "create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255))", "2");
schemaMigrator.migrate();
schemaMigrator = createSchemaMigrator("SOMETABLE", "create table SOMEOTHERTABLE (PID bigint not null, TEXTCOL varchar(255))", "1");
AddTableRawSqlTask task1 = createAddTableTask("SOMEOTHERTABLE", "create table SOMEOTHERTABLE (PID bigint not null, TEXTCOL varchar(255))", "1");
AddTableRawSqlTask task2 = createAddTableTask("SOMETABLE", "create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255))", "2");
schemaMigrator = createSchemaMigrator(task1, task2);
schemaMigrator.setStrictOrder(true);
try {
@ -147,11 +150,22 @@ public class SchemaMigratorTest extends BaseTest {
@Nonnull
private SchemaMigrator createSchemaMigrator(String theTableName, String theSql, String theSchemaVersion) {
AddTableRawSqlTask task = createAddTableTask(theTableName, theSql, theSchemaVersion);
return createSchemaMigrator(task);
}
@Nonnull
private SchemaMigrator createSchemaMigrator(BaseTask... tasks) {
SchemaMigrator retVal = new SchemaMigrator(getUrl(), SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME, getDataSource(), new Properties(), Lists.newArrayList(tasks));
retVal.setDriverType(getDriverType());
return retVal;
}
@Nonnull
private AddTableRawSqlTask createAddTableTask(String theTableName, String theSql, String theSchemaVersion) {
AddTableRawSqlTask task = new AddTableRawSqlTask("1", theSchemaVersion);
task.setTableName(theTableName);
task.addSql(getDriverType(), theSql);
SchemaMigrator retval = new SchemaMigrator(getUrl(), SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME, getDataSource(), new Properties(), ImmutableList.of(task));
retval.setDriverType(getDriverType());
return retval;
return task;
}
}

View File

@ -6,6 +6,7 @@ import org.flywaydb.core.internal.command.DbMigrate;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;
import javax.annotation.Nonnull;
import java.sql.SQLException;
import java.util.function.Supplier;
@ -109,7 +110,7 @@ public class ModifyColumnTest extends BaseTest {
executeSql("create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255) not null)");
assertFalse(JdbcUtils.isColumnNullable(getConnectionProperties(), "SOMETABLE", "PID"));
assertFalse(JdbcUtils.isColumnNullable(getConnectionProperties(), "SOMETABLE", "TEXTCOL"));
assertEquals(new JdbcUtils.ColumnType(ColumnTypeEnum.LONG, 19), JdbcUtils.getColumnType(getConnectionProperties(), "SOMETABLE", "PID"));
assertEquals(getLongColumnType(theTestDatabaseDetails), JdbcUtils.getColumnType(getConnectionProperties(), "SOMETABLE", "PID"));
assertEquals(new JdbcUtils.ColumnType(ColumnTypeEnum.STRING, 255), JdbcUtils.getColumnType(getConnectionProperties(), "SOMETABLE", "TEXTCOL"));
// PID
@ -134,7 +135,7 @@ public class ModifyColumnTest extends BaseTest {
assertTrue(JdbcUtils.isColumnNullable(getConnectionProperties(), "SOMETABLE", "PID"));
assertTrue(JdbcUtils.isColumnNullable(getConnectionProperties(), "SOMETABLE", "TEXTCOL"));
assertEquals(new JdbcUtils.ColumnType(ColumnTypeEnum.LONG, 19), JdbcUtils.getColumnType(getConnectionProperties(), "SOMETABLE", "PID"));
assertEquals(getLongColumnType(theTestDatabaseDetails), JdbcUtils.getColumnType(getConnectionProperties(), "SOMETABLE", "PID"));
assertEquals(new JdbcUtils.ColumnType(ColumnTypeEnum.STRING, 255), JdbcUtils.getColumnType(getConnectionProperties(), "SOMETABLE", "TEXTCOL"));
// Make sure additional migrations don't crash
@ -152,7 +153,7 @@ public class ModifyColumnTest extends BaseTest {
executeSql("create table SOMETABLE (PID bigint not null, DATECOL timestamp not null)");
assertFalse(JdbcUtils.isColumnNullable(getConnectionProperties(), "SOMETABLE", "PID"));
assertFalse(JdbcUtils.isColumnNullable(getConnectionProperties(), "SOMETABLE", "DATECOL"));
assertEquals(new JdbcUtils.ColumnType(ColumnTypeEnum.LONG, 19), JdbcUtils.getColumnType(getConnectionProperties(), "SOMETABLE", "PID"));
assertEquals(getLongColumnType(theTestDatabaseDetails), JdbcUtils.getColumnType(getConnectionProperties(), "SOMETABLE", "PID"));
assertEquals(ColumnTypeEnum.DATE_TIMESTAMP, JdbcUtils.getColumnType(getConnectionProperties(), "SOMETABLE", "DATECOL").getColumnTypeEnum());
getMigrator().setNoColumnShrink(true);
@ -178,7 +179,7 @@ public class ModifyColumnTest extends BaseTest {
assertTrue(JdbcUtils.isColumnNullable(getConnectionProperties(), "SOMETABLE", "PID"));
assertTrue(JdbcUtils.isColumnNullable(getConnectionProperties(), "SOMETABLE", "DATECOL"));
assertEquals(new JdbcUtils.ColumnType(ColumnTypeEnum.LONG, 19), JdbcUtils.getColumnType(getConnectionProperties(), "SOMETABLE", "PID"));
assertEquals(getLongColumnType(theTestDatabaseDetails), JdbcUtils.getColumnType(getConnectionProperties(), "SOMETABLE", "PID"));
assertEquals(ColumnTypeEnum.DATE_TIMESTAMP, JdbcUtils.getColumnType(getConnectionProperties(), "SOMETABLE", "DATECOL").getColumnTypeEnum());
// Make sure additional migrations don't crash
@ -194,7 +195,7 @@ public class ModifyColumnTest extends BaseTest {
executeSql("create table SOMETABLE (PID bigint, TEXTCOL varchar(255))");
assertTrue(JdbcUtils.isColumnNullable(getConnectionProperties(), "SOMETABLE", "PID"));
assertTrue(JdbcUtils.isColumnNullable(getConnectionProperties(), "SOMETABLE", "TEXTCOL"));
assertEquals(new JdbcUtils.ColumnType(ColumnTypeEnum.LONG, 19), JdbcUtils.getColumnType(getConnectionProperties(), "SOMETABLE", "PID"));
assertEquals(getLongColumnType(theTestDatabaseDetails), JdbcUtils.getColumnType(getConnectionProperties(), "SOMETABLE", "PID"));
assertEquals(new JdbcUtils.ColumnType(ColumnTypeEnum.STRING, 255), JdbcUtils.getColumnType(getConnectionProperties(), "SOMETABLE", "TEXTCOL"));
// PID
@ -219,7 +220,7 @@ public class ModifyColumnTest extends BaseTest {
assertFalse(JdbcUtils.isColumnNullable(getConnectionProperties(), "SOMETABLE", "PID"));
assertFalse(JdbcUtils.isColumnNullable(getConnectionProperties(), "SOMETABLE", "TEXTCOL"));
assertEquals(new JdbcUtils.ColumnType(ColumnTypeEnum.LONG, 19), JdbcUtils.getColumnType(getConnectionProperties(), "SOMETABLE", "PID"));
assertEquals(getLongColumnType(theTestDatabaseDetails), JdbcUtils.getColumnType(getConnectionProperties(), "SOMETABLE", "PID"));
assertEquals(new JdbcUtils.ColumnType(ColumnTypeEnum.STRING, 255), JdbcUtils.getColumnType(getConnectionProperties(), "SOMETABLE", "TEXTCOL"));
// Make sure additional migrations don't crash
@ -228,6 +229,19 @@ public class ModifyColumnTest extends BaseTest {
}
@SuppressWarnings("EnumSwitchStatementWhichMissesCases")
@Nonnull
private JdbcUtils.ColumnType getLongColumnType(Supplier<TestDatabaseDetails> theTestDatabaseDetails) {
switch (theTestDatabaseDetails.get().getDriverType()) {
case H2_EMBEDDED:
return new JdbcUtils.ColumnType(ColumnTypeEnum.LONG, 64);
case DERBY_EMBEDDED:
return new JdbcUtils.ColumnType(ColumnTypeEnum.LONG, 19);
default:
throw new UnsupportedOperationException();
}
}
@ParameterizedTest(name = "{index}: {0}")
@MethodSource("data")
public void testColumnDoesntAlreadyExist(Supplier<TestDatabaseDetails> theTestDatabaseDetails) throws SQLException {
@ -306,8 +320,7 @@ public class ModifyColumnTest extends BaseTest {
task.setNullable(true);
JdbcUtils.ColumnType existingColumnType = JdbcUtils.getColumnType(getConnectionProperties(), "SOMETABLE", "PID");
assertEquals(ColumnTypeEnum.LONG, existingColumnType.getColumnTypeEnum());
assertEquals(19L, existingColumnType.getLength().longValue());
assertEquals(getLongColumnType(theTestDatabaseDetails), existingColumnType);
assertTrue(existingColumnType.equals(task.getColumnType(), task.getColumnLength()));
}

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.7.0-PRE8-SNAPSHOT</version>
<version>5.7.0-PRE9-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -76,10 +76,8 @@ import org.springframework.transaction.annotation.Transactional;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.validation.constraints.NotNull;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.IdentityHashMap;
import java.util.List;
import java.util.Map;

Some files were not shown because too many files have changed in this diff Show More