Fixes

commit 215cbaa80c
parent e3270af4a9
@@ -22,6 +22,18 @@ package ca.uhn.fhir.rest.param;
 import java.util.Map;
 
 public class HistorySearchDateRangeParam extends DateRangeParam {
+	/**
+	 * Constructor
+	 *
+	 * @since 8.0.0
+	 */
+	public HistorySearchDateRangeParam() {
+		this(Map.of(), new DateRangeParam(), null);
+	}
+
+	/**
+	 * Constructor
+	 */
 	public HistorySearchDateRangeParam(
 			Map<String, String[]> theParameters, DateRangeParam theDateRange, Integer theOffset) {
 		super(theDateRange);
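For illustration only (not part of the commit): the new no-argument constructor above simply delegates to the existing three-argument constructor with an empty parameter map, an empty date range, and no offset.

// Hypothetical usage sketch of the class changed above.
HistorySearchDateRangeParam implicit = new HistorySearchDateRangeParam();
HistorySearchDateRangeParam explicit =
		new HistorySearchDateRangeParam(Map.of(), new DateRangeParam(), null);
// Both describe an unconstrained _history date window with no offset.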
@@ -29,7 +29,7 @@ public class FileUtil {
 		if (theBytes <= 0) {
 			return "0 " + UNITS[0];
 		}
-		int digitGroups = (int) (Math.log10(theBytes) / Math.log10(1024));
+		int digitGroups = (int) (Math.log10((double) theBytes) / Math.log10(1024));
 		digitGroups = Math.min(digitGroups, UNITS.length - 1);
 		return new DecimalFormat("###0.#").format(theBytes / Math.pow(1024, digitGroups)) + " " + UNITS[digitGroups];
 	}
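A side note on the change above: the explicit (double) cast appears to be for clarity or static analysis (a long argument widens to double either way), while the existing Math.min clamp is what keeps very large inputs inside the UNITS array. A minimal standalone sketch, with an assumed UNITS array since the real constant is not shown in this diff:

import java.text.DecimalFormat;

public class FileSizeDemo {
	// Assumed unit labels for illustration; the real UNITS constant lives in FileUtil.
	private static final String[] UNITS = {"Bytes", "kB", "MB", "GB", "TB"};

	static String format(long theBytes) {
		if (theBytes <= 0) {
			return "0 " + UNITS[0];
		}
		int digitGroups = (int) (Math.log10((double) theBytes) / Math.log10(1024));
		// Without this clamp, Long.MAX_VALUE would compute digit group 6 and overflow UNITS.
		digitGroups = Math.min(digitGroups, UNITS.length - 1);
		return new DecimalFormat("###0.#").format(theBytes / Math.pow(1024, digitGroups)) + " " + UNITS[digitGroups];
	}

	public static void main(String[] args) {
		System.out.println(format(1_500));          // roughly "1.5 kB"
		System.out.println(format(Long.MAX_VALUE)); // clamped to the largest unit
	}
}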
@@ -47,6 +47,18 @@
 				</exclusion>
 			</exclusions>
 		</dependency>
+		<dependency>
+			<groupId>ca.uhn.hapi.fhir</groupId>
+			<artifactId>hapi-tinder-test</artifactId>
+			<version>${project.version}</version>
+			<scope>test</scope>
+		</dependency>
+
+		<dependency>
+			<groupId>com.github.jsqlparser</groupId>
+			<artifactId>jsqlparser</artifactId>
+			<scope>test</scope>
+		</dependency>
 
 	</dependencies>
 
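The jsqlparser artifact is added with test scope, presumably so tests can parse captured SQL and assert on its structure rather than on raw strings (an assumption; the commit only adds the dependency). A minimal sketch of that style of check:

import net.sf.jsqlparser.JSQLParserException;
import net.sf.jsqlparser.parser.CCJSqlParserUtil;
import net.sf.jsqlparser.statement.Statement;
import net.sf.jsqlparser.statement.select.Select;

public class SqlParseDemo {
	public static void main(String[] args) throws JSQLParserException {
		// Parse a captured query and assert on its structure instead of its raw text.
		Statement stmt = CCJSqlParserUtil.parse("SELECT RES_ID FROM HFJ_RESOURCE WHERE RES_TYPE = 'Patient'");
		if (!(stmt instanceof Select)) {
			throw new AssertionError("Expected a SELECT statement, got: " + stmt);
		}
		System.out.println(stmt);
	}
}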
@@ -19,6 +19,7 @@ import ca.uhn.fhir.jpa.binary.interceptor.BinaryStorageInterceptor;
 import ca.uhn.fhir.jpa.binary.provider.BinaryAccessProvider;
 import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportJobSchedulingHelper;
 import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
+import ca.uhn.fhir.jpa.dao.TestDaoSearch;
 import ca.uhn.fhir.jpa.dao.data.IResourceHistoryProvenanceDao;
 import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTableDao;
 import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTagDao;
@@ -142,7 +143,7 @@ import static org.junit.jupiter.api.Assertions.assertTrue;
 import static org.mockito.Mockito.mock;
 
 @ExtendWith(SpringExtension.class)
-@ContextConfiguration(classes = {TestR5Config.class})
+@ContextConfiguration(classes = {TestR5Config.class, TestDaoSearch.Config.class})
 public abstract class BaseJpaR5Test extends BaseJpaTest implements ITestDataBuilder {
 	@Autowired
 	protected IJobCoordinator myJobCoordinator;
@@ -421,12 +422,15 @@ public abstract class BaseJpaR5Test extends BaseJpaTest implements ITestDataBuilder {
 
 	@AfterEach()
 	public void afterCleanupDao() {
-		myStorageSettings.setExpireSearchResults(new JpaStorageSettings().isExpireSearchResults());
-		myStorageSettings.setEnforceReferentialIntegrityOnDelete(new JpaStorageSettings().isEnforceReferentialIntegrityOnDelete());
-		myStorageSettings.setExpireSearchResultsAfterMillis(new JpaStorageSettings().getExpireSearchResultsAfterMillis());
-		myStorageSettings.setReuseCachedSearchResultsForMillis(new JpaStorageSettings().getReuseCachedSearchResultsForMillis());
-		myStorageSettings.setSuppressUpdatesWithNoChange(new JpaStorageSettings().isSuppressUpdatesWithNoChange());
-		myStorageSettings.setAllowContainsSearches(new JpaStorageSettings().isAllowContainsSearches());
+		JpaStorageSettings defaults = new JpaStorageSettings();
+		myStorageSettings.setAccessMetaSourceInformationFromProvenanceTable(defaults.isAccessMetaSourceInformationFromProvenanceTable());
+		myStorageSettings.setAllowContainsSearches(defaults.isAllowContainsSearches());
+		myStorageSettings.setEnforceReferentialIntegrityOnDelete(defaults.isEnforceReferentialIntegrityOnDelete());
+		myStorageSettings.setExpireSearchResults(defaults.isExpireSearchResults());
+		myStorageSettings.setExpireSearchResultsAfterMillis(defaults.getExpireSearchResultsAfterMillis());
+		myStorageSettings.setReuseCachedSearchResultsForMillis(defaults.getReuseCachedSearchResultsForMillis());
+		myStorageSettings.setSuppressUpdatesWithNoChange(defaults.isSuppressUpdatesWithNoChange());
+		myStorageSettings.setAutoCreatePlaceholderReferenceTargets(defaults.isAutoCreatePlaceholderReferenceTargets());
 
 		myPagingProvider.setDefaultPageSize(BasePagingProvider.DEFAULT_DEFAULT_PAGE_SIZE);
 		myPagingProvider.setMaximumPageSize(BasePagingProvider.DEFAULT_MAX_PAGE_SIZE);
@@ -0,0 +1,47 @@
+package ca.uhn.fhir.jpa.dao.r5.dbpartitionmode;
+
+import ca.uhn.fhir.interceptor.api.Pointcut;
+import ca.uhn.fhir.jpa.dao.r5.BaseJpaR5Test;
+import ca.uhn.fhir.jpa.entity.PartitionEntity;
+import ca.uhn.fhir.jpa.partition.IPartitionLookupSvc;
+import ca.uhn.fhir.jpa.util.TestPartitionSelectorInterceptor;
+import org.junit.jupiter.api.AfterEach;
+import org.springframework.beans.factory.annotation.Autowired;
+
+import static org.junit.jupiter.api.Assertions.assertFalse;
+
+public class BaseDbpmJpaR5Test extends BaseJpaR5Test {
+
+	public static final String PARTITION_NAME_1 = "Partition_1";
+	public static final String PARTITION_NAME_2 = "Partition_2";
+	public static final int PARTITION_1 = 1;
+	public static final int PARTITION_2 = 2;
+
+	protected final TestPartitionSelectorInterceptor myPartitionSelectorInterceptor = new TestPartitionSelectorInterceptor();
+
+	@Autowired
+	private IPartitionLookupSvc myPartitionConfigSvc;
+
+	@Override
+	@AfterEach
+	protected void afterResetInterceptors() {
+		super.afterResetInterceptors();
+		myPartitionSettings.setPartitioningEnabled(false);
+		myInterceptorRegistry.unregisterInterceptor(myPartitionSelectorInterceptor);
+	}
+
+	protected void registerPartitionInterceptorAndCreatePartitions() {
+		assertFalse(myInterceptorRegistry.hasHooks(Pointcut.STORAGE_PARTITION_IDENTIFY_READ), ()->myInterceptorRegistry.getAllRegisteredInterceptors().toString());
+		myInterceptorRegistry.registerInterceptor(myPartitionSelectorInterceptor);
+
+		myPartitionConfigSvc.createPartition(new PartitionEntity().setId(PARTITION_1).setName(PARTITION_NAME_1), null);
+		myPartitionConfigSvc.createPartition(new PartitionEntity().setId(PARTITION_2).setName(PARTITION_NAME_2), null);
+
+		// Load to pre-cache and avoid adding SQL queries
+		myPartitionConfigSvc.getPartitionById(PARTITION_1);
+		myPartitionConfigSvc.getPartitionById(PARTITION_2);
+		myPartitionConfigSvc.getPartitionByName(PARTITION_NAME_1);
+		myPartitionConfigSvc.getPartitionByName(PARTITION_NAME_2);
+	}
+
+}
@@ -0,0 +1,19 @@
+package ca.uhn.fhir.jpa.dao.r5.dbpartitionmode;
+
+import ca.uhn.fhir.jpa.util.TestPartitionSelectorInterceptor;
+import org.junit.jupiter.api.Nested;
+
+/**
+ * This is a test verifying that we emit the right SQL for HAPI FHIR running in
+ * full legacy mode - No partitioning, no partition IDs in PKs.
+ */
+public class DbpmDisabledPartitioningDisabledTest extends BaseDbpmJpaR5Test {
+
+	@Nested
+	public class MyTestDefinitions extends TestDefinitions {
+		MyTestDefinitions() {
+			super(DbpmDisabledPartitioningDisabledTest.this, new TestPartitionSelectorInterceptor(), false, false);
+		}
+	}
+
+}
@@ -0,0 +1,32 @@
+package ca.uhn.fhir.jpa.dao.r5.dbpartitionmode;
+
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Nested;
+
+/**
+ * This is a test verifying that we emit the right SQL when running in
+ * legacy partition mode with DEFAULT partition value of null (the default if
+ * not configured otherwise) - Partition IDs are in use, but they aren't
+ * included in primary keys or joins.
+ */
+public class DbpmDisabledPartitioningEnabledNullDefaultPartitionTest extends BaseDbpmJpaR5Test {
+
+	@Override
+	@BeforeEach
+	public void before() throws Exception {
+		super.before();
+		myPartitionSettings.setPartitioningEnabled(true);
+		myPartitionSettings.setDefaultPartitionId(null);
+
+		registerPartitionInterceptorAndCreatePartitions();
+	}
+
+	@Nested
+	public class MyTestDefinitions extends TestDefinitions {
+		MyTestDefinitions() {
+			super(DbpmDisabledPartitioningEnabledNullDefaultPartitionTest.this, myPartitionSelectorInterceptor, true, false);
+		}
+	}
+
+
+}
@@ -0,0 +1,31 @@
+package ca.uhn.fhir.jpa.dao.r5.dbpartitionmode;
+
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Nested;
+
+/**
+ * This is a test verifying that we emit the right SQL when running in
+ * legacy partition mode - Partition IDs are in use, but they aren't
+ * included in primary keys or joins.
+ */
+public class DbpmDisabledPartitioningEnabledTest extends BaseDbpmJpaR5Test {
+
+	@Override
+	@BeforeEach
+	public void before() throws Exception {
+		super.before();
+		myPartitionSettings.setPartitioningEnabled(true);
+		myPartitionSettings.setDefaultPartitionId(0);
+
+		registerPartitionInterceptorAndCreatePartitions();
+	}
+
+	@Nested
+	public class MyTestDefinitions extends TestDefinitions {
+		MyTestDefinitions() {
+			super(DbpmDisabledPartitioningEnabledTest.this, myPartitionSelectorInterceptor, true, false);
+		}
+	}
+
+
+}
File diff suppressed because it is too large
@@ -413,6 +413,7 @@ public abstract class BaseJpaTest extends BaseTest {
 
 		PartitionSettings defaultPartConfig = new PartitionSettings();
 		myPartitionSettings.setIncludePartitionInSearchHashes(defaultPartConfig.isIncludePartitionInSearchHashes());
+		myPartitionSettings.setAllowReferencesAcrossPartitions(defaultPartConfig.getAllowReferencesAcrossPartitions());
 	}
 
 	@AfterEach
@@ -19,9 +19,12 @@
  */
 package ca.uhn.fhir.jpa.util;
 
+import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.lang3.SystemUtils;
 
+import java.util.UUID;
+
 public final class DatabaseSupportUtil {
 
 	private DatabaseSupportUtil() {}
@@ -50,4 +53,12 @@ public final class DatabaseSupportUtil {
 				&& StringUtils.isNotBlank(System.getenv("DOCKER_HOST"))
 				&& System.getenv("DOCKER_HOST").contains("colima");
 	}
+
+	/**
+	 * Create a new connection to a randomized H2 database for testing
+	 */
+	public static DriverTypeEnum.ConnectionProperties newConnection() {
+		String url = "jdbc:h2:mem:test_migration-" + UUID.randomUUID() + ";CASE_INSENSITIVE_IDENTIFIERS=TRUE;";
+		return DriverTypeEnum.H2_EMBEDDED.newConnectionProperties(url, "SA", "SA");
+	}
 }
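A sketch of how a migration test might use the new helper. Only newConnection() comes from this commit; the getDataSource() accessor on ConnectionProperties is assumed here for illustration.

import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.util.DatabaseSupportUtil;

import java.sql.Connection;
import java.sql.SQLException;

public class MigrationConnectionDemo {
	public static void main(String[] args) throws SQLException {
		// Each call yields an isolated, randomly named in-memory H2 database.
		DriverTypeEnum.ConnectionProperties props = DatabaseSupportUtil.newConnection();
		// getDataSource() is assumed to expose the underlying DataSource.
		try (Connection connection = props.getDataSource().getConnection()) {
			System.out.println("Connected to " + connection.getMetaData().getURL());
		}
	}
}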
@@ -0,0 +1,52 @@
+package ca.uhn.fhir.jpa.util;
+
+import ca.uhn.fhir.context.FhirContext;
+import ca.uhn.fhir.interceptor.api.Hook;
+import ca.uhn.fhir.interceptor.api.Pointcut;
+import ca.uhn.fhir.interceptor.model.ReadPartitionIdRequestDetails;
+import ca.uhn.fhir.interceptor.model.RequestPartitionId;
+import ca.uhn.fhir.jpa.partition.BaseRequestPartitionHelperSvc;
+import ca.uhn.fhir.jpa.partition.RequestPartitionHelperSvc;
+import jakarta.annotation.Nonnull;
+import org.hl7.fhir.instance.model.api.IBaseResource;
+
+public class TestPartitionSelectorInterceptor {
+	private RequestPartitionId myNextPartition;
+	private BaseRequestPartitionHelperSvc myHelperSvc = new RequestPartitionHelperSvc();
+
+	/**
+	 * Constructor
+	 */
+	public TestPartitionSelectorInterceptor() {
+		super();
+	}
+
+	public void setNextPartitionId(Integer theNextPartitionId) {
+		myNextPartition = RequestPartitionId.fromPartitionId(theNextPartitionId);
+	}
+
+	public void setNextPartition(RequestPartitionId theNextPartition) {
+		myNextPartition = theNextPartition;
+	}
+
+	@Hook(Pointcut.STORAGE_PARTITION_IDENTIFY_CREATE)
+	public RequestPartitionId selectPartitionCreate(IBaseResource theResource) {
+		String resourceType = FhirContext.forR5Cached().getResourceType(theResource);
+		return selectPartition(resourceType);
+	}
+
+	@Hook(Pointcut.STORAGE_PARTITION_IDENTIFY_READ)
+	public RequestPartitionId selectPartitionRead(ReadPartitionIdRequestDetails theDetails) {
+		return selectPartition(theDetails.getResourceType());
+	}
+
+	@Nonnull
+	private RequestPartitionId selectPartition(String theResourceType) {
+		if (!myHelperSvc.isResourcePartitionable(theResourceType)) {
+			return RequestPartitionId.defaultPartition();
+		}
+
+		assert myNextPartition != null;
+		return myNextPartition;
+	}
+}
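A sketch of how a test built on the classes in this commit might drive the interceptor. The test body and the withActiveTrue() modifier are illustrative assumptions; the setup calls mirror BaseDbpmJpaR5Test and the Dbpm* tests above.

package ca.uhn.fhir.jpa.dao.r5.dbpartitionmode;

import org.hl7.fhir.instance.model.api.IIdType;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

import static org.junit.jupiter.api.Assertions.assertNotNull;

// Illustrative sketch only; not part of the commit.
public class PartitionSelectionSketchTest extends BaseDbpmJpaR5Test {

	@Override
	@BeforeEach
	public void before() throws Exception {
		super.before();
		myPartitionSettings.setPartitioningEnabled(true);
		registerPartitionInterceptorAndCreatePartitions();
	}

	@Test
	public void createPatientOnPartition1() {
		// Route the next create to Partition_1 via the STORAGE_PARTITION_IDENTIFY_CREATE hook.
		myPartitionSelectorInterceptor.setNextPartitionId(PARTITION_1);
		IIdType id = createPatient(withActiveTrue());
		assertNotNull(id);
	}
}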
@@ -28,11 +28,11 @@ import net.ttddyy.dsproxy.listener.MethodExecutionContext;
 import net.ttddyy.dsproxy.proxy.ParameterSetOperation;
 import net.ttddyy.dsproxy.support.ProxyDataSourceBuilder;
 
+import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
 import java.util.Queue;
 import java.util.concurrent.atomic.AtomicInteger;
-import java.util.stream.Collectors;
 
 import static org.apache.commons.lang3.StringUtils.trim;
 
@@ -85,10 +85,16 @@ public abstract class BaseCaptureQueriesListener
 					&& next.getParametersList().get(0).size() > 0) {
 				size = next.getParametersList().size();
 				List<ParameterSetOperation> values = next.getParametersList().get(0);
-				params = values.stream()
-						.map(t -> t.getArgs()[1])
-						.map(t -> t != null ? t.toString() : "NULL")
-						.collect(Collectors.toList());
+				params = new ArrayList<>();
+				for (ParameterSetOperation t : values) {
+					if (t.getMethod().getName().equals("setNull")) {
+						params.add(null);
+					} else {
+						Object arg = t.getArgs()[1];
+						String s = arg != null ? arg.toString() : null;
+						params.add(s);
+					}
+				}
 			} else {
 				params = Collections.emptyList();
 				size = next.getParametersList().size();
@@ -138,10 +138,15 @@ public class SqlQuery {
 				break;
 			}
 			String nextParamValue = nextParams.remove(0);
-			if (theSanitizeParams) {
-				nextParamValue = UrlUtil.sanitizeUrlPart(nextParamValue);
+			String nextSubstitution;
+			if (nextParamValue != null) {
+				if (theSanitizeParams) {
+					nextParamValue = UrlUtil.sanitizeUrlPart(nextParamValue);
+				}
+				nextSubstitution = "'" + nextParamValue + "'";
+			} else {
+				nextSubstitution = "NULL";
 			}
-			String nextSubstitution = "'" + nextParamValue + "'";
 			retVal = retVal.substring(0, idx) + nextSubstitution + retVal.substring(idx + 1);
 			idx += nextSubstitution.length();
 		}
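Together with the BaseCaptureQueriesListener change above, a bound SQL NULL now survives as a Java null and is rendered as an unquoted NULL instead of a quoted 'NULL' string. A simplified standalone sketch of that substitution (the real SqlQuery code also handles sanitization and index bookkeeping):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class BindSubstitutionDemo {
	// Replace each '?' placeholder with its bound value, quoting non-null values
	// and emitting an unquoted NULL for null parameters (mirrors the new SqlQuery logic).
	static String inline(String theSql, List<String> theParams) {
		List<String> params = new ArrayList<>(theParams);
		StringBuilder retVal = new StringBuilder();
		for (int i = 0; i < theSql.length(); i++) {
			char c = theSql.charAt(i);
			if (c == '?' && !params.isEmpty()) {
				String next = params.remove(0);
				retVal.append(next != null ? "'" + next + "'" : "NULL");
			} else {
				retVal.append(c);
			}
		}
		return retVal.toString();
	}

	public static void main(String[] args) {
		String sql = "SELECT * FROM HFJ_RESOURCE WHERE RES_TYPE = ? AND RES_DELETED_AT = ?";
		System.out.println(inline(sql, Arrays.asList("Patient", null)));
		// SELECT * FROM HFJ_RESOURCE WHERE RES_TYPE = 'Patient' AND RES_DELETED_AT = NULL
	}
}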
@@ -21,6 +21,7 @@ package ca.uhn.fhir.test.utilities;
 
 import ca.uhn.fhir.context.BaseRuntimeChildDefinition;
 import ca.uhn.fhir.context.BaseRuntimeElementCompositeDefinition;
+import ca.uhn.fhir.context.BaseRuntimeElementDefinition;
 import ca.uhn.fhir.context.FhirContext;
 import ca.uhn.fhir.context.RuntimeResourceDefinition;
 import ca.uhn.fhir.util.FhirTerser;
@@ -87,6 +88,13 @@
 		return t -> __setPrimitiveChild(getFhirContext(), t, "language", "string", theLanguage);
 	}
 
+	/**
+	 * List.entry.item
+	 */
+	default ICreationArgument withListItem(IIdType theReference) {
+		return withElementAt("entry", withReference("item", theReference));
+	}
+
 	/**
 	 * Set Patient.gender
 	 */
@@ -233,6 +241,10 @@
 		return buildResource("Patient", theModifiers);
 	}
 
+	default IIdType createList(ICreationArgument... theModifiers) {
+		return createResource("List", theModifiers);
+	}
+
 	default IIdType createPatient(ICreationArgument... theModifiers) {
 		return createResource("Patient", theModifiers);
 	}
@@ -315,7 +327,7 @@
 			IBaseReference reference = (IBaseReference) getFhirContext().getElementDefinition("Reference").newInstance();
 			reference.setReference(theReferenceValue.getValue());
 
-			RuntimeResourceDefinition resourceDef = getFhirContext().getResourceDefinition((IBaseResource) t);
+			BaseRuntimeElementDefinition<?> resourceDef = getFhirContext().getElementDefinition(t.getClass());
 			resourceDef.getChildByName(theReferenceName).getMutator().addValue(t, reference);
 		}
 	};
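An illustrative fragment (not part of the commit) showing the new builder methods in use inside a test class that implements ITestDataBuilder; withActiveTrue() is pre-existing builder API assumed here.

// Inside any test class implementing ITestDataBuilder (illustrative fragment):
@Test
public void createListReferencingPatient() {
	IIdType patientId = createPatient(withActiveTrue());

	// New helpers: a List resource whose entry.item references the patient.
	IIdType listId = createList(withListItem(patientId));

	assertNotNull(listId);
}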
pom.xml
@@ -1164,6 +1164,11 @@
 			<artifactId>caffeine</artifactId>
 			<version>${caffeine_version}</version>
 		</dependency>
+		<dependency>
+			<groupId>com.github.jsqlparser</groupId>
+			<artifactId>jsqlparser</artifactId>
+			<version>5.0</version>
+		</dependency>
 		<dependency>
 			<groupId>com.googlecode.owasp-java-html-sanitizer</groupId>
 			<artifactId>owasp-java-html-sanitizer</artifactId>
@@ -1355,7 +1360,7 @@
 			<dependency>
 				<groupId>org.jetbrains</groupId>
 				<artifactId>annotations</artifactId>
-				<version>23.0.0</version>
+				<version>24.0.1</version>
 			</dependency>
 			<dependency>
 				<groupId>commons-io</groupId>