More cleanup in advance of DB Partitioning Mode (#6553)

* Work on next merge

* Bring cleanup forward

* Add error code

* Address review comments

* Build fix

* Cleanup
Author: James Agnew, 2024-12-12 05:34:10 -05:00 (committed by GitHub)
parent e174572a3b
commit c3eba4681f
54 changed files with 419 additions and 252 deletions

View File

@ -2,12 +2,14 @@ package ca.uhn.fhir.cli;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.provider.BaseJpaSystemProvider;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.system.HapiSystemProperties;
import ca.uhn.fhir.test.utilities.RestServerR4Helper;
import ca.uhn.fhir.test.utilities.TlsAuthenticationTestHelper;
import ca.uhn.fhir.util.ParametersUtil;
import ca.uhn.test.util.LogbackTestExtension;
import ca.uhn.test.util.LogbackTestExtensionAssert;
import ch.qos.logback.classic.Level;
import ch.qos.logback.classic.Logger;
import org.hl7.fhir.instance.model.api.IBaseParameters;
import org.junit.jupiter.api.BeforeEach;
@ -40,9 +42,8 @@ class ReindexTerminologyCommandTest {
public final RestServerR4Helper myRestServerR4Helper = RestServerR4Helper.newInitialized();
@RegisterExtension
public TlsAuthenticationTestHelper myTlsAuthenticationTestHelper = new TlsAuthenticationTestHelper();
// Deliberately not registered - we manually run this later because App startup resets the logging.
LogbackTestExtension myAppLogCapture;
@RegisterExtension
public LogbackTestExtension myLogbackTestExtension = new LogbackTestExtension(BaseApp.ourLog);
static {
HapiSystemProperties.enableTestMode();
@ -69,7 +70,7 @@ class ReindexTerminologyCommandTest {
);
runAppWithStartupHook(args, getLoggingStartupHook());
LogbackTestExtensionAssert.assertThat(myAppLogCapture).doesNotHaveMessage(FAILURE_MESSAGE);
LogbackTestExtensionAssert.assertThat(myLogbackTestExtension).doesNotHaveMessage(FAILURE_MESSAGE);
}
@ParameterizedTest
@ -131,7 +132,7 @@ class ReindexTerminologyCommandTest {
);
runAppWithStartupHook(args, getLoggingStartupHook());
LogbackTestExtensionAssert.assertThat(myAppLogCapture)
LogbackTestExtensionAssert.assertThat(myLogbackTestExtension)
.hasMessage(FAILURE_MESSAGE)
.hasMessage("Internal error. Command result unknown. Check system logs for details");
}
@ -156,7 +157,7 @@ class ReindexTerminologyCommandTest {
);
runAppWithStartupHook(args, getLoggingStartupHook());
LogbackTestExtensionAssert.assertThat(myAppLogCapture)
LogbackTestExtensionAssert.assertThat(myLogbackTestExtension)
.hasMessage(FAILURE_MESSAGE)
.hasMessage("Freetext service is not configured. Operation didn't run.");
}
@ -176,8 +177,11 @@ class ReindexTerminologyCommandTest {
*/
Consumer<BaseApp> getLoggingStartupHook() {
return (unused) -> {
myAppLogCapture = new LogbackTestExtension((Logger) BaseApp.ourLog);
myAppLogCapture.setUp();
try {
myLogbackTestExtension.reRegister();
} catch (Exception e) {
throw new InternalErrorException(e);
}
};
}
}
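
The extension is registered normally, but BaseApp's startup reconfigures Logback and detaches any appenders attached beforehand, so the test reinstalls the capture appender from a startup hook via the new reRegister() method (added to LogbackTestExtension later in this commit). A condensed sketch of the flow, using only API visible in this diff (args and FAILURE_MESSAGE come from the surrounding test class):

    @RegisterExtension
    public LogbackTestExtension myLogbackTestExtension = new LogbackTestExtension(BaseApp.ourLog);

    @Test
    void testNoFailureLogged() {
        // Re-attach the capture appender once the app has finished resetting logging
        runAppWithStartupHook(args, unused -> {
            try {
                myLogbackTestExtension.reRegister();
            } catch (Exception e) {
                throw new InternalErrorException(e);
            }
        });
        LogbackTestExtensionAssert.assertThat(myLogbackTestExtension).doesNotHaveMessage(FAILURE_MESSAGE);
    }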

View File

@ -130,17 +130,17 @@ public class RequestPartitionHelperSvc extends BaseRequestPartitionHelperSvc {
}
if (theRequestPartitionId.hasPartitionIds()) {
Integer partitionId = theRequestPartitionId.getPartitionIds().get(i);
if (partition == null) {
Validate.isTrue(
theRequestPartitionId.getPartitionIds().get(i) == null,
partitionId == null || partitionId.equals(myPartitionSettings.getDefaultPartitionId()),
"Partition %s must not have an ID",
JpaConstants.DEFAULT_PARTITION_NAME);
} else {
Validate.isTrue(
Objects.equals(
theRequestPartitionId.getPartitionIds().get(i), partition.getId()),
Objects.equals(partitionId, partition.getId()),
"Partition ID %s does not match name %s",
theRequestPartitionId.getPartitionIds().get(i),
partitionId,
theRequestPartitionId.getPartitionNames().get(i));
}
} else {

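With a non-null default partition ID configured, the default partition can legitimately carry that ID, so the check now accepts either null or the configured value. The rule in isolation, as a sketch (the helper name is hypothetical; the constants and message come from the hunk):

    static void validateDefaultPartition(Integer thePartitionId, Integer theDefaultPartitionId) {
        // null and the configured default partition ID both denote the default partition
        Validate.isTrue(
            thePartitionId == null || thePartitionId.equals(theDefaultPartitionId),
            "Partition %s must not have an ID",
            JpaConstants.DEFAULT_PARTITION_NAME);
    }
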
View File

@ -491,7 +491,7 @@ public class QueryStack {
}
private void addSortCustomJoin(
DbColumn theFromDbColumn[],
DbColumn[] theFromDbColumn,
BaseJoiningPredicateBuilder theToJoiningPredicateBuilder,
Condition theCondition) {
@ -550,14 +550,14 @@ public class QueryStack {
private Condition createPredicateComposite(
@Nullable DbColumn[] theSourceJoinColumn,
String theResourceName,
String theSpnamePrefix,
String theSPNamePrefix,
RuntimeSearchParam theParamDef,
List<? extends IQueryParameterType> theNextAnd,
RequestPartitionId theRequestPartitionId) {
return createPredicateComposite(
theSourceJoinColumn,
theResourceName,
theSpnamePrefix,
theSPNamePrefix,
theParamDef,
theNextAnd,
theRequestPartitionId,
@ -698,7 +698,7 @@ public class QueryStack {
}
// TODO - Change this when we have HFJ_SPIDX_MISSING table
/**
/*
* How we search depends on if the
* {@link JpaStorageSettings#getIndexMissingFields()} property
* is Enabled or Disabled.
@ -821,7 +821,6 @@ public class QueryStack {
ICanMakeMissingParamPredicate innerQuery = PredicateBuilderFactory.createPredicateBuilderForParamType(
theParams.getParamType(), theParams.getSqlBuilder(), this);
return innerQuery.createPredicateParamMissingValue(new MissingQueryParameterPredicateParams(
table, theParams.isMissing(), theParams.getParamName(), theParams.getRequestPartitionId()));
}
@ -943,16 +942,13 @@ public class QueryStack {
QueryStack theQueryStack3,
SearchFilterParser.BaseFilter theFilter,
String theResourceName,
RequestDetails theRequest,
RequestPartitionId theRequestPartitionId) {
if (theFilter instanceof SearchFilterParser.FilterParameter) {
return createPredicateFilter(
theRequest,
theQueryStack3,
(SearchFilterParser.FilterParameter) theFilter,
theResourceName,
theRequest,
theRequestPartitionId);
} else if (theFilter instanceof SearchFilterParser.FilterLogical) {
// Left side
@ -960,7 +956,6 @@ public class QueryStack {
theQueryStack3,
((SearchFilterParser.FilterLogical) theFilter).getFilter1(),
theResourceName,
theRequest,
theRequestPartitionId);
// Right side
@ -968,7 +963,6 @@ public class QueryStack {
theQueryStack3,
((SearchFilterParser.FilterLogical) theFilter).getFilter2(),
theResourceName,
theRequest,
theRequestPartitionId);
if (((SearchFilterParser.FilterLogical) theFilter).getOperation()
@ -987,17 +981,14 @@ public class QueryStack {
theQueryStack3,
((SearchFilterParser.FilterParameterGroup) theFilter).getContained(),
theResourceName,
theRequest,
theRequestPartitionId);
}
}
private Condition createPredicateFilter(
RequestDetails theRequestDetails,
QueryStack theQueryStack3,
SearchFilterParser.FilterParameter theFilter,
String theResourceName,
RequestDetails theRequest,
RequestPartitionId theRequestPartitionId) {
String paramName = theFilter.getParamPath().getName();
@ -1089,7 +1080,6 @@ public class QueryStack {
new ArrayList<>(),
Collections.singletonList(referenceParam),
operation,
theRequest,
theRequestPartitionId);
} else if (typeEnum == RestSearchParameterTypeEnum.QUANTITY) {
return theQueryStack3.createPredicateQuantity(
@ -1240,9 +1230,8 @@ public class QueryStack {
Condition pathPredicate = toEqualToOrInPredicate(
resourceLinkTableJoin.getColumnSourcePath(), mySqlBuilder.generatePlaceholders(paths));
Condition linkedPredicate = searchForIdsWithAndOr(
theRequest,
with().setSourceJoinColumn(resourceLinkTableJoin.getJoinColumnsForSource())
Condition linkedPredicate =
searchForIdsWithAndOr(with().setSourceJoinColumn(resourceLinkTableJoin.getJoinColumnsForSource())
.setResourceName(targetResourceType)
.setParamName(parameterName)
.setAndOrParams(Collections.singletonList(orValues))
@ -1433,7 +1422,6 @@ public class QueryStack {
List<String> theQualifiers,
List<? extends IQueryParameterType> theList,
SearchFilterParser.CompareOperation theOperation,
RequestDetails theRequest,
RequestPartitionId theRequestPartitionId) {
return createPredicateReference(
theSourceJoinColumn,
@ -1442,7 +1430,6 @@ public class QueryStack {
theQualifiers,
theList,
theOperation,
theRequest,
theRequestPartitionId,
mySqlBuilder);
}
@ -1454,7 +1441,6 @@ public class QueryStack {
List<String> theQualifiers,
List<? extends IQueryParameterType> theList,
SearchFilterParser.CompareOperation theOperation,
RequestDetails theRequest,
RequestPartitionId theRequestPartitionId,
SearchQueryBuilder theSqlBuilder) {
@ -1484,7 +1470,7 @@ public class QueryStack {
() -> theSqlBuilder.addReferencePredicateBuilder(this, theSourceJoinColumn))
.getResult();
return predicateBuilder.createPredicate(
theRequest,
myRequestDetails,
theResourceName,
theParamName,
theQualifiers,
@ -1526,7 +1512,6 @@ public class QueryStack {
RuntimeSearchParam theSearchParam,
List<? extends IQueryParameterType> theList,
SearchFilterParser.CompareOperation theOperation,
RequestDetails theRequest,
RequestPartitionId theRequestPartitionId,
EmbeddedChainedSearchModeEnum theEmbeddedChainedSearchModeEnum) {
@ -1553,14 +1538,11 @@ public class QueryStack {
}
UnionQuery union = null;
List<Condition> predicates = null;
if (wantChainedAndNormal) {
union = new UnionQuery(SetOperationQuery.Type.UNION_ALL);
} else {
predicates = new ArrayList<>();
}
predicates = new ArrayList<>();
List<Condition> predicates = new ArrayList<>();
for (List<String> nextReferenceLink : referenceLinks.keySet()) {
for (LeafNodeDefinition leafNodeDefinition : referenceLinks.get(nextReferenceLink)) {
SearchQueryBuilder builder;
@ -1593,7 +1575,6 @@ public class QueryStack {
leafNodeDefinition.getOrValues(),
theOperation,
leafNodeDefinition.getQualifiers(),
theRequest,
theRequestPartitionId,
builder);
@ -1792,11 +1773,7 @@ public class QueryStack {
Map<List<String>, Set<LeafNodeDefinition>> theReferenceLinksMap,
ArrayList<String> thePath,
Set<LeafNodeDefinition> theLeafNodesToAdd) {
Set<LeafNodeDefinition> leafNodes = theReferenceLinksMap.get(thePath);
if (leafNodes == null) {
leafNodes = Sets.newHashSet();
theReferenceLinksMap.put(thePath, leafNodes);
}
Set<LeafNodeDefinition> leafNodes = theReferenceLinksMap.computeIfAbsent(thePath, k -> Sets.newHashSet());
leafNodes.addAll(theLeafNodesToAdd);
}
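
The null-check-then-put idiom is collapsed into Map.computeIfAbsent, which creates the set only when the key is missing. The shape of the change, reduced to its essentials:

    // before
    Set<LeafNodeDefinition> leafNodes = map.get(path);
    if (leafNodes == null) {
        leafNodes = Sets.newHashSet();
        map.put(path, leafNodes);
    }
    leafNodes.addAll(toAdd);

    // after
    map.computeIfAbsent(path, k -> Sets.newHashSet()).addAll(toAdd);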
@ -1822,7 +1799,6 @@ public class QueryStack {
ArrayList<IQueryParameterType> theOrValues,
SearchFilterParser.CompareOperation theOperation,
List<String> theQualifiers,
RequestDetails theRequest,
RequestPartitionId theRequestPartitionId,
SearchQueryBuilder theSqlBuilder) {
Condition containedCondition;
@ -1912,7 +1888,6 @@ public class QueryStack {
theQualifiers,
theOrValues,
theOperation,
theRequest,
theRequestPartitionId,
theSqlBuilder);
break;
@ -2396,7 +2371,7 @@ public class QueryStack {
}
@Nullable
public Condition searchForIdsWithAndOr(RequestDetails theRequestDetails, SearchForIdsParams theSearchForIdsParams) {
public Condition searchForIdsWithAndOr(SearchForIdsParams theSearchForIdsParams) {
if (theSearchForIdsParams.myAndOrParams.isEmpty()) {
return null;
@ -2432,7 +2407,6 @@ public class QueryStack {
theSearchForIdsParams.myResourceName,
theSearchForIdsParams.myParamName,
theSearchForIdsParams.myAndOrParams,
theSearchForIdsParams.myRequest,
theSearchForIdsParams.myRequestPartitionId);
} else {
return createPredicateTag(
@ -2460,7 +2434,6 @@ public class QueryStack {
theSearchForIdsParams.myResourceName,
theSearchForIdsParams.myParamName,
theSearchForIdsParams.myAndOrParams,
theSearchForIdsParams.myRequest,
theSearchForIdsParams.myRequestPartitionId);
}
}
@ -2520,7 +2493,6 @@ public class QueryStack {
String theResourceName,
String theParamName,
List<List<IQueryParameterType>> theAndOrParams,
RequestDetails theRequest,
RequestPartitionId theRequestPartitionId) {
List<Condition> andPredicates = new ArrayList<>();
RuntimeSearchParam nextParamDef = mySearchParamRegistry.getActiveSearchParam(
@ -2540,7 +2512,7 @@ public class QueryStack {
// FT: 2021-01-18 use operation 'gt', 'ge', 'le' or 'lt'
// to create the predicateDate instead of generic one with operation = null
SearchFilterParser.CompareOperation operation = null;
if (nextAnd.size() > 0) {
if (!nextAnd.isEmpty()) {
DateParam param = (DateParam) nextAnd.get(0);
operation = toOperation(param.getPrefix());
}
@ -2557,7 +2529,7 @@ public class QueryStack {
case QUANTITY:
for (List<? extends IQueryParameterType> nextAnd : theAndOrParams) {
SearchFilterParser.CompareOperation operation = null;
if (nextAnd.size() > 0) {
if (!nextAnd.isEmpty()) {
QuantityParam param = (QuantityParam) nextAnd.get(0);
operation = toOperation(param.getPrefix());
}
@ -2580,7 +2552,6 @@ public class QueryStack {
theSourceJoinColumn,
theResourceName,
theParamName,
theRequest,
theRequestPartitionId,
andPredicates,
nextAnd)) {
@ -2597,7 +2568,6 @@ public class QueryStack {
new ArrayList<>(),
nextAnd,
null,
theRequest,
theRequestPartitionId));
} else {
andPredicates.add(createPredicateReferenceForEmbeddedChainedSearchResource(
@ -2606,7 +2576,6 @@ public class QueryStack {
nextParamDef,
nextAnd,
null,
theRequest,
theRequestPartitionId,
embeddedChainedSearchModeEnum));
}
@ -2721,8 +2690,8 @@ public class QueryStack {
+ " parameter is disabled on this server");
}
Condition predicate = createPredicateFilter(
this, filter, theResourceName, theRequest, theRequestPartitionId);
Condition predicate =
createPredicateFilter(this, filter, theResourceName, theRequestPartitionId);
if (predicate != null) {
mySqlBuilder.addPredicate(predicate);
}
@ -2779,7 +2748,6 @@ public class QueryStack {
@Nullable DbColumn[] theSourceJoinColumn,
String theResourceName,
String theParamName,
RequestDetails theRequest,
RequestPartitionId theRequestPartitionId,
List<Condition> andPredicates,
List<? extends IQueryParameterType> nextAnd) {
@ -2795,7 +2763,7 @@ public class QueryStack {
.collect(Collectors.toList());
List<List<IQueryParameterType>> params = List.of(swappedParamTypes);
Condition predicate = createPredicateSearchParameter(
theSourceJoinColumn, theResourceName, fullName, params, theRequest, theRequestPartitionId);
theSourceJoinColumn, theResourceName, fullName, params, theRequestPartitionId);
andPredicates.add(predicate);
return true;
}
@ -2955,7 +2923,7 @@ public class QueryStack {
/**
* @see #isEligibleForEmbeddedChainedResourceSearch(String, String, List) for an explanation of the values in this enum
*/
enum EmbeddedChainedSearchModeEnum {
public enum EmbeddedChainedSearchModeEnum {
UPLIFTED_ONLY(true),
UPLIFTED_AND_REF_JOIN(true),
REF_JOIN_ONLY(false);
@ -3119,11 +3087,8 @@ public class QueryStack {
orValues.add(qp);
}
Set<LeafNodeDefinition> leafNodes = myChains.get(theSearchParams);
if (leafNodes == null) {
leafNodes = Sets.newHashSet();
myChains.put(theSearchParams, leafNodes);
}
Set<LeafNodeDefinition> leafNodes =
myChains.computeIfAbsent(theSearchParams, k -> Sets.newHashSet());
leafNodes.add(new LeafNodeDefinition(
nextSearchParam, orValues, nextTarget, nextParamName, "", qualifiersBranch));
} else {
@ -3244,16 +3209,11 @@ public class QueryStack {
List<List<IQueryParameterType>> myAndOrParams;
RequestDetails myRequest;
RequestPartitionId myRequestPartitionId;
ResourceTablePredicateBuilder myResourceTablePredicateBuilder;
public static SearchForIdsParams with() {
return new SearchForIdsParams();
}
public DbColumn[] getSourceJoinColumn() {
return mySourceJoinColumn;
}
public SearchForIdsParams setSourceJoinColumn(DbColumn[] theSourceJoinColumn) {
mySourceJoinColumn = theSourceJoinColumn;
return this;
@ -3277,10 +3237,6 @@ public class QueryStack {
return this;
}
public List<List<IQueryParameterType>> getAndOrParams() {
return myAndOrParams;
}
public SearchForIdsParams setAndOrParams(List<List<IQueryParameterType>> theAndOrParams) {
myAndOrParams = theAndOrParams;
return this;
@ -3303,15 +3259,5 @@ public class QueryStack {
myRequestPartitionId = theRequestPartitionId;
return this;
}
public ResourceTablePredicateBuilder getResourceTablePredicateBuilder() {
return myResourceTablePredicateBuilder;
}
public SearchForIdsParams setResourceTablePredicateBuilder(
ResourceTablePredicateBuilder theResourceTablePredicateBuilder) {
myResourceTablePredicateBuilder = theResourceTablePredicateBuilder;
return this;
}
}
}
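
Taken together, these hunks remove RequestDetails from the parameter lists of the predicate-building call chain: QueryStack reads its own myRequestDetails field at the single point of consumption, and callers no longer pass the request as a separate first argument to searchForIdsWithAndOr. The request still travels inside SearchForIdsParams via setRequest(...); the updated call shape (as it appears in SearchBuilder, below):

    Condition predicate = theQueryStack.searchForIdsWithAndOr(with().setResourceName(myResourceName)
        .setParamName(nextParamName)
        .setAndOrParams(andOrParams)
        .setRequest(theRequest)
        .setRequestPartitionId(myRequestPartitionId));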

View File

@ -340,13 +340,11 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
continue;
}
List<List<IQueryParameterType>> andOrParams = myParams.get(nextParamName);
Condition predicate = theQueryStack.searchForIdsWithAndOr(
theRequest,
with().setResourceName(myResourceName)
.setParamName(nextParamName)
.setAndOrParams(andOrParams)
.setRequest(theRequest)
.setRequestPartitionId(myRequestPartitionId));
Condition predicate = theQueryStack.searchForIdsWithAndOr(with().setResourceName(myResourceName)
.setParamName(nextParamName)
.setAndOrParams(andOrParams)
.setRequest(theRequest)
.setRequestPartitionId(myRequestPartitionId));
if (predicate != null) {
theSearchSqlBuilder.addPredicate(predicate);
}

View File

@ -36,8 +36,6 @@ import com.healthmarketscience.sqlbuilder.Condition;
import com.healthmarketscience.sqlbuilder.dbspec.basic.DbColumn;
import jakarta.annotation.Nullable;
import org.hl7.fhir.instance.model.api.IIdType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import java.util.HashSet;
@ -50,7 +48,6 @@ import static org.apache.commons.lang3.ObjectUtils.defaultIfNull;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
public class ResourceIdPredicateBuilder extends BasePredicateBuilder {
private static final Logger ourLog = LoggerFactory.getLogger(ResourceIdPredicateBuilder.class);
@Autowired
private IIdHelperService<JpaPid> myIdHelperService;

View File

@ -549,9 +549,8 @@ public class ResourceLinkPredicateBuilder extends BaseJoiningPredicateBuilder im
List<Condition> andPredicates = new ArrayList<>();
List<List<IQueryParameterType>> chainParamValues = Collections.singletonList(orValues);
andPredicates.add(childQueryFactory.searchForIdsWithAndOr(
theRequest,
with().setSourceJoinColumn(getJoinColumnsForTarget())
andPredicates.add(
childQueryFactory.searchForIdsWithAndOr(with().setSourceJoinColumn(getJoinColumnsForTarget())
.setResourceName(subResourceName)
.setParamName(chain)
.setAndOrParams(chainParamValues)

View File

@ -19,6 +19,7 @@
*/
package ca.uhn.fhir.jpa.search.builder.sql;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.search.builder.QueryStack;
import ca.uhn.fhir.jpa.search.builder.predicate.CoordsPredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.DatePredicateBuilder;
@ -30,13 +31,12 @@ import ca.uhn.fhir.jpa.search.builder.predicate.StringPredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.TokenPredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.UriPredicateBuilder;
import ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import jakarta.annotation.Nonnull;
public class PredicateBuilderFactory {
private static final Logger ourLog = LoggerFactory.getLogger(PredicateBuilderFactory.class);
@Nonnull
public static ICanMakeMissingParamPredicate createPredicateBuilderForParamType(
RestSearchParameterTypeEnum theParamType, SearchQueryBuilder theBuilder, QueryStack theQueryStack) {
switch (theParamType) {
@ -59,51 +59,40 @@ public class PredicateBuilderFactory {
return createCoordsPredicateBuilder(theBuilder);
case COMPOSITE:
default:
// we don't expect to see this
ourLog.error("Invalid param type " + theParamType.name());
return null;
throw new InternalErrorException(Msg.code(2593) + "Invalid param type " + theParamType.name());
}
}
private static StringPredicateBuilder createStringPredicateBuilder(SearchQueryBuilder theBuilder) {
StringPredicateBuilder sp = theBuilder.getSqlBuilderFactory().stringIndexTable(theBuilder);
return sp;
return theBuilder.getSqlBuilderFactory().stringIndexTable(theBuilder);
}
private static NumberPredicateBuilder createNumberPredicateBuilder(SearchQueryBuilder theBuilder) {
NumberPredicateBuilder np = theBuilder.getSqlBuilderFactory().numberIndexTable(theBuilder);
return np;
return theBuilder.getSqlBuilderFactory().numberIndexTable(theBuilder);
}
private static QuantityPredicateBuilder createQuantityPredicateBuilder(SearchQueryBuilder theBuilder) {
QuantityPredicateBuilder qp = theBuilder.getSqlBuilderFactory().quantityIndexTable(theBuilder);
return qp;
return theBuilder.getSqlBuilderFactory().quantityIndexTable(theBuilder);
}
private static CoordsPredicateBuilder createCoordsPredicateBuilder(SearchQueryBuilder theBuilder) {
CoordsPredicateBuilder cp = theBuilder.getSqlBuilderFactory().coordsPredicateBuilder(theBuilder);
return cp;
return theBuilder.getSqlBuilderFactory().coordsPredicateBuilder(theBuilder);
}
private static TokenPredicateBuilder createTokenPredicateBuilder(SearchQueryBuilder theBuilder) {
TokenPredicateBuilder tp = theBuilder.getSqlBuilderFactory().tokenIndexTable(theBuilder);
return tp;
return theBuilder.getSqlBuilderFactory().tokenIndexTable(theBuilder);
}
private static DatePredicateBuilder createDatePredicateBuilder(SearchQueryBuilder theBuilder) {
DatePredicateBuilder dp = theBuilder.getSqlBuilderFactory().dateIndexTable(theBuilder);
return dp;
return theBuilder.getSqlBuilderFactory().dateIndexTable(theBuilder);
}
private static UriPredicateBuilder createUriPredicateBuilder(SearchQueryBuilder theBuilder) {
UriPredicateBuilder up = theBuilder.getSqlBuilderFactory().uriIndexTable(theBuilder);
return up;
return theBuilder.getSqlBuilderFactory().uriIndexTable(theBuilder);
}
private static ResourceLinkPredicateBuilder createReferencePredicateBuilder(
QueryStack theQueryStack, SearchQueryBuilder theBuilder) {
ResourceLinkPredicateBuilder retVal =
theBuilder.getSqlBuilderFactory().referenceIndexTable(theQueryStack, theBuilder, false);
return retVal;
return theBuilder.getSqlBuilderFactory().referenceIndexTable(theQueryStack, theBuilder, false);
}
}
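
Returning null from the factory only failed later, as a NullPointerException at the call site; the replacement fails fast with a coded, searchable message. The pattern in isolation (a sketch, not the full factory):

    @Nonnull
    static ICanMakeMissingParamPredicate create(RestSearchParameterTypeEnum theParamType, SearchQueryBuilder theBuilder) {
        switch (theParamType) {
            case STRING:
                return theBuilder.getSqlBuilderFactory().stringIndexTable(theBuilder);
            // ... other supported types ...
            case COMPOSITE:
            default:
                // Fail fast; Msg.code(2593) makes the error uniquely searchable
                throw new InternalErrorException(Msg.code(2593) + "Invalid param type " + theParamType.name());
        }
    }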

View File

@ -177,7 +177,7 @@ public class InstanceReindexServiceImpl implements IInstanceReindexService {
List<String> messages = new ArrayList<>();
JpaPid pid = entity.getPersistentId();
JpaPid pid = entity.getId();
ReindexOutcome outcome = dao.reindex(pid, new ReindexParameters(), theRequestDetails, new TransactionDetails());
messages.add("Reindex completed in " + sw);

View File

@ -3,6 +3,7 @@ package ca.uhn.fhir.jpa.search.builder;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import ca.uhn.fhir.rest.server.util.FhirContextSearchParamRegistry;
import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
@ -14,9 +15,12 @@ import org.mockito.Mock;
import org.mockito.Spy;
import org.mockito.junit.jupiter.MockitoExtension;
import java.util.Collection;
import java.util.List;
import java.util.Set;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertSame;
import static org.mockito.Mockito.when;
@ExtendWith(MockitoExtension.class)
@ -68,4 +72,58 @@ class SearchBuilderTest {
assertThat(types).hasSize(146);
}
@Test
void testPartitionBySizeAndPartitionId_ReuseIfSmallEnoughAndAllSamePartition() {
List<JpaPid> input = List.of(
JpaPid.fromId(100L, 1),
JpaPid.fromId(101L, 1)
);
Iterable<Collection<JpaPid>> actual = SearchBuilder.partitionBySizeAndPartitionId(input, 3);
assertSame(input, actual.iterator().next());
}
@Test
void testPartitionBySizeAndPartitionId_Partitioned() {
List<JpaPid> input = List.of(
JpaPid.fromId(0L),
JpaPid.fromId(1L),
JpaPid.fromId(2L),
JpaPid.fromId(3L),
JpaPid.fromId(100L, 1),
JpaPid.fromId(101L, 1),
JpaPid.fromId(102L, 1),
JpaPid.fromId(103L, 1),
JpaPid.fromId(200L, 2),
JpaPid.fromId(201L, 2),
JpaPid.fromId(202L, 2),
JpaPid.fromId(203L, 2)
);
// Test
Iterable<Collection<JpaPid>> actual = SearchBuilder.partitionBySizeAndPartitionId(input, 3);
// Verify
assertThat(actual).asList().containsExactlyInAnyOrder(
List.of(
JpaPid.fromId(0L),
JpaPid.fromId(1L),
JpaPid.fromId(2L)),
List.of(
JpaPid.fromId(3L)),
List.of(
JpaPid.fromId(100L, 1),
JpaPid.fromId(101L, 1),
JpaPid.fromId(102L, 1)),
List.of(
JpaPid.fromId(103L, 1)),
List.of(
JpaPid.fromId(200L, 2),
JpaPid.fromId(201L, 2),
JpaPid.fromId(202L, 2)),
List.of(
JpaPid.fromId(203L, 2)
)
);
}
}
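
A possible shape for partitionBySizeAndPartitionId, inferred purely from these two tests and not the actual implementation (the extractor parameter and all names here are hypothetical): group IDs by partition, chunk each group to the maximum size, and hand back the input list itself when it already fits in one same-partition chunk.

    // imports: java.util.*, java.util.function.Function, com.google.common.collect.Lists
    static <T> Iterable<Collection<T>> partitionBySizeAndPartitionId(
            List<T> theInput, int theMaxSize, Function<T, Integer> thePartitionId) {
        // Fast path: small enough and all in one partition - reuse the input unchanged,
        // which is what the assertSame(...) in the first test verifies
        if (theInput.size() <= theMaxSize
                && theInput.stream().map(thePartitionId).distinct().count() <= 1) {
            return List.<Collection<T>>of(theInput);
        }
        // Group by partition ID (null key = default partition), preserving encounter order
        Map<Integer, List<T>> byPartition = new LinkedHashMap<>();
        for (T next : theInput) {
            byPartition.computeIfAbsent(thePartitionId.apply(next), k -> new ArrayList<>()).add(next);
        }
        // Chunk each same-partition group to at most theMaxSize entries
        List<Collection<T>> chunks = new ArrayList<>();
        for (List<T> group : byPartition.values()) {
            chunks.addAll(Lists.partition(group, theMaxSize));
        }
        return chunks;
    }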

View File

@ -8,6 +8,7 @@ import ca.uhn.fhir.jpa.fql.executor.IHfqlExecutor;
import ca.uhn.fhir.jpa.fql.parser.HfqlStatement;
import ca.uhn.fhir.jpa.fql.provider.HfqlRestProvider;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.test.utilities.LoggingExtension;
import ca.uhn.fhir.test.utilities.server.RestfulServerExtension;
import com.google.common.collect.Lists;
import org.apache.commons.dbcp2.BasicDataSource;
@ -68,6 +69,8 @@ public class JdbcDriverTest {
public RestfulServerExtension myServer = new RestfulServerExtension(ourCtx)
.registerProvider(myProvider)
.registerInterceptor(ourHeaderCaptureInterceptor);
@RegisterExtension
public LoggingExtension myLoggingExtension = new LoggingExtension();
private BasicDataSource myDs;

View File

@ -180,8 +180,8 @@ public class MdmLinkUpdaterSvcImpl implements IMdmLinkUpdaterSvc {
// with the link broken, the golden resource has delta info from a resource
// that is no longer matched to it; we need to remove this delta. But it's
// easier to just rebuild the resource from scratch using survivorship rules/current links
goldenResource =
myMdmSurvivorshipService.rebuildGoldenResourceWithSurvivorshipRules(goldenResource, mdmContext);
goldenResource = myMdmSurvivorshipService.rebuildGoldenResourceWithSurvivorshipRules(
theParams.getRequestDetails(), goldenResource, mdmContext);
}
if (myInterceptorBroadcaster.hasHooks(Pointcut.MDM_POST_UPDATE_LINK)) {

View File

@ -72,7 +72,7 @@ public class FindCandidateByEidSvc extends BaseCandidateFinder {
if (isNoMatch(foundGoldenResource, theIncomingResource)) {
continue;
}
IResourcePersistentId pidOrNull =
IResourcePersistentId<?> pidOrNull =
myIdHelperService.getPidOrNull(RequestPartitionId.allPartitions(), foundGoldenResource);
MatchedGoldenResourceCandidate mpc =
new MatchedGoldenResourceCandidate(pidOrNull, MdmMatchOutcome.EID_MATCH);

View File

@ -59,7 +59,6 @@ import java.util.Date;
import java.util.List;
import java.util.Optional;
import java.util.function.Function;
import java.util.function.Supplier;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.slf4j.LoggerFactory.getLogger;

View File

@ -10,7 +10,6 @@ import ca.uhn.fhir.mdm.api.MdmMatchResultEnum;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import ca.uhn.fhir.test.utilities.RangeTestHelper;
import ca.uhn.fhir.util.ParametersUtil;
import ca.uhn.fhir.util.StopWatch;
import org.apache.commons.lang3.StringUtils;

View File

@ -82,7 +82,7 @@ class MdmSurvivorshipSvcImplIT extends BaseMdmR4Test {
myMdmLinkDaoSvc.createOrUpdateLinkEntity(goldenPatient, frankPatient1, MdmMatchOutcome.NEW_GOLDEN_RESOURCE_MATCH, MdmLinkSourceEnum.MANUAL, createContextForCreate("Patient"));
myMdmLinkDaoSvc.createOrUpdateLinkEntity(goldenPatient, frankPatient2, MdmMatchOutcome.NEW_GOLDEN_RESOURCE_MATCH, MdmLinkSourceEnum.MANUAL, createContextForCreate("Patient"));
myMdmSurvivorshipService.rebuildGoldenResourceWithSurvivorshipRules(goldenPatient, new MdmTransactionContext(MdmTransactionContext.OperationType.UPDATE_LINK));
myMdmSurvivorshipService.rebuildGoldenResourceWithSurvivorshipRules(mySrd, goldenPatient, new MdmTransactionContext(MdmTransactionContext.OperationType.UPDATE_LINK));
}
@Test
@ -169,7 +169,7 @@ class MdmSurvivorshipSvcImplIT extends BaseMdmR4Test {
}
// test
myMdmSurvivorshipService.rebuildGoldenResourceWithSurvivorshipRules(goldenPatient, transactionContext);
myMdmSurvivorshipService.rebuildGoldenResourceWithSurvivorshipRules(mySrd, goldenPatient, transactionContext);
IBundleProvider provider = myPatientDao.search(new SearchParameterMap().setLoadSynchronous(true),
new SystemRequestDetails().setRequestPartitionId(RequestPartitionId.allPartitions()));

View File

@ -25,6 +25,7 @@ import ca.uhn.fhir.mdm.util.GoldenResourceHelper;
import ca.uhn.fhir.mdm.util.MdmPartitionHelper;
import ca.uhn.fhir.mdm.util.MdmResourceUtil;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.Patient;
@ -184,6 +185,7 @@ public class MdmSurvivorshipSvcImplTest {
// test
Patient goldenPatientRebuilt = mySvc.rebuildGoldenResourceWithSurvivorshipRules(
new SystemRequestDetails(),
goldenPatient,
createTransactionContext()
);

View File

@ -157,6 +157,8 @@ public class TerminologyLoaderSvcIntegrationDstu3Test extends BaseJpaDstu3Test {
Parameters parameters = (Parameters) result.toParameters(myFhirContext, null);
ourLog.debug(myFhirContext.newXmlParser().setPrettyPrint(true).encodeResourceToString(parameters));
logAllConcepts();
logAllConceptProperties();
Optional<Coding> propertyValue = findProperty(parameters, "SCALE_TYP");
assertThat(propertyValue).isPresent();

View File

@ -184,6 +184,8 @@ public abstract class BasePartitioningR4Test extends BaseJpaR4SystemTest {
mySearchParameterDao.update(sp, mySrd);
mySearchParamRegistry.forceRefresh();
assertNoRemainingPartitionIds();
}
protected void createNonUniqueComboSp() {
@ -288,6 +290,18 @@ public abstract class BasePartitioningR4Test extends BaseJpaR4SystemTest {
};
}
protected ICreationArgument withReadWritePartitions(Integer thePartitionId) {
return t -> {
if (thePartitionId != null) {
addReadPartition(thePartitionId);
addCreatePartition(thePartitionId, null);
} else {
addReadDefaultPartition();
addCreateDefaultPartition();
}
};
}
@Interceptor
public static class MyReadWriteInterceptor extends MyWriteInterceptor {

View File

@ -687,6 +687,9 @@ public class FhirResourceDaoCreatePlaceholdersR4Test extends BaseJpaR4Test {
patient.setActive(true);
myPatientDao.update(patient);
logAllResources();
logAllResourceVersions();
// observation (with version 2)
Observation obs = new Observation();
obs.setId("Observation/DEF");

View File

@ -25,6 +25,7 @@ import ca.uhn.fhir.jpa.model.util.SearchParamHash;
import ca.uhn.fhir.jpa.model.util.UcumServiceUtil;
import ca.uhn.fhir.jpa.reindex.ReindexStepV1Test;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.searchparam.submit.interceptor.SearchParamValidatingInterceptor;
import ca.uhn.fhir.jpa.test.BaseJpaR4Test;
import ca.uhn.fhir.rest.param.BaseParam;
import ca.uhn.fhir.rest.param.DateParam;
@ -79,6 +80,16 @@ public class FhirResourceDaoR4IndexStorageOptimizedTest extends BaseJpaR4Test {
@Autowired
private SearchConfig mySearchConfig;
@Override
@BeforeEach
public void before() throws Exception {
super.before();
// We rely on this interceptor being in place, and it should be unless some cheeky other test
// has removed it
assertEquals(1, myInterceptorRegistry.getAllRegisteredInterceptors().stream().filter(t -> t instanceof SearchParamValidatingInterceptor).count());
}
@AfterEach
void cleanUp() {
myPartitionSettings.setIncludePartitionInSearchHashes(false);

View File

@ -1492,6 +1492,10 @@ public class FhirResourceDaoR4ValidateTest extends BaseJpaR4Test {
return (OperationOutcome) e.getOperationOutcome();
}
break;
case RDF:
break;
case NDJSON:
break;
}
throw new IllegalStateException(); // shouldn't get here
@ -1502,8 +1506,6 @@ public class FhirResourceDaoR4ValidateTest extends BaseJpaR4Test {
// setup
IParser parser = myFhirContext.newJsonParser();
myLogbackTestExtension.setUp(Level.WARN);
String obsStr = """
{
"resourceType": "Observation",

View File

@ -4,6 +4,7 @@ import static org.junit.jupiter.api.Assertions.assertTrue;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.searchparam.submit.interceptor.SearchParamValidatingInterceptor;
import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import org.hl7.fhir.r4.model.Enumerations;
@ -32,6 +33,10 @@ public class PartitioningNonNullDefaultPartitionR4Test extends BasePartitioningR
super.before();
myPartitionSettings.setDefaultPartitionId(1);
// This test relies on this interceptor already being in place, which it should be unless
// another test misbehaved
assertEquals(1, myInterceptorRegistry.getAllRegisteredInterceptors().stream().filter(t->t instanceof SearchParamValidatingInterceptor).count());
}
@AfterEach

View File

@ -47,7 +47,7 @@ public class DeleteConflictServiceR4Test extends BaseJpaR4Test {
@AfterEach
public void afterUnregisterInterceptor() {
myInterceptorRegistry.unregisterAllInterceptors();
myInterceptorRegistry.unregisterInterceptor(myDeleteInterceptor);
}
@Test

View File

@ -6,7 +6,6 @@ import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.interceptor.api.Hook;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao;
import ca.uhn.fhir.jpa.entity.Search;
import ca.uhn.fhir.jpa.model.entity.EntityIndexStatusEnum;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamString;
@ -45,7 +44,6 @@ import org.hl7.fhir.r4.model.Reference;
import org.hl7.fhir.r4.model.SearchParameter;
import org.hl7.fhir.r4.model.SearchParameter.XPathUsageType;
import org.hl7.fhir.r4.model.StringType;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.springframework.transaction.TransactionStatus;
@ -70,15 +68,6 @@ public class ResourceProviderCustomSearchParamR4Test extends BaseResourceProvide
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(ResourceProviderCustomSearchParamR4Test.class);
@Override
@AfterEach
public void after() throws Exception {
super.after();
myStorageSettings.setDefaultSearchParamsCanBeOverridden(new JpaStorageSettings().isDefaultSearchParamsCanBeOverridden());
myStorageSettings.setAllowContainsSearches(new JpaStorageSettings().isAllowContainsSearches());
}
@BeforeEach
@Override
public void before() throws Exception {

View File

@ -1441,6 +1441,7 @@ public class ResourceProviderR4EverythingTest extends BaseResourceProviderR4Test
}
private List<IIdType> validateAndGetIdListFromBundle(Bundle theBundle, int theSize) {
ourLog.info("Resource IDs: {}", theBundle.getEntry().stream().map(t->t.getResource().getIdElement().toUnqualifiedVersionless().getValue()).toList());
assertEquals(Bundle.BundleType.SEARCHSET, theBundle.getType());
assertThat(theBundle.getEntry()).hasSize(theSize);
return toUnqualifiedVersionlessIds(theBundle);

View File

@ -30,8 +30,6 @@ import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.RegisterExtension;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.mock.mockito.SpyBean;
import org.springframework.context.annotation.Bean;
@ -48,10 +46,9 @@ import static org.junit.jupiter.api.Assertions.assertFalse;
@ContextConfiguration(classes = {AsyncSubscriptionMessageSubmissionIT.SpringConfig.class})
public class AsyncSubscriptionMessageSubmissionIT extends BaseSubscriptionsR4Test {
private static final Logger ourLog = LoggerFactory.getLogger(AsyncSubscriptionMessageSubmissionIT.class);
@RegisterExtension
public LogbackTestExtension myLogbackTestExtension = new LogbackTestExtension(AsyncResourceModifiedSubmitterSvc.class);
public LogbackTestExtension myLogbackTestExtension = new LogbackTestExtension(AsyncResourceModifiedSubmitterSvc.class.getName(), Level.DEBUG);
@SpyBean
IResourceModifiedConsumer myResourceModifiedConsumer;
@ -98,8 +95,6 @@ public class AsyncSubscriptionMessageSubmissionIT extends BaseSubscriptionsR4Tes
@Test
public void runDeliveryPass_withManyResources_isBatchedAndKeepsResourceUsageDown() throws JsonProcessingException, InterruptedException {
// setup
myLogbackTestExtension.setUp(Level.DEBUG);
String resourceType = "Patient";
int factor = 5;
int numberOfResourcesToCreate = factor * AsyncResourceModifiedSubmitterSvc.MAX_LIMIT;

View File

@ -1,16 +1,17 @@
package ca.uhn.fhir.batch2.jobs.models;
import ca.uhn.fhir.batch2.jobs.chunk.TypedPidJson;
import ca.uhn.fhir.util.JsonUtil;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.*;
class BatchResourceIdTest {
class TypedPidJsonTest {
@Test
public void testEstimateSize() {
BatchResourceId id = new BatchResourceId();
id.setId("12345");
TypedPidJson id = new TypedPidJson();
id.setPid("12345");
id.setResourceType("Patient");
String serialized = JsonUtil.serialize(id, false);
assertEquals(serialized.length(), id.estimateSerializedSize(), serialized);

View File

@ -7,6 +7,7 @@ import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.util.HapiExtensions;
import org.apache.commons.lang3.Validate;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
import org.hl7.fhir.r5.model.BooleanType;
@ -22,6 +23,7 @@ import org.springframework.beans.factory.annotation.Autowired;
import jakarta.annotation.Nonnull;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import static org.assertj.core.api.Assertions.assertThat;
@ -344,7 +346,8 @@ public class InstanceReindexServiceImplR5Test extends BaseJpaR5Test {
}
@Nonnull
private static List<Parameters.ParametersParameterComponent> findIndexes(Parameters theResponse, String theParamName, int theExpectedSize, String theSectionName) {
private static List<Parameters.ParametersParameterComponent> findIndexes(Parameters theResponse, @Nonnull String theParamName, int theExpectedSize, String theSectionName) {
Validate.notBlank(theParamName, "theParamName must not be blank");
List<Parameters.ParametersParameterComponent> indexes = theResponse.getParameters(theSectionName);
assertThat(indexes).hasSize(1);
@ -352,7 +355,7 @@ public class InstanceReindexServiceImplR5Test extends BaseJpaR5Test {
.get(0)
.getPart()
.stream()
.filter(t -> t.getName().equals(theParamName))
.filter(t -> Objects.equals(theParamName, t.getName()))
.toList();
assertThat(indexInstances).hasSize(theExpectedSize);

View File

@ -20,6 +20,7 @@
package ca.uhn.fhir.jpa.embedded;
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.util.SqlUtil;
import jakarta.annotation.PreDestroy;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
@ -29,10 +30,8 @@ import org.springframework.jdbc.core.JdbcTemplate;
import java.sql.Connection;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import javax.sql.DataSource;
/**
@ -108,7 +107,7 @@ public abstract class JpaEmbeddedDatabase {
}
public void executeSqlAsBatch(String theSql) {
List<String> statements = Arrays.stream(theSql.split(";")).collect(Collectors.toList());
List<String> statements = SqlUtil.splitSqlFileIntoStatements(theSql);
executeSqlAsBatch(statements);
}

View File

@ -419,6 +419,7 @@ public abstract class BaseJpaTest extends BaseTest {
JpaStorageSettings defaultConfig = new JpaStorageSettings();
myStorageSettings.setAccessMetaSourceInformationFromProvenanceTable(defaultConfig.isAccessMetaSourceInformationFromProvenanceTable());
myStorageSettings.setAllowContainsSearches(defaultConfig.isAllowContainsSearches());
myStorageSettings.setDefaultSearchParamsCanBeOverridden(defaultConfig.isDefaultSearchParamsCanBeOverridden());
myStorageSettings.setDeleteEnabled(defaultConfig.isDeleteEnabled());
myStorageSettings.setHibernateSearchIndexSearchParams(defaultConfig.isHibernateSearchIndexSearchParams());
myStorageSettings.setHibernateSearchIndexFullText(defaultConfig.isHibernateSearchIndexFullText());
@ -428,6 +429,7 @@ public abstract class BaseJpaTest extends BaseTest {
myStorageSettings.setPreExpandValueSets(defaultConfig.isPreExpandValueSets());
myStorageSettings.getTreatBaseUrlsAsLocal().clear();
ParserOptions defaultParserOptions = new ParserOptions();
myFhirContext.getParserOptions().setStripVersionsFromReferences(defaultParserOptions.isStripVersionsFromReferences());

View File

@ -75,15 +75,6 @@ public class HapiFhirJpaMigrationTasksTest {
// Create migrator and initialize schema using a static version
// of the schema from the 7.2.0 release
HapiFhirJpaMigrationTasks tasks = new HapiFhirJpaMigrationTasks(Set.of());
// This is just logging to try and track down an intermittent failure
for (VersionEnum next : VersionEnum.values()) {
int size = tasks.getAllTasks(next).size();
if (size > 0) {
ourLog.info("Version {} has {} tasks", next, size);
}
}
HapiMigrator migrator = new HapiMigrator(MIGRATION_TABLE_NAME, myDataSource, DriverTypeEnum.H2_EMBEDDED);
migrator.addTask(new InitializeSchemaTask("7.2.0", "20180115.0",
new SchemaInitializationProvider(
@ -142,30 +133,30 @@ public class HapiFhirJpaMigrationTasksTest {
"""
insert into
HFJ_RESOURCE (
RES_DELETED_AT,
RES_VERSION,
FHIR_ID,
HAS_TAGS,
RES_PUBLISHED,
RES_UPDATED,
SP_HAS_LINKS,
HASH_SHA256,
SP_INDEX_STATUS,
RES_LANGUAGE,
SP_CMPSTR_UNIQ_PRESENT,
SP_COORDS_PRESENT,
SP_DATE_PRESENT,
SP_NUMBER_PRESENT,
SP_QUANTITY_PRESENT,
SP_STRING_PRESENT,
SP_TOKEN_PRESENT,
SP_URI_PRESENT,
SP_QUANTITY_NRML_PRESENT,
RES_TYPE,
RES_VER,
RES_ID)
values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
""",
new AbstractLobCreatingPreparedStatementCallback(new DefaultLobHandler()) {
@Override
protected void setValues(@Nonnull PreparedStatement thePs, @Nonnull LobCreator theLobCreator) throws SQLException {

View File

@ -24,7 +24,7 @@ import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId;
import java.util.Date;
public interface IMdmLink<T extends IResourcePersistentId> {
public interface IMdmLink<T extends IResourcePersistentId<?>> {
T getId();
IMdmLink<T> setId(T theId);

View File

@ -20,6 +20,7 @@
package ca.uhn.fhir.mdm.api;
import ca.uhn.fhir.mdm.model.MdmTransactionContext;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import org.hl7.fhir.instance.model.api.IBase;
/**
@ -78,5 +79,5 @@ public interface IMdmSurvivorshipService {
* @param <T> - Resource type to apply the survivorship rules to
*/
<T extends IBase> T rebuildGoldenResourceWithSurvivorshipRules(
T theGoldenResource, MdmTransactionContext theMdmTransactionContext);
RequestDetails theRequestDetails, T theGoldenResource, MdmTransactionContext theMdmTransactionContext);
}
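
Callers must now pass the active request through as the first argument; for example, the test updates earlier in this commit become:

    myMdmSurvivorshipService.rebuildGoldenResourceWithSurvivorshipRules(
        mySrd, goldenPatient, transactionContext);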

View File

@ -41,7 +41,7 @@ import java.util.Date;
import java.util.List;
import java.util.Optional;
public interface IMdmLinkDao<P extends IResourcePersistentId, M extends IMdmLink<P>> {
public interface IMdmLinkDao<P extends IResourcePersistentId<?>, M extends IMdmLink<P>> {
int deleteWithAnyReferenceToPid(P thePid);
int deleteWithAnyReferenceToPidAndMatchResultNot(P thePid, MdmMatchResultEnum theMatchResult);

View File

@ -36,6 +36,7 @@ import ca.uhn.fhir.mdm.model.MdmTransactionContext;
import ca.uhn.fhir.mdm.model.mdmevents.MdmLinkJson;
import ca.uhn.fhir.mdm.util.GoldenResourceHelper;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId;
import ca.uhn.fhir.util.TerserUtil;
@ -108,13 +109,13 @@ public class MdmSurvivorshipSvcImpl implements IMdmSurvivorshipService {
@SuppressWarnings({"rawtypes", "unchecked"})
@Override
public <T extends IBase> T rebuildGoldenResourceWithSurvivorshipRules(
T theGoldenResourceBase, MdmTransactionContext theMdmTransactionContext) {
RequestDetails theRequestDetails, T theGoldenResourceBase, MdmTransactionContext theMdmTransactionContext) {
IBaseResource goldenResource = (IBaseResource) theGoldenResourceBase;
// we want a list of source ids linked to this
// golden resource id; sorted and filtered for only MATCH results
Stream<IBaseResource> sourceResources =
getMatchedSourceIdsByLinkUpdateDate(goldenResource, theMdmTransactionContext);
getMatchedSourceIdsByLinkUpdateDate(theRequestDetails, goldenResource, theMdmTransactionContext);
IBaseResource toSave = myGoldenResourceHelper.createGoldenResourceFromMdmSourceResource(
(IAnyResource) goldenResource,
@ -145,7 +146,9 @@ public class MdmSurvivorshipSvcImpl implements IMdmSurvivorshipService {
@SuppressWarnings("rawtypes")
private Stream<IBaseResource> getMatchedSourceIdsByLinkUpdateDate(
IBaseResource theGoldenResource, MdmTransactionContext theMdmTransactionContext) {
RequestDetails theRequestDetails,
IBaseResource theGoldenResource,
MdmTransactionContext theMdmTransactionContext) {
String resourceType = theGoldenResource.fhirType();
IFhirResourceDao<?> dao = myDaoRegistry.getResourceDao(resourceType);

View File

@ -255,6 +255,8 @@ public class HapiMigrator {
}
public void addTask(BaseTask theTask) {
// Don't add a check for unit test mode here - We call this from
// tests which expect tasks to always be added
myTaskList.add(theTask);
}

View File

@ -0,0 +1,49 @@
/*-
* #%L
* HAPI FHIR Server - SQL Migration
* %%
* Copyright (C) 2014 - 2024 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package ca.uhn.fhir.jpa.migrate.util;
import jakarta.annotation.Nonnull;
import org.apache.commons.lang3.StringUtils;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
public class SqlUtil {
/**
* Non-instantiable
*/
private SqlUtil() {
// nothing
}
@Nonnull
public static List<String> splitSqlFileIntoStatements(String theSql) {
String sqlWithoutComments = Arrays.stream(theSql.split("\n"))
.filter(t -> !t.trim().startsWith("--"))
.collect(Collectors.joining("\n"));
return Arrays.stream(sqlWithoutComments.split(";"))
.filter(StringUtils::isNotBlank)
.map(StringUtils::trim)
.collect(Collectors.toList());
}
}
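
A quick usage sketch of the new utility (input inlined for illustration): full-line "--" comments are dropped and blank statements discarded.

    List<String> statements = SqlUtil.splitSqlFileIntoStatements(
            "-- schema bootstrap\nCREATE TABLE FOO (ID INT);\nDROP TABLE FOO;\n");
    // statements: ["CREATE TABLE FOO (ID INT)", "DROP TABLE FOO"]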

View File

@ -0,0 +1,31 @@
package ca.uhn.fhir.jpa.migrate.util;
import org.junit.jupiter.api.Test;
import java.util.List;
import static org.junit.jupiter.api.Assertions.*;
class SqlUtilTest {
@Test
public void testSplit() {
String input = """
select
*
-- COMMENT
FROM FOO;
-- Also a comment
;
select BLAH
""";
List<String> statements = SqlUtil.splitSqlFileIntoStatements(input);
assertEquals(2, statements.size());
assertEquals("select * FROM FOO", statements.get(0).replace("\n ", " "));
assertEquals("select BLAH", statements.get(1).replace("\n ", " "));
}
}

View File

@ -128,6 +128,13 @@ public class BulkDataExportProvider {
@Autowired
private IRequestPartitionHelperSvc myRequestPartitionHelperService;
/**
* Constructor
*/
public BulkDataExportProvider() {
super();
}
/**
* $export
*/

View File

@ -49,9 +49,7 @@ public class DeleteCodeSystemVersionStep
throws JobExecutionFailedException {
CodeSystemVersionPIDResult versionPidResult = theStepExecutionDetails.getData();
long versionId = versionPidResult.getCodeSystemVersionPID();
myITermCodeSystemSvc.deleteCodeSystemVersion(versionId);
myITermCodeSystemSvc.deleteCodeSystemVersion(versionPidResult.getCodeSystemVersionPID());
theDataSink.accept(versionPidResult);
return RunOutcome.SUCCESS;

View File

@ -29,6 +29,7 @@ import ca.uhn.fhir.batch2.progress.InstanceProgress;
import ca.uhn.fhir.batch2.progress.JobInstanceProgressCalculator;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.model.api.IModelJson;
import ca.uhn.fhir.model.primitive.InstantDt;
import ca.uhn.fhir.util.JsonUtil;
import ca.uhn.fhir.util.Logs;
import org.apache.commons.lang3.Validate;
@ -66,9 +67,10 @@ public class ReductionStepDataSink<PT extends IModelJson, IT extends IModelJson,
boolean changed = myJobPersistence.updateInstance(instanceId, instance -> {
Validate.validState(
StatusEnum.FINALIZE.equals(instance.getStatus()),
"Job %s must be in FINALIZE state. In %s",
"Job %s must be in FINALIZE state. In %s with update time %s",
instanceId,
instance.getStatus());
instance.getStatus(),
new InstantDt(instance.getUpdateTime()));
if (instance.getReport() != null) {
// last in wins - so we won't throw

View File

@ -482,7 +482,7 @@ public class JobMaintenanceServiceImplTest extends BaseBatch2Test {
JobInstance instance = createInstance();
instance.setCurrentGatedStepId(STEP_2);
myLogCapture.setUp(Level.ERROR);
myLogCapture.setLoggerLevel(Level.ERROR);
// when
doAnswer(args -> {

View File

@ -25,6 +25,7 @@ import ca.uhn.fhir.jpa.api.model.ExpungeOutcome;
import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId;
import ca.uhn.fhir.rest.api.server.storage.IResourceVersionPersistentId;
import com.google.common.annotations.VisibleForTesting;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -38,19 +39,20 @@ import java.util.concurrent.atomic.AtomicInteger;
@Component
@Scope("prototype")
public class ExpungeOperation implements Callable<ExpungeOutcome> {
private static final Logger ourLog = LoggerFactory.getLogger(ExpungeService.class);
public class ExpungeOperation<T extends IResourcePersistentId<?>, V extends IResourceVersionPersistentId>
implements Callable<ExpungeOutcome> {
private static final Logger ourLog = LoggerFactory.getLogger(ExpungeOperation.class);
public static final String PROCESS_NAME = "Expunging";
public static final String THREAD_PREFIX = "expunge";
@Autowired
private IResourceExpungeService myResourceExpungeService;
private IResourceExpungeService<T, V> myResourceExpungeService;
@Autowired
private JpaStorageSettings myStorageSettings;
private final String myResourceName;
private final IResourcePersistentId myResourceId;
private final T myResourceId;
private final ExpungeOptions myExpungeOptions;
private final RequestDetails myRequestDetails;
private final AtomicInteger myRemainingCount;
@ -60,7 +62,7 @@ public class ExpungeOperation implements Callable<ExpungeOutcome> {
public ExpungeOperation(
String theResourceName,
IResourcePersistentId theResourceId,
T theResourceId,
ExpungeOptions theExpungeOptions,
RequestDetails theRequestDetails) {
myResourceName = theResourceName;
@ -91,7 +93,7 @@ public class ExpungeOperation implements Callable<ExpungeOutcome> {
}
private void expungeDeletedResources() {
List<IResourcePersistentId> resourceIds = findHistoricalVersionsOfDeletedResources();
List<T> resourceIds = findHistoricalVersionsOfDeletedResources();
deleteHistoricalVersions(resourceIds);
if (expungeLimitReached()) {
@ -101,8 +103,8 @@ public class ExpungeOperation implements Callable<ExpungeOutcome> {
deleteCurrentVersionsOfDeletedResources(resourceIds);
}
private List<IResourcePersistentId> findHistoricalVersionsOfDeletedResources() {
List<IResourcePersistentId> retVal = getPartitionAwareSupplier()
private List<T> findHistoricalVersionsOfDeletedResources() {
List<T> retVal = getPartitionAwareSupplier()
.supplyInPartitionedContext(() -> myResourceExpungeService.findHistoricalVersionsOfDeletedResources(
myResourceName, myResourceId, myRemainingCount.get()));
@ -119,7 +121,7 @@ public class ExpungeOperation implements Callable<ExpungeOutcome> {
}
private void expungeOldVersions() {
List<IResourcePersistentId> historicalIds = getPartitionAwareSupplier()
List<V> historicalIds = getPartitionAwareSupplier()
.supplyInPartitionedContext(() -> myResourceExpungeService.findHistoricalVersionsOfNonDeletedResources(
myResourceName, myResourceId, myRemainingCount.get()));
@ -144,7 +146,7 @@ public class ExpungeOperation implements Callable<ExpungeOutcome> {
myRequestDetails);
}
private void deleteCurrentVersionsOfDeletedResources(List<IResourcePersistentId> theResourceIds) {
private void deleteCurrentVersionsOfDeletedResources(List<T> theResourceIds) {
getPartitionRunner()
.runInPartitionedThreads(
theResourceIds,
@ -152,7 +154,7 @@ public class ExpungeOperation implements Callable<ExpungeOutcome> {
myRequestDetails, partition, myRemainingCount));
}
private void deleteHistoricalVersions(List<IResourcePersistentId> theResourceIds) {
private void deleteHistoricalVersions(List<T> theResourceIds) {
getPartitionRunner()
.runInPartitionedThreads(
theResourceIds,
@ -174,6 +176,7 @@ public class ExpungeOperation implements Callable<ExpungeOutcome> {
myStorageSettings = theStorageSettings;
}
@SuppressWarnings({"unchecked", "rawtypes"})
@VisibleForTesting
public void setExpungeDaoServiceForTesting(IResourceExpungeService theIResourceExpungeService) {
myResourceExpungeService = theIResourceExpungeService;

View File

@ -22,6 +22,7 @@ package ca.uhn.fhir.jpa.dao.expunge;
import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import jakarta.annotation.Nonnull;
import org.apache.commons.lang3.Validate;
import java.util.function.Supplier;
@ -33,7 +34,6 @@ public class PartitionAwareSupplier {
private final HapiTransactionService myTransactionService;
private final RequestDetails myRequestDetails;
@Nonnull
public PartitionAwareSupplier(HapiTransactionService theTxService, RequestDetails theRequestDetails) {
myTransactionService = theTxService;
myRequestDetails = theRequestDetails;
@ -41,6 +41,9 @@ public class PartitionAwareSupplier {
@Nonnull
public <T> T supplyInPartitionedContext(Supplier<T> theResourcePersistentIdSupplier) {
return myTransactionService.withRequest(myRequestDetails).execute(tx -> theResourcePersistentIdSupplier.get());
T retVal =
myTransactionService.withRequest(myRequestDetails).execute(tx -> theResourcePersistentIdSupplier.get());
Validate.notNull(retVal, "No resource persistent id supplied by supplier %s", theResourcePersistentIdSupplier);
return retVal;
}
}

View File

@ -22,7 +22,6 @@ package ca.uhn.fhir.jpa.dao.expunge;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException;
import ca.uhn.fhir.util.StopWatch;
@ -83,8 +82,7 @@ public class PartitionRunner {
myRequestDetails = theRequestDetails;
}
public void runInPartitionedThreads(
List<IResourcePersistentId> theResourceIds, Consumer<List<IResourcePersistentId>> partitionConsumer) {
public <T> void runInPartitionedThreads(List<T> theResourceIds, Consumer<List<T>> partitionConsumer) {
List<Callable<Void>> runnableTasks = buildCallableTasks(theResourceIds, partitionConsumer);
if (runnableTasks.size() == 0) {
@ -134,8 +132,7 @@ public class PartitionRunner {
}
}
private List<Callable<Void>> buildCallableTasks(
List<IResourcePersistentId> theResourceIds, Consumer<List<IResourcePersistentId>> partitionConsumer) {
private <T> List<Callable<Void>> buildCallableTasks(List<T> theResourceIds, Consumer<List<T>> partitionConsumer) {
List<Callable<Void>> retval = new ArrayList<>();
if (myBatchSize > theResourceIds.size()) {
@ -143,10 +140,10 @@ public class PartitionRunner {
} else {
ourLog.info("Creating batch job of {} entries", theResourceIds.size());
}
List<List<IResourcePersistentId>> partitions = Lists.partition(theResourceIds, myBatchSize);
List<List<T>> partitions = Lists.partition(theResourceIds, myBatchSize);
for (List<IResourcePersistentId> nextPartition : partitions) {
if (nextPartition.size() > 0) {
for (List<T> nextPartition : partitions) {
if (!nextPartition.isEmpty()) {
Callable<Void> callableTask = () -> {
ourLog.info(myProcessName + " {} resources", nextPartition.size());
partitionConsumer.accept(nextPartition);

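With the new type parameter, callers work with their concrete ID type instead of raw IResourcePersistentId lists. A hedged usage sketch (loadIds and processChunk are hypothetical):

    List<JpaPid> ids = loadIds();
    partitionRunner.runInPartitionedThreads(ids, chunk -> {
        // each chunk holds at most myBatchSize entries and runs on the worker pool
        processChunk(chunk);
    });
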
View File

@ -41,6 +41,7 @@ import org.apache.commons.lang3.StringUtils;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.springframework.beans.factory.annotation.Autowired;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
@ -58,7 +59,7 @@ public abstract class BaseRequestPartitionHelperSvc implements IRequestPartition
private IInterceptorBroadcaster myInterceptorBroadcaster;
@Autowired
private PartitionSettings myPartitionSettings;
PartitionSettings myPartitionSettings;
public BaseRequestPartitionHelperSvc() {
myNonPartitionableResourceNames = new HashSet<>();
@ -322,6 +323,19 @@ public abstract class BaseRequestPartitionHelperSvc implements IRequestPartition
validateHasPartitionPermissions(theRequest, theResourceType, retVal);
}
// Replace null partition ID with non-null default partition ID if one is being used
if (myPartitionSettings.getDefaultPartitionId() != null
&& retVal.hasPartitionIds()
&& retVal.hasDefaultPartitionId()) {
List<Integer> partitionIds = new ArrayList<>(retVal.getPartitionIds());
for (int i = 0; i < partitionIds.size(); i++) {
if (partitionIds.get(i) == null) {
partitionIds.set(i, myPartitionSettings.getDefaultPartitionId());
}
}
retVal = RequestPartitionId.fromPartitionIds(partitionIds);
}
return retVal;
}
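
Extracted into a standalone helper for clarity (name hypothetical, methods taken from the hunk), the normalization reads: with a default partition ID of 0, for example, the ID list [null, 2] becomes [0, 2].

    static RequestPartitionId normalizeDefaultIds(RequestPartitionId theId, Integer theDefaultPartitionId) {
        if (theDefaultPartitionId == null || !theId.hasPartitionIds() || !theId.hasDefaultPartitionId()) {
            return theId;
        }
        List<Integer> ids = new ArrayList<>(theId.getPartitionIds());
        ids.replaceAll(id -> id == null ? theDefaultPartitionId : id); // swap null for the configured default
        return RequestPartitionId.fromPartitionIds(ids);
    }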

View File

@ -187,7 +187,6 @@
<plugin>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-tinder-plugin</artifactId>
<version>${project.version}</version>
<executions>
<execution>
<id>generate</id>

View File

@ -77,6 +77,13 @@ public class LogbackTestExtension implements BeforeEachCallback, AfterEachCallba
this((Logger) theLogger);
}
/**
* Sets the root logger to the given level
*/
public LogbackTestExtension(Level theLevel) {
this(org.slf4j.Logger.ROOT_LOGGER_NAME, theLevel);
}
/**
* Returns a copy to avoid concurrent modification errors.
* @return A copy of the log events so far.
@ -95,28 +102,49 @@ public class LogbackTestExtension implements BeforeEachCallback, AfterEachCallba
@Override
public void beforeEach(ExtensionContext context) throws Exception {
setUp();
}
public void setUp() {
setUp(myLevel);
}
public void setUp(Level theLevel) {
assert myListAppender == null;
myListAppender = new ListAppender<>();
myListAppender.start();
myLogger.addAppender(myListAppender);
mySavedLevel = myLogger.getLevel();
setLoggerLevel(myLevel);
}
/**
* Temporarily set the logger level - It will be reset after the current test method is done
*/
public void setLoggerLevel(Level theLevel) {
if (theLevel != null) {
mySavedLevel = myLogger.getLevel();
myLogger.setLevel(theLevel);
}
}
/**
* @deprecated Use {@link #setLoggerLevel(Level)} instead
*/
@Deprecated
public void setUp(Level theLevel) {
setLoggerLevel(theLevel);
}
/**
* @deprecated This class should be registered as a junit5 extension, and will be set
* up automatically.
*/
@Deprecated
public void setUp() {
// nothing
}
@Override
public void afterEach(ExtensionContext context) throws Exception {
myLogger.detachAppender(myListAppender);
myListAppender.stop();
if (myLevel != null) {
if (myListAppender != null) {
myLogger.detachAppender(myListAppender);
myListAppender.stop();
myListAppender = null;
}
if (mySavedLevel != null) {
myLogger.setLevel(mySavedLevel);
}
}
@ -130,4 +158,16 @@ public class LogbackTestExtension implements BeforeEachCallback, AfterEachCallba
return getLogEvents().stream().map(ILoggingEvent::getMessage).toList();
}
public void reRegister() throws Exception {
afterEach(null);
beforeEach(null);
}
/**
* Predicate for passing to {@link #getLogEvents(Predicate)}
*/
public static Predicate<ILoggingEvent> atLeastLevel(Level theLevel) {
return e -> e.getLevel().isGreaterOrEqual(theLevel);
}
}
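
Typical usage after this change, as a sketch (SomeService is hypothetical; the extension restores the saved level after each test):

    @RegisterExtension
    LogbackTestExtension myLog = new LogbackTestExtension(SomeService.class);

    @Test
    void producesWarning() {
        myLog.setLoggerLevel(Level.DEBUG); // temporary; reset in afterEach()
        new SomeService().doWork();
        assertThat(myLog.getLogEvents(LogbackTestExtension.atLeastLevel(Level.WARN))).isNotEmpty();
    }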

View File

@ -33,22 +33,20 @@ import org.junit.jupiter.api.extension.ExtensionContext;
public class StaticLogbackTestExtension implements BeforeAllCallback, AfterAllCallback {
private final LogbackTestExtension myLogbackTestExtension;
public StaticLogbackTestExtension(LogbackTestExtension theLogbackTestExtension) {
myLogbackTestExtension = theLogbackTestExtension;
}
public StaticLogbackTestExtension() {
myLogbackTestExtension = new LogbackTestExtension();
}
public static StaticLogbackTestExtension withThreshold(Level theLevel) {
LogbackTestExtension logbackTestExtension = new LogbackTestExtension();
logbackTestExtension.setUp(theLevel);
ThresholdFilter thresholdFilter = new ThresholdFilter();
thresholdFilter.setLevel(theLevel.levelStr);
logbackTestExtension.getAppender().addFilter(thresholdFilter);
public StaticLogbackTestExtension(String theLoggerName, Level theLevel) {
myLogbackTestExtension = new LogbackTestExtension(theLoggerName, theLevel);
}
return new StaticLogbackTestExtension(logbackTestExtension);
public StaticLogbackTestExtension(Level theLevel) {
myLogbackTestExtension = new LogbackTestExtension(theLevel);
}
private StaticLogbackTestExtension(LogbackTestExtension theLogbackTestExtension) {
myLogbackTestExtension = theLogbackTestExtension;
}
@Override
@ -65,4 +63,8 @@ public class StaticLogbackTestExtension implements BeforeAllCallback, AfterAllCa
return myLogbackTestExtension;
}
public static StaticLogbackTestExtension withThreshold(Level theLevel) {
return new StaticLogbackTestExtension(theLevel);
}
}

View File

@ -130,7 +130,7 @@ public class TinderJpaRestServerMojo extends AbstractMojo {
baseResourceNames.removeAll(excludeResourceNames);
}
ourLog.info("Including the following resources: {}", baseResourceNames);
ourLog.debug("Including the following resources: {}", baseResourceNames);
File configPackageDirectoryBase =
new File(targetDirectory, configPackageBase.replace(".", File.separatorChar + ""));

View File

@ -601,7 +601,7 @@ public abstract class BaseStructureParser {
}
if (!actuallyWrite) {
ourLog.info("Skipping writing already up-to-date file: {}", theFile.getAbsolutePath());
ourLog.debug("Skipping writing already up-to-date file: {}", theFile.getAbsolutePath());
return;
}

View File

@ -98,7 +98,6 @@
<plugin>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-tinder-plugin</artifactId>
<version>${project.version}</version>
<dependencies>
<dependency>
<groupId>ca.uhn.hapi.fhir</groupId>

View File

@ -6,7 +6,7 @@ import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertNotNull;
public class TestParticulars {
public class ParticularsTest {
@Test
public void testElementsWithSpecialNames() {

View File

@ -2398,6 +2398,11 @@
<build>
<pluginManagement>
<plugins>
<plugin>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-tinder-plugin</artifactId>
<version>${project.version}</version>
</plugin>
<plugin>
<groupId>com.diffplug.spotless</groupId>
<artifactId>spotless-maven-plugin</artifactId>