Merge remote-tracking branch 'origin/master' into 6008-reindex-across-multiple-partitions

This commit is contained in:
Martha 2024-06-27 15:13:09 -07:00
commit 8a4e5ca673
359 changed files with 10640 additions and 3097 deletions

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>7.3.7-SNAPSHOT</version>
<version>7.3.8-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.3.7-SNAPSHOT</version>
<version>7.3.8-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.3.7-SNAPSHOT</version>
<version>7.3.8-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -58,6 +58,7 @@ public class RuntimeSearchParam {
private final Map<String, String> myUpliftRefchains = new HashMap<>();
private final ComboSearchParamType myComboSearchParamType;
private final List<Component> myComponents;
private final IIdType myIdUnqualifiedVersionless;
private IPhoneticEncoder myPhoneticEncoder;
/**
@ -127,6 +128,7 @@ public class RuntimeSearchParam {
super();
myId = theId;
myIdUnqualifiedVersionless = theId != null ? theId.toUnqualifiedVersionless() : null;
myUri = theUri;
myName = theName;
myDescription = theDescription;
@ -214,6 +216,10 @@ public class RuntimeSearchParam {
return myId;
}
public IIdType getIdUnqualifiedVersionless() {
return myIdUnqualifiedVersionless;
}
public String getUri() {
return myUri;
}

View File

@ -24,6 +24,8 @@ import jakarta.annotation.Nullable;
import org.hl7.fhir.instance.model.api.IBase;
import org.hl7.fhir.instance.model.api.IIdType;
import java.util.List;
public interface IFhirPathEvaluationContext {
/**
@ -36,4 +38,15 @@ public interface IFhirPathEvaluationContext {
default IBase resolveReference(@Nonnull IIdType theReference, @Nullable IBase theContext) {
return null;
}
/**
* @param appContext The application context passed in when the FHIRPath evaluation was initiated
* @param name The name of the constant(s) to be resolved
* @param beforeContext
* @return The resolved constant value(s), or <code>null</code> if the constant cannot be resolved
*/
default List<IBase> resolveConstant(Object appContext, String name, boolean beforeContext) {
return null;
}
}
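
As an illustration of how an implementer might use the new hook, here is a minimal sketch of an evaluation context that resolves the `%current` and `%previous` variables referenced by the subscription FhirPath changelog entry later in this changeset. The class name, constructor, and the assumption that variable names arrive without the `%` prefix are illustrative only, not part of this change:

```java
import ca.uhn.fhir.fhirpath.IFhirPathEvaluationContext;
import org.hl7.fhir.instance.model.api.IBase;

import java.util.List;

// A minimal sketch, not the library's reference implementation.
public class VariableResolvingContext implements IFhirPathEvaluationContext {
	private final IBase myCurrentResource;
	private final IBase myPreviousResource;

	public VariableResolvingContext(IBase theCurrent, IBase thePrevious) {
		myCurrentResource = theCurrent;
		myPreviousResource = thePrevious;
	}

	@Override
	public List<IBase> resolveConstant(Object appContext, String name, boolean beforeContext) {
		if ("current".equals(name)) { // assumes names arrive without the % prefix
			return List.of(myCurrentResource);
		}
		if ("previous".equals(name)) {
			return List.of(myPreviousResource);
		}
		return null; // unknown constant - defer to default handling
	}
}
```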

View File

@ -321,9 +321,12 @@ public enum Pointcut implements IPointcut {
* This hook is invoked before an incoming request is processed. Note that this method is called
* after the server has begun preparing the response to the incoming client request.
* As such, it is not able to supply a response to the incoming request in the way that
* SERVER_INCOMING_REQUEST_PRE_PROCESSED and
* {@link #SERVER_INCOMING_REQUEST_POST_PROCESSED}
* are.
* SERVER_INCOMING_REQUEST_PRE_PROCESSED and {@link #SERVER_INCOMING_REQUEST_POST_PROCESSED} are.
* At this point the request has already been passed to the handler so any changes
* (e.g. adding parameters) will not be considered.
* If you'd like to modify request parameters before they are passed to the handler,
* use {@link Pointcut#SERVER_INCOMING_REQUEST_PRE_HANDLER_SELECTED} or {@link Pointcut#SERVER_INCOMING_REQUEST_POST_PROCESSED}.
* If you are attempting to modify a search before it occurs, use {@link Pointcut#STORAGE_PRESEARCH_REGISTERED}.
* <p>
* Hooks may accept the following parameters:
* <ul>
@ -902,7 +905,6 @@ public enum Pointcut implements IPointcut {
* canonical subscription such as adding headers, modifying the channel
* endpoint, etc.
* Furthermore, you may modify the outgoing message wrapper, for example adding headers via ResourceModifiedJsonMessage field.
*
* </p>
* Hooks may accept the following parameters:
* <ul>
@ -1122,7 +1124,6 @@ public enum Pointcut implements IPointcut {
* <b>Storage Hook:</b>
* Invoked when a Bulk Export job is being kicked off. Hook methods may modify
* the request, or raise an exception to prevent it from being initiated.
*
* This hook is not guaranteed to be called before permission checks, and so
any implementers should be cautious of changing the options in ways that would
* affect permissions.
@ -1192,7 +1193,7 @@ public enum Pointcut implements IPointcut {
/**
* <b>Storage Hook:</b>
* Invoked when a set of resources are about to be deleted and expunged via url like http://localhost/Patient?active=false&_expunge=true
* Invoked when a set of resources are about to be deleted and expunged via url like {@code http://localhost/Patient?active=false&_expunge=true}.
* <p>
* Hooks may accept the following parameters:
* </p>
@ -1228,7 +1229,7 @@ public enum Pointcut implements IPointcut {
/**
* <b>Storage Hook:</b>
* Invoked when a batch of resource pids are about to be deleted and expunged via url like http://localhost/Patient?active=false&_expunge=true
* Invoked when a batch of resource pids are about to be deleted and expunged via url like {@code http://localhost/Patient?active=false&_expunge=true}.
* <p>
* Hooks may accept the following parameters:
* </p>
@ -1575,11 +1576,11 @@ public enum Pointcut implements IPointcut {
/**
* <b>Storage Hook:</b>
* Invoked before a resource will be created, immediately before the resource
* is persisted to the database.
* Invoked before a resource will be deleted, immediately before the resource
* is removed from the database.
* <p>
* Hooks will have access to the contents of the resource being created
* and may choose to make modifications to it. These changes will be
* Hooks will have access to the contents of the resource being deleted
* and may choose to make modifications related to it. These changes will be
* reflected in permanent storage.
* </p>
* Hooks may accept the following parameters:
@ -2957,7 +2958,6 @@ public enum Pointcut implements IPointcut {
"ca.uhn.fhir.rest.server.servlet.ServletRequestDetails",
"ca.uhn.fhir.jpa.util.SqlQueryList"),
@Deprecated(since = "7.2.0 - Use STORAGE_BINARY_ASSIGN_BINARY_CONTENT_ID_PREFIX instead.")
/**
* <b> Deprecated but still supported. Will eventually be removed. <code>Please use Pointcut.STORAGE_BINARY_ASSIGN_BINARY_CONTENT_ID_PREFIX</code> </b>
* <b> Binary Blob Prefix Assigning Hook:</b>
@ -2980,6 +2980,7 @@ public enum Pointcut implements IPointcut {
* Hooks should return <code>String</code>, which represents the full prefix to be applied to the blob.
* </p>
*/
@Deprecated(since = "7.2.0 - Use STORAGE_BINARY_ASSIGN_BINARY_CONTENT_ID_PREFIX instead.")
STORAGE_BINARY_ASSIGN_BLOB_ID_PREFIX(
String.class,
"ca.uhn.fhir.rest.api.server.RequestDetails",

View File

@ -31,6 +31,7 @@ import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
import java.io.Serializable;
import java.math.BigDecimal;
import java.util.Date;
import java.util.List;
@ -80,6 +81,15 @@ public abstract class ResourceMetadataKeyEnum<T> implements Serializable {
*/
public static final ResourceMetadataKeyEnum<BundleEntrySearchModeEnum> ENTRY_SEARCH_MODE =
new ResourceMetadataKeyEnum<>("ENTRY_SEARCH_MODE", BundleEntrySearchModeEnum.class) {};
/**
* If present and populated with a decimal value, contains the "bundle entry search score", which is the value of the score field in the Bundle entry containing this resource.
* The value for this key corresponds to field <code>Bundle.entry.search.score</code>. This value represents the search ranking score, where 1.0 is relevant and 0.0 is irrelevant.
* <p>
Note that search score is only used in FHIR DSTU2 and later.
* </p>
*/
public static final ResourceMetadataKeyEnum<BigDecimal> ENTRY_SEARCH_SCORE =
new ResourceMetadataKeyEnum<>("ENTRY_SEARCH_SCORE", BigDecimal.class) {};
/**
* If present and populated with a {@link BundleEntryTransactionMethodEnum}, contains the "bundle entry transaction operation", which is the value of the status field in the Bundle entry
* containing this resource. The value for this key corresponds to field <code>Bundle.entry.transaction.operation</code>. This value can be set in resources being transmitted to a server to

View File

@ -21,15 +21,22 @@ package ca.uhn.fhir.rest.param;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.model.primitive.UriDt;
import ca.uhn.fhir.rest.api.Constants;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.apache.commons.lang3.StringUtils.defaultString;
public class SpecialParam extends BaseParam /*implements IQueryParameterType*/ {
private static final Logger ourLog = LoggerFactory.getLogger(SpecialParam.class);
private String myValue;
private boolean myContains;
/**
* Constructor
@ -40,7 +47,11 @@ public class SpecialParam extends BaseParam /*implements IQueryParameterType*/ {
@Override
String doGetQueryParameterQualifier() {
return null;
if (myContains) {
return Constants.PARAMQUALIFIER_STRING_CONTAINS;
} else {
return null;
}
}
/**
@ -56,6 +67,15 @@ public class SpecialParam extends BaseParam /*implements IQueryParameterType*/ {
*/
@Override
void doSetValueAsQueryToken(FhirContext theContext, String theParamName, String theQualifier, String theParameter) {
if (Constants.PARAMQUALIFIER_STRING_CONTAINS.equals(theQualifier)) {
if (theParamName.equalsIgnoreCase(Constants.PARAM_TEXT)
|| theParamName.equalsIgnoreCase(Constants.PARAM_CONTENT)) {
setContains(true);
} else {
ourLog.debug(
"Attempted to set the :contains modifier on a special search parameter that was not `_text` or `_content`. This is not supported.");
}
}
setValue(ParameterUtil.unescape(theParameter));
}
@ -93,4 +113,52 @@ public class SpecialParam extends BaseParam /*implements IQueryParameterType*/ {
private static String toSystemValue(UriDt theSystem) {
return theSystem.getValueAsString();
}
/**
* Special parameter modifier <code>:contains</code> for _text and _content
*/
public boolean isContains() {
return myContains;
}
/**
* Special parameter modifier <code>:contains</code> for _text and _content
*/
public SpecialParam setContains(boolean theContains) {
myContains = theContains;
if (myContains) {
setMissing(null);
}
return this;
}
@Override
public int hashCode() {
return new HashCodeBuilder(17, 37)
.append(isContains())
.append(getValue())
.append(getMissing())
.toHashCode();
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (!(obj instanceof SpecialParam)) {
return false;
}
SpecialParam other = (SpecialParam) obj;
EqualsBuilder eb = new EqualsBuilder();
eb.append(myContains, other.myContains);
eb.append(myValue, other.myValue);
eb.append(getMissing(), other.getMissing());
return eb.isEquals();
}
}
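
A short sketch of building the parameter programmatically, mirroring what the new token-parsing branch above produces for `_text:contains=...`. The helper method is illustrative; `setValue(String)` is assumed to be the existing setter on this class, as used by `doSetValueAsQueryToken` above:

```java
// A minimal sketch: build a _text/_content value carrying the new :contains qualifier.
public static SpecialParam containsParam(String theValue) {
	SpecialParam param = new SpecialParam();
	param.setContains(true); // also clears :missing, as setContains() above shows
	param.setValue(theValue);
	return param; // serializes with the ":contains" qualifier via doGetQueryParameterQualifier()
}
```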

View File

@ -50,6 +50,7 @@ import org.hl7.fhir.instance.model.api.IPrimitiveType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashSet;
@ -578,42 +579,53 @@ public class BundleUtil {
BaseRuntimeElementCompositeDefinition<?> searchChildContentsDef =
(BaseRuntimeElementCompositeDefinition<?>) searchChildDef.getChildByName("search");
BaseRuntimeChildDefinition searchModeChildDef = searchChildContentsDef.getChildByName("mode");
BaseRuntimeChildDefinition searchScoreChildDef = searchChildContentsDef.getChildByName("score");
List<SearchBundleEntryParts> retVal = new ArrayList<>();
for (IBase nextEntry : entries) {
SearchBundleEntryParts parts = getSearchBundleEntryParts(
fullUrlChildDef, resourceChildDef, searchChildDef, searchModeChildDef, nextEntry);
fullUrlChildDef,
resourceChildDef,
searchChildDef,
searchModeChildDef,
searchScoreChildDef,
nextEntry);
retVal.add(parts);
}
return retVal;
}
private static SearchBundleEntryParts getSearchBundleEntryParts(
BaseRuntimeChildDefinition fullUrlChildDef,
BaseRuntimeChildDefinition resourceChildDef,
BaseRuntimeChildDefinition searchChildDef,
BaseRuntimeChildDefinition searchModeChildDef,
BaseRuntimeChildDefinition theFullUrlChildDef,
BaseRuntimeChildDefinition theResourceChildDef,
BaseRuntimeChildDefinition theSearchChildDef,
BaseRuntimeChildDefinition theSearchModeChildDef,
BaseRuntimeChildDefinition theSearchScoreChildDef,
IBase entry) {
IBaseResource resource = null;
String matchMode = null;
BigDecimal searchScore = null;
String fullUrl = fullUrlChildDef
String fullUrl = theFullUrlChildDef
.getAccessor()
.getFirstValueOrNull(entry)
.map(t -> ((IPrimitiveType<?>) t).getValueAsString())
.orElse(null);
for (IBase nextResource : resourceChildDef.getAccessor().getValues(entry)) {
for (IBase nextResource : theResourceChildDef.getAccessor().getValues(entry)) {
resource = (IBaseResource) nextResource;
}
for (IBase nextSearch : searchChildDef.getAccessor().getValues(entry)) {
for (IBase nextUrl : searchModeChildDef.getAccessor().getValues(nextSearch)) {
for (IBase nextSearch : theSearchChildDef.getAccessor().getValues(entry)) {
for (IBase nextUrl : theSearchModeChildDef.getAccessor().getValues(nextSearch)) {
matchMode = ((IPrimitiveType<?>) nextUrl).getValueAsString();
}
for (IBase nextUrl : theSearchScoreChildDef.getAccessor().getValues(nextSearch)) {
searchScore = (BigDecimal) ((IPrimitiveType<?>) nextUrl).getValue();
}
}
return new SearchBundleEntryParts(fullUrl, resource, matchMode);
return new SearchBundleEntryParts(fullUrl, resource, matchMode, searchScore);
}
/**

View File

@ -22,15 +22,20 @@ package ca.uhn.fhir.util.bundle;
import ca.uhn.fhir.model.valueset.BundleEntrySearchModeEnum;
import org.hl7.fhir.instance.model.api.IBaseResource;
import java.math.BigDecimal;
public class SearchBundleEntryParts {
private final IBaseResource myResource;
private final BundleEntrySearchModeEnum mySearchMode;
private final BigDecimal mySearchScore;
private final String myFullUrl;
public SearchBundleEntryParts(String theFullUrl, IBaseResource theResource, String theSearchMode) {
public SearchBundleEntryParts(
String theFullUrl, IBaseResource theResource, String theSearchMode, BigDecimal theSearchScore) {
myFullUrl = theFullUrl;
myResource = theResource;
mySearchMode = BundleEntrySearchModeEnum.forCode(theSearchMode);
mySearchScore = theSearchScore;
}
public String getFullUrl() {
@ -44,4 +49,8 @@ public class SearchBundleEntryParts {
public BundleEntrySearchModeEnum getSearchMode() {
return mySearchMode;
}
public BigDecimal getSearchScore() {
return mySearchScore;
}
}
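
A short usage sketch showing how a consumer might read the newly exposed score. It assumes the static entry point `BundleUtil.getSearchBundleEntryParts(FhirContext, IBaseBundle)` wrapping the private helper shown above; the calling code is illustrative:

```java
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.util.BundleUtil;
import ca.uhn.fhir.util.bundle.SearchBundleEntryParts;
import org.hl7.fhir.instance.model.api.IBaseBundle;

import java.math.BigDecimal;
import java.util.List;

public class SearchScoreExample {
	public static void printScores(FhirContext theContext, IBaseBundle theSearchResponse) {
		List<SearchBundleEntryParts> entries = BundleUtil.getSearchBundleEntryParts(theContext, theSearchResponse);
		for (SearchBundleEntryParts entry : entries) {
			BigDecimal score = entry.getSearchScore(); // Bundle.entry.search.score; null when not populated
			System.out.println(entry.getFullUrl() + " => score " + score);
		}
	}
}
```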

View File

@ -0,0 +1,67 @@
package ca.uhn.fhir.rest.param;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.api.Constants;
import ch.qos.logback.classic.Logger;
import ch.qos.logback.classic.spi.ILoggingEvent;
import ch.qos.logback.core.read.ListAppender;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import org.slf4j.LoggerFactory;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
@ExtendWith(MockitoExtension.class)
public class SpecialParamTest {
private static final Logger ourLog = (Logger) LoggerFactory.getLogger(SpecialParam.class);
private ListAppender<ILoggingEvent> myListAppender = new ListAppender<>();
@Mock
private FhirContext myContext;
@BeforeEach
public void beforeEach() {
myListAppender = new ListAppender<>();
myListAppender.start();
ourLog.addAppender(myListAppender);
}
@AfterEach
public void afterEach() {
myListAppender.stop();
}
@Test
public void testEquals() {
SpecialParam specialParam = new SpecialParam();
specialParam.setValueAsQueryToken(myContext, Constants.PARAM_TEXT, Constants.PARAMQUALIFIER_STRING_CONTAINS, "my-test-value");
SpecialParam specialParam2 = new SpecialParam();
specialParam2.setValueAsQueryToken(myContext, Constants.PARAM_TEXT, Constants.PARAMQUALIFIER_STRING_CONTAINS, "my-test-value");
assertThat(specialParam).isEqualTo(specialParam2);
}
@Test
public void testContainsOnlyWorksForSpecificParams() {
SpecialParam specialParamText = new SpecialParam();
specialParamText.setValueAsQueryToken(myContext, Constants.PARAM_TEXT, Constants.PARAMQUALIFIER_STRING_CONTAINS, "my-test-value");
assertTrue(specialParamText.isContains());
SpecialParam specialParamContent = new SpecialParam();
specialParamContent.setValueAsQueryToken(myContext, Constants.PARAM_CONTENT, Constants.PARAMQUALIFIER_STRING_CONTAINS, "my-test-value");
assertTrue(specialParamContent.isContains());
SpecialParam nonTextSpecialParam = new SpecialParam();
nonTextSpecialParam.setValueAsQueryToken(myContext, "name", Constants.PARAMQUALIFIER_STRING_CONTAINS, "my-test-value");
assertFalse(nonTextSpecialParam.isContains());
}
}

View File

@ -23,10 +23,7 @@ import java.util.stream.Collectors;
import static ca.uhn.fhir.rest.api.Constants.PARAMQUALIFIER_STRING_TEXT;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.*;
@ExtendWith(MockitoExtension.class)
public class StringParamTest {

View File

@ -4,7 +4,7 @@
<modelVersion>4.0.0</modelVersion>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-bom</artifactId>
<version>7.3.7-SNAPSHOT</version>
<version>7.3.8-SNAPSHOT</version>
<packaging>pom</packaging>
<name>HAPI FHIR BOM</name>
@ -12,7 +12,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.3.7-SNAPSHOT</version>
<version>7.3.8-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>7.3.7-SNAPSHOT</version>
<version>7.3.8-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -23,9 +23,6 @@
<suppress files="BaseMigrationTasks\.java" checks="AbstractClassName"/>
<suppress files="BaseLoincTop2000LabResultsHandler\.java" checks="AbstractClassName"/>
<!-- TODO GGG Checkstyle MDM tests can currently use hamcrest until we fix em -->
<suppress files=".*/mdm/.*" checks="RegexpSinglelineJava" id="NoHamcrestAssert"/>
<!-- Missing "Base" prefix suppressions -->
<suppress files="ResourceMetadataKeyEnum\.java" checks="AbstractClassName"/>
<suppress files="RequestDetails\.java" checks="AbstractClassName"/>

View File

@ -35,8 +35,8 @@
</module>
<module name="RegexpSinglelineJava">
<property name="id" value="NoHamcrestAssert"/>
<property name="format" value="org.hamcrest.MatcherAssert.assertThat"/>
<property name="message" value="Incorrect assertThat import used: The &quot;org.assertj.core.api.Assertions.assertThat&quot; import should be used for tests"/>
<property name="format" value="org.hamcrest."/>
<property name="message" value="Incorrect matcher import used: The &quot;org.assertj.core.api.Assertions.assertThat&quot; import should be used for tests"/>
</module>
<module name="RegexpSinglelineJava">
<property name="format" value="org\.jetbrains\.annotations\.NotNull"/>

View File

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.3.7-SNAPSHOT</version>
<version>7.3.8-SNAPSHOT</version>
<relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -26,8 +26,6 @@ import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Arrays;
import java.util.List;
@ -40,12 +38,12 @@ import static org.apache.commons.lang3.StringUtils.defaultString;
* NB since 2019-12-05: This class is kind of weirdly named now, since it can either use Flyway or not use Flyway
*/
public abstract class BaseFlywayMigrateDatabaseCommand<T extends Enum> extends BaseCommand {
private static final Logger ourLog = LoggerFactory.getLogger(BaseFlywayMigrateDatabaseCommand.class);
public static final String MIGRATE_DATABASE = "migrate-database";
public static final String NO_COLUMN_SHRINK = "no-column-shrink";
public static final String STRICT_ORDER = "strict-order";
public static final String SKIP_VERSIONS = "skip-versions";
public static final String ENABLE_HEAVYWEIGHT_MIGRATIONS = "enable-heavyweight-migrations";
private Set<String> myFlags;
private String myMigrationTableName;
@ -100,6 +98,12 @@ public abstract class BaseFlywayMigrateDatabaseCommand<T extends Enum> extends B
SKIP_VERSIONS,
"Versions",
"A comma separated list of schema versions to skip. E.g. 4_1_0.20191214.2,4_1_0.20191214.4");
addOptionalOption(
retVal,
null,
ENABLE_HEAVYWEIGHT_MIGRATIONS,
false,
"If this flag is set, additional migration tasks will be executed that are considered unnecessary to execute on a database with a significant amount of data loaded. This option is not generally necessary.");
return retVal;
}
@ -125,6 +129,7 @@ public abstract class BaseFlywayMigrateDatabaseCommand<T extends Enum> extends B
boolean dryRun = theCommandLine.hasOption("r");
boolean noColumnShrink = theCommandLine.hasOption(BaseFlywayMigrateDatabaseCommand.NO_COLUMN_SHRINK);
boolean runHeavyweight = theCommandLine.hasOption(ENABLE_HEAVYWEIGHT_MIGRATIONS);
String flags = theCommandLine.getOptionValue("x");
myFlags = Arrays.stream(defaultString(flags).split(","))
@ -139,6 +144,7 @@ public abstract class BaseFlywayMigrateDatabaseCommand<T extends Enum> extends B
migrator.createMigrationTableIfRequired();
migrator.setDryRun(dryRun);
migrator.setRunHeavyweightSkippableTasks(runHeavyweight);
migrator.setNoColumnShrink(noColumnShrink);
String skipVersions = theCommandLine.getOptionValue(BaseFlywayMigrateDatabaseCommand.SKIP_VERSIONS);
addTasks(migrator, skipVersions);

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-cli</artifactId>
<version>7.3.7-SNAPSHOT</version>
<version>7.3.8-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>7.3.7-SNAPSHOT</version>
<version>7.3.8-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.3.7-SNAPSHOT</version>
<version>7.3.8-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.3.7-SNAPSHOT</version>
<version>7.3.8-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.3.7-SNAPSHOT</version>
<version>7.3.8-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>7.3.7-SNAPSHOT</version>
<version>7.3.8-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.3.7-SNAPSHOT</version>
<version>7.3.8-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -0,0 +1,6 @@
---
type: perf
issue: 5885
title: "When unique and non-unique combo parameters are in use on a server, FHIR Transaction and Reindex Job
performance has been optimized by pre-fetching all existing combo index rows for a large batch of resources
in a single database operation. This should yield a meaningful performance improvement on such systems."

View File

@ -0,0 +1,6 @@
---
type: perf
issue: 5885
title: "Indexing for non-unique combo Search Parameters has been improved,
using a new hash-based index that should perform significantly better in
many circumstances."

View File

@ -0,0 +1,7 @@
---
type: perf
issue: 5885
title: "Indexing for unique combo Search Parameters has been modified so that a hash
value is now stored. This hash value is not yet used in searching or enforcing uniqueness,
but will be in the future in order to reduce the space required to store the indexes and the
current size limitation on unique indexes."

View File

@ -0,0 +1,4 @@
---
type: fix
issue: 5926
title: "A number of columns in the JPA schema use primitive types (and therefore can never have a null value) but aren't marked as non-null."

View File

@ -0,0 +1,7 @@
---
type: fix
issue: 5926
title: "A regression in HAPI FHIR 6.4.0 meant that ther JPA server schema migrator ran all tasks
even when the database was initially empty and the schema was being initialized by script.
This did not produce any incorrect results, but did impact the amount of time taken to initialize
an empty database. This has been corrected."

View File

@ -0,0 +1,7 @@
---
type: perf
issue: 5937
title: "A new configuration option, `StorageSettings#setIndexStorageOptimized(boolean)` has been added. If enabled,
the server will not write data to the `SP_NAME`, `RES_TYPE`, `SP_UPDATED` columns for all `HFJ_SPIDX_xxx` tables.
This can help reduce the overall storage size on servers where HFJ_SPIDX tables are expected to have a large
amount of data."

View File

@ -0,0 +1,8 @@
---
type: fix
issue: 6000
title: "In an MDM enabled system with multi-delete enabled, deleting
both the final source resource and its linked golden resource
at the same time resulted in an error being thrown.
This has been fixed.
"

View File

@ -0,0 +1,5 @@
---
type: add
issue: 6010
jira: SMILE-8214
title: "When populated, the search score field will now be included in the entries of a response Bundle."

View File

@ -0,0 +1,7 @@
---
type: add
issue: 6014
title: "When uploading an invalid CodeSystem to the JPA server containing
duplicate codes, the server responded with an unhelpful error message
referring to a database constraint error. This has been fixed so that
a more informative error message is returned."

View File

@ -0,0 +1,10 @@
---
type: fix
issue: 6024
title: "Fixed a bug in search where requesting a count with HSearch indexing
and FilterParameter enabled and using the _filter parameter would result
in inaccurate results being returned.
This happened because the count query would use an incorrect set of parameters
to find the count, and the regular search would then try to ensure its results
matched the count query (which it couldn't because it had different parameters).
"

View File

@ -0,0 +1,4 @@
---
type: add
issue: 6031
title: "Subscriptions now support the evaluation use of FhirPath criteria and the use of the variables %current and %previous. Thanks to Jens Villadsen (@jkiddo) for the contribution!"

View File

@ -0,0 +1,7 @@
---
type: fix
issue: 6033
jira: SMILE-8429
title: "Previously, attempting to store resources with common identifies but different partitions would.
This has been fixed by adding a new configuration key defaulting to false to allow storing resources with duplicate identifiers across partitions.
This new feature can be activated by calling PartitionSettings.setConditionalCreateDuplicateIdentifiersEnabled()"

View File

@ -0,0 +1,4 @@
---
type: fix
issue: 6034
title: "Two indexes introduced in HAPI-FHIR 6.6.0, `IDX_SP_URI_HASH_IDENTITY_V2` and `IDX_SP_URI_HASH_URI_V2` were previously created as unique indexes. This has caused issues on SQL Server due to the way that a filtered index is created. The unique clause was not necessary to this index, and has been removed."

View File

@ -0,0 +1,8 @@
---
type: fix
issue: 6040
title: "The `meta.profile` element on resources was not being respected as
canonical (ie, allowing for a version to be appended, `http://example.com/StructureDefinition/abc|1.0.0`),
and was thus being ignored during validation.
This has been fixed.
"

View File

@ -0,0 +1,7 @@
---
type: fix
issue: 6044
title: "Fixed an issue where doing a cache refresh with advanced Hibernate Search
enabled would result in an infinite loop of cache refresh -> search for
StructureDefinition -> cache refresh, etc.
"

View File

@ -0,0 +1,4 @@
---
type: fix
issue: 6046
title: "Previously, using `_text` and `_content` searches in Hibernate Search in R5 was not supported. This issue has been fixed."

View File

@ -0,0 +1,5 @@
---
type: add
issue: 6046
title: "Added support for `:contains` parameter qualifier on the `_text` and `_content` Search Parameters. When using Hibernate Search, this will cause
the search to perform an substring match on the provided value. Documentation can be found [here](/hapi-fhir/docs/server_jpa/elastic.html#performing-fulltext-search-in-luceneelasticsearch)."

View File

@ -0,0 +1,6 @@
---
type: fix
issue: 6049
title: "Rolled back org.apache.derby dependency to one that
supports JREs 11 - 17.
"

View File

@ -0,0 +1,6 @@
---
type: fix
issue: 6049
title: "Fixed a bug in SchemaMigrator that would considered
skippable migration tasks as 'failed' migrations.
"

View File

@ -0,0 +1,25 @@
## Possible migration errors on SQL Server (MSSQL)
* This affects only clients running SQL Server (MSSQL) who have custom indexes on `HFJ_SPIDX` tables that
include the `sp_name` or `res_type` columns.
* For those clients, the migration of the `sp_name` and `res_type` columns to nullable on `HFJ_SPIDX` tables may complete with errors, because changing a column to nullable while the column is
part of an index can lead to errors on SQL Server (MSSQL).
* If a client wants to keep the existing indexes and settings, these errors can be ignored. However, if a client wants to enable both the [Index Storage Optimized](/hapi-fhir/apidocs/hapi-fhir-jpaserver-model/ca/uhn/fhir/jpa/model/entity/StorageSettings.html#setIndexStorageOptimized(boolean))
and [Index Missing Fields](/hapi-fhir/apidocs/hapi-fhir-jpaserver-model/ca/uhn/fhir/jpa/model/entity/StorageSettings.html#getIndexMissingFields()) settings, manual steps are required to change the nullability of `sp_name` and `res_type`.
To update the columns to nullable in such a scenario, execute the steps below:
1. Indexes that include `sp_name` or `res_type` columns should be dropped:
```sql
DROP INDEX IDX_SP_TOKEN_REST_TYPE_SP_NAME ON HFJ_SPIDX_TOKEN;
```
2. The nullability of `sp_name` and `res_type` columns should be updated:
```sql
ALTER TABLE HFJ_SPIDX_TOKEN ALTER COLUMN RES_TYPE varchar(100) NULL;
ALTER TABLE HFJ_SPIDX_TOKEN ALTER COLUMN SP_NAME varchar(100) NULL;
```
3. Additionally, the following index may need to be added to improve the search performance:
```sql
CREATE INDEX IDX_SP_TOKEN_MISSING_OPTIMIZED ON HFJ_SPIDX_TOKEN (HASH_IDENTITY, SP_MISSING, RES_ID, PARTITION_ID);
```

View File

@ -1,3 +1,3 @@
---
release-date: "2024-08-18"
release-date: "2024-08-15"
codename: "Copernicus"

View File

@ -3,6 +3,55 @@
The HAPI JPA Server supports optional indexing via Hibernate Search when configured to use Lucene or Elasticsearch.
This is required to support the `_content` or `_text` search parameters.
# Performing Fulltext Search in Lucene/Elasticsearch
When enabled, searches for `_text` and `_content` are forwarded to the underlying Hibernate Search engine, which can be backed by either Elasticsearch or Lucene.
By default, search is supported in the way indicated in the [FHIR Specification on _text/_content Search](https://www.hl7.org/fhir/search.html#_text). This means that
queries like the following can be evaluated:
```http request
GET [base]/Observation?_content=cancer OR metastases OR tumor
```
To understand how this works, look at the following example. During ingestion, the fields required for `_content` and `_text` searches are stored in the backing engine after undergoing normalization and analysis. For example, consider this Observation:
```json
{
"resourceType" : "Observation",
"code" : {
"coding" : [{
"system" : "http://loinc.org",
"code" : "15074-8",
"display" : "Glucose [Moles/volume] in Blood Found during patient's visit!"
}]
},
"valueQuantity" : {
"value" : 6.3,
"unit" : "mmol/l",
"system" : "http://unitsofmeasure.org",
"code" : "mmol/L"
}
}
```
The display text in this example, once parsed and analyzed, will result in the following tokens being generated and made available for search:
```json
["glucose", "mole", "volume", "blood", "found", "during", "patient", "visit"]
```
You will notice that plural forms are reduced, the text has been normalized, and special characters have been removed. Search terms are normalized in the same fashion at query time.
However, the default implementation will not allow you to search for an exact match over a longer string containing special characters, or any other content that would be broken apart during tokenization. For example, an exact match for `_content=[Moles/volume]` would not return this result.
In order to perform such an exact string match in Lucene/Elasticsearch, you should qualify the `_text` or `_content` Search Parameter with the `:contains` modifier, as follows:
```http request
GET [base]/Observation?_content:contains=[Moles/volume]
```
Using `:contains` on `_text` or `_content` causes the search engine to perform a direct substring match anywhere within the field.
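
For Java clients, the same query can be issued through the HAPI generic client. A minimal sketch, assuming an R4 server at a placeholder base URL:

```java
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import ca.uhn.fhir.util.UrlUtil;
import org.hl7.fhir.r4.model.Bundle;

public class ContainsSearchExample {
	public static Bundle searchByContent(String theExactFragment) {
		FhirContext ctx = FhirContext.forR4();
		IGenericClient client = ctx.newRestfulGenericClient("http://localhost:8000/fhir"); // placeholder base URL
		return client.search()
				.byUrl("Observation?_content:contains=" + UrlUtil.escapeUrlParam(theExactFragment))
				.returnBundle(Bundle.class)
				.execute();
	}
}
```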
# Experimental Extended Lucene/Elasticsearch Indexing
Additional indexing is implemented for simple search parameters of type token, string, and reference.
@ -68,19 +117,19 @@ The `:text` modifier provides the same [modified Simple Query Syntax](#modified-
See https://www.hl7.org/fhir/search.html#token.
## Supported Common and Special Search Parameters
| Parameter | Supported | type |
|--------------|-----------|--------|
| _id | no | |
| _lastUpdated | yes | date |
| _tag | yes | token |
| _profile | yes | URI |
| _security | yes | token |
| _text | yes | string |
| _content | yes | string |
| _list | no | |
| _has | no | |
| _type | no | |
| _source | yes | URI |
| Parameter | Supported | type |
|--------------|-----------|------------------------|
| _id | no | |
| _lastUpdated | yes | date |
| _tag | yes | token |
| _profile | yes | URI |
| _security | yes | token |
| _text | yes | string(R4) special(R5) |
| _content | yes | string(R4) special(R5) |
| _list | no | |
| _has | no | |
| _type | no | |
| _source | yes | URI |
## ValueSet autocomplete extension

View File

@ -68,3 +68,19 @@ This setting controls whether non-resource (ex: Patient is a resource, MdmLink i
Clients may want to disable this setting for performance reasons as it populates a new set of database tables when enabled.
Setting this property explicitly to false disables the feature: [Non Resource DB History](/apidocs/hapi-fhir-storage/ca/uhn/fhir/jpa/api/config/JpaStorageSettings.html#isNonResourceDbHistoryEnabled())
# Enabling Index Storage Optimization
If enabled, the server will not write data to the `SP_NAME`, `RES_TYPE`, `SP_UPDATED` columns for all `HFJ_SPIDX_xxx` tables.
This setting may be enabled on servers where `HFJ_SPIDX_xxx` tables are expected to have a large amount of data (millions of rows) in order to reduce overall storage size.
Setting this property explicitly to true enables the feature: [Index Storage Optimized](/hapi-fhir/apidocs/hapi-fhir-jpaserver-model/ca/uhn/fhir/jpa/model/entity/StorageSettings.html#setIndexStorageOptimized(boolean))
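
A minimal configuration sketch, assuming a Spring Java-config application that exposes the JPA storage settings as a bean (the bean wiring is an assumption; the setter is the one linked above):

```java
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
public class StorageConfig {
	@Bean
	public JpaStorageSettings storageSettings() {
		JpaStorageSettings settings = new JpaStorageSettings();
		// Stop writing SP_NAME, RES_TYPE and SP_UPDATED on new and updated HFJ_SPIDX_xxx rows
		settings.setIndexStorageOptimized(true);
		return settings;
	}
}
```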
## Limitations
* This setting only applies to newly inserted and updated rows in `HFJ_SPIDX_xxx` tables. All existing rows will still have values in the `SP_NAME`, `RES_TYPE` and `SP_UPDATED` columns. Executing the `$reindex` operation will apply storage optimization to existing data.
* If this setting is enabled along with [Index Missing Fields](/hapi-fhir/apidocs/hapi-fhir-jpaserver-model/ca/uhn/fhir/jpa/model/entity/StorageSettings.html#getIndexMissingFields()) setting, the following index may need to be added into the `HFJ_SPIDX_xxx` tables to improve the search performance: `(HASH_IDENTITY, SP_MISSING, RES_ID, PARTITION_ID)`.
* This setting should not be enabled in combination with the [Include Partition in Search Hashes](/hapi-fhir/apidocs/hapi-fhir-jpaserver-model/ca/uhn/fhir/jpa/model/config/PartitionSettings.html#setIncludePartitionInSearchHashes(boolean)) flag, since in that case the partition ID cannot be included in the search hashes.

View File

@ -443,7 +443,9 @@ In some configurations, the partition ID is also factored into the hashes.
<img src="/hapi-fhir/docs/images/jpa_erd_search_indexes.svg" alt="Search Indexes" style="width: 100%; max-width: 900px;"/>
## Columns
<a name="HFJ_SPIDX_common"/>
## Common Search Index Columns
The following columns are common to **all HFJ_SPIDX_xxx tables**.
@ -502,7 +504,7 @@ The following columns are common to **all HFJ_SPIDX_xxx tables**.
<td>SP_NAME</td>
<td></td>
<td>String</td>
<td></td>
<td>Nullable</td>
<td>
This is the name of the search parameter being indexed.
</td>
@ -511,7 +513,7 @@ The following columns are common to **all HFJ_SPIDX_xxx tables**.
<td>RES_TYPE</td>
<td></td>
<td>String</td>
<td></td>
<td>Nullable</td>
<td>
This is the name of the resource being indexed.
</td>
@ -556,6 +558,8 @@ Sorting is done by the SP_VALUE_LOW column.
## Columns
Note: This table has the columns listed below, but it also has all common columns listed above in [Common Search Index Columns](#HFJ_SPIDX_common).
<table class="table table-striped table-condensed">
<thead>
<tr>
@ -625,6 +629,8 @@ Range queries and sorting use the HASH_IDENTITY and SP_VALUE columns.
## Columns
Note: This table has the columns listed below, but it also has all common columns listed above in [Common Search Index Columns](#HFJ_SPIDX_common).
<table class="table table-striped table-condensed">
<thead>
<tr>
@ -660,6 +666,8 @@ Sorting is done via the HASH_IDENTITY and SP_VALUE columns.
## Columns
Note: This table has the columns listed below, but it also has all common columns listed above in [Common Search Index Columns](#HFJ_SPIDX_common).
<table class="table table-striped table-condensed">
<thead>
<tr>
@ -753,6 +761,8 @@ Sorting is done via the HASH_IDENTITY and SP_VALUE_NORMALIZED columns.
## Columns
Note: This table has the columns listed below, but it also has all common columns listed above in [Common Search Index Columns](#HFJ_SPIDX_common).
<table class="table table-striped table-condensed">
<thead>
<tr>
@ -806,6 +816,8 @@ Sorting is done via the HASH_IDENTITY and SP_VALUE columns.
## Columns
Note: This table has the columns listed below, but it also has all common columns listed above in [Common Search Index Columns](#HFJ_SPIDX_common).
<table class="table table-striped table-condensed">
<thead>
<tr>
@ -876,6 +888,8 @@ Sorting is done via the HASH_IDENTITY and SP_URI columns.
## Columns
Note: This table has the columns listed below, but it also has all common columns listed above in [Common Search Index Columns](#HFJ_SPIDX_common).
<table class="table table-striped table-condensed">
<thead>
<tr>
@ -908,3 +922,135 @@ Sorting is done via the HASH_IDENTITY and SP_URI columns.
</tbody>
</table>
# HFJ_IDX_CMB_TOK_NU: Combo Non-Unique Search Param
This table is used to index [Non-Unique Combo Search Parameters](https://smilecdr.com/docs/fhir_standard/fhir_search_custom_search_parameters.html#combo-search-index-parameters).
## Columns
<table class="table table-striped table-condensed">
<thead>
<tr>
<th>Name</th>
<th>Relationships</th>
<th>Datatype</th>
<th>Nullable</th>
<th>Description</th>
</tr>
</thead>
<tbody>
<tr>
<td>PID</td>
<td></td>
<td>Long</td>
<td></td>
<td>
A unique persistent identifier for the given index row.
</td>
</tr>
<tr>
<td>RES_ID</td>
<td>FK to <a href="#HFJ_RESOURCE">HFJ_RESOURCE</a></td>
<td>Long</td>
<td></td>
<td>
Contains the PID of the resource being indexed.
</td>
</tr>
<tr>
<td>IDX_STRING</td>
<td></td>
<td>String</td>
<td></td>
<td>
This column contains a FHIR search expression indicating what is being indexed. For example, if a
non-unique combo search parameter is present which indexes a combination of Observation#code and
Observation#status, this column might contain a value such as
<code>Observation?code=http://loinc.org|1234-5&status=final</code>
</td>
</tr>
<tr>
<td>HASH_COMPLETE</td>
<td></td>
<td>Long</td>
<td></td>
<td>
This column contains a hash of the value in column <code>IDX_STRING</code>.
</td>
</tr>
</tbody>
</table>
<a name="HFJ_IDX_CMP_STRING_UNIQ"/>
# HFJ_IDX_CMP_STRING_UNIQ: Combo Unique Search Param
This table is used to index [Unique Combo Search Parameters](https://smilecdr.com/docs/fhir_standard/fhir_search_custom_search_parameters.html#combo-search-index-parameters).
## Columns
<table class="table table-striped table-condensed">
<thead>
<tr>
<th>Name</th>
<th>Relationships</th>
<th>Datatype</th>
<th>Nullable</th>
<th>Description</th>
</tr>
</thead>
<tbody>
<tr>
<td>PID</td>
<td></td>
<td>Long</td>
<td></td>
<td>
A unique persistent identifier for the given index row.
</td>
</tr>
<tr>
<td>RES_ID</td>
<td>FK to <a href="#HFJ_RESOURCE">HFJ_RESOURCE</a></td>
<td>Long</td>
<td></td>
<td>
Contains the PID of the resource being indexed.
</td>
</tr>
<tr>
<td>IDX_STRING</td>
<td></td>
<td>String</td>
<td></td>
<td>
This column contains a FHIR search expression indicating what is being indexed. For example, if a
unique combo search parameter is present which indexes a combination of Observation#code and
Observation#status, this column might contain a value such as
<code>Observation?code=http://loinc.org|1234-5&status=final</code>
</td>
</tr>
<tr>
<td>HASH_COMPLETE</td>
<td></td>
<td>Long</td>
<td></td>
<td>
This column contains a hash of the value in column <code>IDX_STRING</code>.
</td>
</tr>
<tr>
<td>HASH_COMPLETE_2</td>
<td></td>
<td>Long</td>
<td></td>
<td>
This column contains an additional hash of the value in column <code>IDX_STRING</code>, using a
static salt applied to the value prior to hashing. This is done in order to increase the number
of bits used to hash the index string from 64 to 128.
</td>
</tr>
</tbody>
</table>

View File

@ -6,6 +6,6 @@ The [PartitionSettings](/hapi-fhir/apidocs/hapi-fhir-jpaserver-model/ca/uhn/fhir
The following settings can be enabled:
* **Include Partition in Search Hashes** ([JavaDoc](/hapi-fhir/apidocs/hapi-fhir-jpaserver-model/ca/uhn/fhir/jpa/model/config/PartitionSettings.html#setIncludePartitionInSearchHashes(boolean))): If this feature is enabled, partition IDs will be factored into [Search Hashes](/hapi-fhir/docs/server_jpa/schema.html#search-hashes). When this flag is not set (as is the default), when a search requests a specific partition, an additional SQL WHERE predicate is added to the query to explicitly request the given partition ID. When this flag is set, this additional WHERE predicate is not necessary since the partition is factored into the hash value being searched on. Setting this flag avoids the need to manually adjust indexes against the HFJ_SPIDX tables. Note that this flag should **not be used in environments where partitioning is being used for security purposes**, since it is possible for a user to reverse engineer false hash collisions.
* **Include Partition in Search Hashes** ([JavaDoc](/hapi-fhir/apidocs/hapi-fhir-jpaserver-model/ca/uhn/fhir/jpa/model/config/PartitionSettings.html#setIncludePartitionInSearchHashes(boolean))): If this feature is enabled, partition IDs will be factored into [Search Hashes](/hapi-fhir/docs/server_jpa/schema.html#search-hashes). When this flag is not set (as is the default), when a search requests a specific partition, an additional SQL WHERE predicate is added to the query to explicitly request the given partition ID. When this flag is set, this additional WHERE predicate is not necessary since the partition is factored into the hash value being searched on. Setting this flag avoids the need to manually adjust indexes against the HFJ_SPIDX tables. Note that this flag should **not be used in environments where partitioning is being used for security purposes**, since it is possible for a user to reverse engineer false hash collisions. This setting should not be enabled in combination with the [Index Storage Optimized](/hapi-fhir/apidocs/hapi-fhir-jpaserver-model/ca/uhn/fhir/jpa/model/entity/StorageSettings.html#isIndexStorageOptimized()) flag, since in that case the partition ID cannot be included in the search hashes. A minimal configuration sketch follows this list.
* **Cross-Partition Reference Mode**: ([JavaDoc](/hapi-fhir/apidocs/hapi-fhir-jpaserver-model/ca/uhn/fhir/jpa/model/config/PartitionSettings.html#setAllowReferencesAcrossPartitions(ca.uhn.fhir.jpa.model.config.PartitionSettings.CrossPartitionReferenceMode))): This setting controls whether resources in one partition should be allowed to create references to resources in other partitions.
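
A minimal configuration sketch for the first flag (the Spring wiring is an assumption; the setters are the ones linked above):

```java
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
public class PartitionConfig {
	@Bean
	public PartitionSettings partitionSettings() {
		PartitionSettings settings = new PartitionSettings();
		settings.setPartitioningEnabled(true);
		// Factor partition IDs into search hashes. Per the warning above, do not
		// combine this with StorageSettings#setIndexStorageOptimized(true).
		settings.setIncludePartitionInSearchHashes(true);
		return settings;
	}
}
```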

View File

@ -11,7 +11,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.3.7-SNAPSHOT</version>
<version>7.3.8-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.3.7-SNAPSHOT</version>
<version>7.3.8-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.3.7-SNAPSHOT</version>
<version>7.3.8-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.3.7-SNAPSHOT</version>
<version>7.3.8-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -80,6 +80,7 @@ import ca.uhn.fhir.jpa.interceptor.CascadingDeleteInterceptor;
import ca.uhn.fhir.jpa.interceptor.JpaConsentContextServices;
import ca.uhn.fhir.jpa.interceptor.OverridePathBasedReferentialIntegrityForDeletesInterceptor;
import ca.uhn.fhir.jpa.interceptor.validation.RepositoryValidatingRuleBuilder;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.model.sched.ISchedulerService;
import ca.uhn.fhir.jpa.packages.IHapiPackageCacheManager;
@ -857,12 +858,14 @@ public class JpaConfig {
PersistenceContextProvider thePersistenceContextProvider,
IResourceSearchUrlDao theResourceSearchUrlDao,
MatchUrlService theMatchUrlService,
FhirContext theFhirContext) {
FhirContext theFhirContext,
PartitionSettings thePartitionSettings) {
return new ResourceSearchUrlSvc(
thePersistenceContextProvider.getEntityManager(),
theResourceSearchUrlDao,
theMatchUrlService,
theFhirContext);
theFhirContext,
thePartitionSettings);
}
@Bean

View File

@ -19,7 +19,9 @@
*/
package ca.uhn.fhir.jpa.config;
import ca.uhn.fhir.context.ConfigurationException;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
@ -47,6 +49,7 @@ import ca.uhn.fhir.jpa.search.cache.ISearchCacheSvc;
import ca.uhn.fhir.jpa.search.cache.ISearchResultCacheSvc;
import ca.uhn.fhir.rest.server.IPagingProvider;
import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
import jakarta.annotation.PostConstruct;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.springframework.beans.factory.BeanFactory;
import org.springframework.beans.factory.annotation.Autowired;
@ -206,4 +209,15 @@ public class SearchConfig {
exceptionService() // singleton
);
}
@PostConstruct
public void validateConfiguration() {
if (myStorageSettings.isIndexStorageOptimized()
&& myPartitionSettings.isPartitioningEnabled()
&& myPartitionSettings.isIncludePartitionInSearchHashes()) {
throw new ConfigurationException(Msg.code(2525) + "Incorrect configuration. "
+ "StorageSettings#isIndexStorageOptimized and PartitionSettings.isIncludePartitionInSearchHashes "
+ "cannot be enabled at the same time.");
}
}
}

View File

@ -1284,10 +1284,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
myInterceptorBroadcaster, theRequest, Pointcut.JPA_PERFTRACE_INFO, params);
}
}
// Synchronize composite params
mySearchParamWithInlineReferencesExtractor.storeUniqueComboParameters(
newParams, entity, existingParams);
}
}

View File

@ -908,7 +908,8 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
RequestDetails theRequestDetails,
TransactionDetails theTransactionDetails) {
StopWatch w = new StopWatch();
TransactionDetails transactionDetails = new TransactionDetails();
TransactionDetails transactionDetails =
theTransactionDetails != null ? theTransactionDetails : new TransactionDetails();
List<ResourceTable> deletedResources = new ArrayList<>();
List<IResourcePersistentId<?>> resolvedIds =
@ -924,6 +925,8 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
T resourceToDelete = myJpaStorageResourceParser.toResource(myResourceType, entity, null, false);
transactionDetails.addDeletedResourceId(pid);
// Notify IServerOperationInterceptors about pre-action call
HookParams hooks = new HookParams()
.add(IBaseResource.class, resourceToDelete)
@ -988,8 +991,6 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
deletedResources.size(),
w.getMillis());
theTransactionDetails.addDeletedResourceIds(theResourceIds);
DeleteMethodOutcome retVal = new DeleteMethodOutcome();
retVal.setDeletedEntities(deletedResources);
retVal.setOperationOutcome(oo);
@ -1627,6 +1628,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
T resource = (T) myJpaStorageResourceParser.toResource(entity, false);
reindexSearchParameters(resource, entity, theTransactionDetails);
} catch (Exception e) {
ourLog.warn("Failure during reindex: {}", e.toString());
theReindexOutcome.addWarning("Failed to reindex resource " + entity.getIdDt() + ": " + e);
myResourceTableDao.updateIndexStatus(entity.getId(), INDEX_STATUS_INDEXING_FAILED);
}

View File

@ -224,6 +224,17 @@ public abstract class BaseHapiFhirSystemDao<T extends IBaseBundle, MT> extends B
BaseHasResource::isHasTags,
entityChunk);
prefetchByField(
"comboStringUnique",
"myParamsComboStringUnique",
ResourceTable::isParamsComboStringUniquePresent,
entityChunk);
prefetchByField(
"comboTokenNonUnique",
"myParamsComboTokensNonUnique",
ResourceTable::isParamsComboTokensNonUniquePresent,
entityChunk);
if (myStorageSettings.getIndexMissingFields() == JpaStorageSettings.IndexEnabledEnum.ENABLED) {
prefetchByField("searchParamPresence", "mySearchParamPresents", r -> true, entityChunk);
}

View File

@ -34,6 +34,7 @@ import ca.uhn.fhir.jpa.dao.search.IHSearchSortHelper;
import ca.uhn.fhir.jpa.dao.search.LastNOperation;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.model.search.ExtendedHSearchBuilderConsumeAdvancedQueryClausesParams;
import ca.uhn.fhir.jpa.model.search.ExtendedHSearchIndexData;
import ca.uhn.fhir.jpa.model.search.StorageProcessingMessage;
import ca.uhn.fhir.jpa.search.autocomplete.ValueSetAutocompleteOptions;
@ -141,17 +142,26 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc {
}
@Override
public boolean supportsSomeOf(SearchParameterMap myParams) {
// keep this in sync with the guts of doSearch
public boolean canUseHibernateSearch(String theResourceType, SearchParameterMap myParams) {
boolean requiresHibernateSearchAccess = myParams.containsKey(Constants.PARAM_CONTENT)
|| myParams.containsKey(Constants.PARAM_TEXT)
|| myParams.isLastN();
// we have to use it - _text and _content searches only use hibernate
if (requiresHibernateSearchAccess) {
return true;
}
requiresHibernateSearchAccess |=
myStorageSettings.isAdvancedHSearchIndexing() && myAdvancedIndexQueryBuilder.isSupportsSomeOf(myParams);
// if the registry has not been initialized
// we cannot use HibernateSearch because it
// will, internally, trigger a new search
// when it refreshes the search parameters
// (which will cause an infinite loop)
if (!mySearchParamRegistry.isInitialized()) {
return false;
}
return requiresHibernateSearchAccess;
return myStorageSettings.isAdvancedHSearchIndexing()
&& myAdvancedIndexQueryBuilder.canUseHibernateSearch(theResourceType, myParams, mySearchParamRegistry);
}
@Override
@ -174,6 +184,7 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc {
}
// keep this in sync with canUseHibernateSearch();
@SuppressWarnings("rawtypes")
private ISearchQueryExecutor doSearch(
String theResourceType,
SearchParameterMap theParams,
@ -208,6 +219,7 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc {
return DEFAULT_MAX_NON_PAGED_SIZE;
}
@SuppressWarnings("rawtypes")
private SearchQueryOptionsStep<?, Long, SearchLoadingOptionsStep, ?, ?> getSearchQueryOptionsStep(
String theResourceType, SearchParameterMap theParams, IResourcePersistentId theReferencingPid) {
@ -230,6 +242,7 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc {
return query;
}
@SuppressWarnings("rawtypes")
private PredicateFinalStep buildWhereClause(
SearchPredicateFactory f,
String theResourceType,
@ -271,8 +284,12 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc {
* Handle other supported parameters
*/
if (myStorageSettings.isAdvancedHSearchIndexing() && theParams.getEverythingMode() == null) {
myAdvancedIndexQueryBuilder.addAndConsumeAdvancedQueryClauses(
builder, theResourceType, theParams, mySearchParamRegistry);
ExtendedHSearchBuilderConsumeAdvancedQueryClausesParams params =
new ExtendedHSearchBuilderConsumeAdvancedQueryClausesParams();
params.setSearchParamRegistry(mySearchParamRegistry)
.setResourceType(theResourceType)
.setSearchParameterMap(theParams);
myAdvancedIndexQueryBuilder.addAndConsumeAdvancedQueryClauses(builder, params);
}
// DROP EARLY HERE IF BOOL IS EMPTY?
});
@ -283,11 +300,13 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc {
return Search.session(myEntityManager);
}
@SuppressWarnings("rawtypes")
private List<IResourcePersistentId> convertLongsToResourcePersistentIds(List<Long> theLongPids) {
return theLongPids.stream().map(JpaPid::fromId).collect(Collectors.toList());
}
@Override
@SuppressWarnings({"rawtypes", "unchecked"})
public List<IResourcePersistentId> everything(
String theResourceName,
SearchParameterMap theParams,
@ -336,6 +355,7 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc {
@Transactional()
@Override
@SuppressWarnings("unchecked")
public List<IResourcePersistentId> search(
String theResourceName, SearchParameterMap theParams, RequestDetails theRequestDetails) {
validateHibernateSearchIsEnabled();
@ -347,6 +367,7 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc {
/**
* Adapt our async interface to the legacy concrete List
*/
@SuppressWarnings("rawtypes")
private List<IResourcePersistentId> toList(ISearchQueryExecutor theSearchResultStream, long theMaxSize) {
return StreamSupport.stream(Spliterators.spliteratorUnknownSize(theSearchResultStream, 0), false)
.map(JpaPid::fromId)
@ -384,6 +405,7 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc {
}
@Override
@SuppressWarnings("rawtypes")
public List<IResourcePersistentId> lastN(SearchParameterMap theParams, Integer theMaximumResults) {
ensureElastic();
dispatchEvent(IHSearchEventListener.HSearchEventType.SEARCH);

View File

@ -32,6 +32,7 @@ import org.hl7.fhir.instance.model.api.IBaseResource;
import java.util.Collection;
import java.util.List;
@SuppressWarnings({"rawtypes"})
public interface IFulltextSearchSvc {
/**
@ -79,11 +80,18 @@ public interface IFulltextSearchSvc {
ExtendedHSearchIndexData extractLuceneIndexData(
IBaseResource theResource, ResourceIndexedSearchParams theNewParams);
boolean supportsSomeOf(SearchParameterMap myParams);
/**
* Returns true if the parameter map can be handled for hibernate search.
* We have to filter out any queries that might use search params
* we only know how to handle in JPA.
* -
* See {@link ca.uhn.fhir.jpa.dao.search.ExtendedHSearchSearchBuilder#addAndConsumeAdvancedQueryClauses}
*/
boolean canUseHibernateSearch(String theResourceType, SearchParameterMap theParameterMap);
/**
* Re-publish the resource to the full-text index.
*
* -
* During update, hibernate search only republishes the entity if it has changed.
* During $reindex, we want to force the re-index.
*

View File

@ -19,20 +19,30 @@
*/
package ca.uhn.fhir.jpa.dao.index;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao;
import ca.uhn.fhir.jpa.dao.data.IResourceIndexedComboStringUniqueDao;
import ca.uhn.fhir.jpa.model.entity.BaseResourceIndex;
import ca.uhn.fhir.jpa.model.entity.BaseResourceIndexedSearchParam;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedComboStringUnique;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.model.entity.StorageSettings;
import ca.uhn.fhir.jpa.searchparam.extractor.ResourceIndexedSearchParams;
import ca.uhn.fhir.jpa.util.AddRemoveCount;
import ca.uhn.fhir.rest.server.exceptions.ResourceVersionConflictException;
import com.google.common.annotations.VisibleForTesting;
import jakarta.annotation.Nullable;
import jakarta.persistence.EntityManager;
import jakarta.persistence.PersistenceContext;
import jakarta.persistence.PersistenceContextType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
@ -40,27 +50,46 @@ import java.util.Set;
@Service
public class DaoSearchParamSynchronizer {
private static final Logger ourLog = LoggerFactory.getLogger(DaoSearchParamSynchronizer.class);
@PersistenceContext(type = PersistenceContextType.TRANSACTION)
protected EntityManager myEntityManager;
@Autowired
private JpaStorageSettings myStorageSettings;
@Autowired
private IResourceIndexedComboStringUniqueDao myResourceIndexedCompositeStringUniqueDao;
@Autowired
private FhirContext myFhirContext;
public AddRemoveCount synchronizeSearchParamsToDatabase(
ResourceIndexedSearchParams theParams,
ResourceTable theEntity,
ResourceIndexedSearchParams existingParams) {
AddRemoveCount retVal = new AddRemoveCount();
synchronize(theEntity, retVal, theParams.myStringParams, existingParams.myStringParams);
synchronize(theEntity, retVal, theParams.myTokenParams, existingParams.myTokenParams);
synchronize(theEntity, retVal, theParams.myNumberParams, existingParams.myNumberParams);
synchronize(theEntity, retVal, theParams.myQuantityParams, existingParams.myQuantityParams);
synchronize(theEntity, retVal, theParams.myQuantityNormalizedParams, existingParams.myQuantityNormalizedParams);
synchronize(theEntity, retVal, theParams.myDateParams, existingParams.myDateParams);
synchronize(theEntity, retVal, theParams.myUriParams, existingParams.myUriParams);
synchronize(theEntity, retVal, theParams.myCoordsParams, existingParams.myCoordsParams);
synchronize(theEntity, retVal, theParams.myLinks, existingParams.myLinks);
synchronize(theEntity, retVal, theParams.myComboTokenNonUnique, existingParams.myComboTokenNonUnique);
synchronize(theEntity, retVal, theParams.myStringParams, existingParams.myStringParams, null);
synchronize(theEntity, retVal, theParams.myTokenParams, existingParams.myTokenParams, null);
synchronize(theEntity, retVal, theParams.myNumberParams, existingParams.myNumberParams, null);
synchronize(theEntity, retVal, theParams.myQuantityParams, existingParams.myQuantityParams, null);
synchronize(
theEntity,
retVal,
theParams.myQuantityNormalizedParams,
existingParams.myQuantityNormalizedParams,
null);
synchronize(theEntity, retVal, theParams.myDateParams, existingParams.myDateParams, null);
synchronize(theEntity, retVal, theParams.myUriParams, existingParams.myUriParams, null);
synchronize(theEntity, retVal, theParams.myCoordsParams, existingParams.myCoordsParams, null);
synchronize(theEntity, retVal, theParams.myLinks, existingParams.myLinks, null);
synchronize(theEntity, retVal, theParams.myComboTokenNonUnique, existingParams.myComboTokenNonUnique, null);
synchronize(
theEntity,
retVal,
theParams.myComboStringUniques,
existingParams.myComboStringUniques,
new UniqueIndexPreExistenceChecker());
// make sure links are indexed
theEntity.setResourceLinks(theParams.myLinks);
@ -73,11 +102,17 @@ public class DaoSearchParamSynchronizer {
myEntityManager = theEntityManager;
}
@VisibleForTesting
public void setStorageSettings(JpaStorageSettings theStorageSettings) {
myStorageSettings = theStorageSettings;
}
private <T extends BaseResourceIndex> void synchronize(
ResourceTable theEntity,
AddRemoveCount theAddRemoveCount,
Collection<T> theNewParams,
Collection<T> theExistingParams) {
Collection<T> theExistingParams,
@Nullable IPreSaveHook<T> theAddParamPreSaveHook) {
Collection<T> newParams = theNewParams;
for (T next : newParams) {
next.setPartitionId(theEntity.getPartitionId());
@ -100,6 +135,7 @@ public class DaoSearchParamSynchronizer {
Set<T> existingParamsAsSet = new HashSet<>(theExistingParams.size());
for (Iterator<T> iterator = theExistingParams.iterator(); iterator.hasNext(); ) {
T next = iterator.next();
next.setPlaceholderHashesIfMissing();
if (!existingParamsAsSet.add(next)) {
iterator.remove();
myEntityManager.remove(next);
@ -114,7 +150,13 @@ public class DaoSearchParamSynchronizer {
List<T> paramsToRemove = subtract(theExistingParams, newParams);
List<T> paramsToAdd = subtract(newParams, theExistingParams);
if (theAddParamPreSaveHook != null) {
theAddParamPreSaveHook.preSave(paramsToRemove, paramsToAdd);
}
tryToReuseIndexEntities(paramsToRemove, paramsToAdd);
updateExistingParamsIfRequired(theExistingParams, paramsToAdd, newParams, paramsToRemove);
for (T next : paramsToRemove) {
if (!myEntityManager.contains(next)) {
@ -125,6 +167,7 @@ public class DaoSearchParamSynchronizer {
}
myEntityManager.remove(next);
}
for (T next : paramsToAdd) {
myEntityManager.merge(next);
}
@ -134,6 +177,62 @@ public class DaoSearchParamSynchronizer {
theAddRemoveCount.addToRemoveCount(paramsToRemove.size());
}
/**
* <p>
* This method updates search parameter fields during a
* <code>$reindex</code> or update operation by:
* 1. Marking existing entities for update so that index storage optimization is applied,
* if it is enabled (disabled by default).
* 2. Recovering the <code>SP_NAME</code> and <code>RES_TYPE</code> values of existing
* entities if index storage optimization is disabled (but was enabled previously).
* </p>
* For details, see: {@link StorageSettings#isIndexStorageOptimized()}
*/
private <T extends BaseResourceIndex> void updateExistingParamsIfRequired(
Collection<T> theExistingParams,
List<T> theParamsToAdd,
Collection<T> theNewParams,
List<T> theParamsToRemove) {
theExistingParams.stream()
.filter(BaseResourceIndexedSearchParam.class::isInstance)
.map(BaseResourceIndexedSearchParam.class::cast)
.filter(this::isSearchParameterUpdateRequired)
.filter(sp -> !theParamsToAdd.contains(sp))
.filter(sp -> !theParamsToRemove.contains(sp))
.forEach(sp -> {
// force Hibernate to update the search parameter entity by resetting the SP_UPDATED value
sp.setUpdated(new Date());
recoverExistingSearchParameterIfRequired(sp, theNewParams);
theParamsToAdd.add((T) sp);
});
}
/**
* Search parameters should be updated after the IndexStorageOptimized setting changes.
* If IndexStorageOptimized is disabled (and was enabled previously), this method copies the
* param name and resource type from the extracted search parameter to the existing one.
*/
private <T extends BaseResourceIndex> void recoverExistingSearchParameterIfRequired(
BaseResourceIndexedSearchParam theSearchParamToRecover, Collection<T> theNewParams) {
if (!myStorageSettings.isIndexStorageOptimized()) {
theNewParams.stream()
.filter(BaseResourceIndexedSearchParam.class::isInstance)
.map(BaseResourceIndexedSearchParam.class::cast)
.filter(paramToAdd -> paramToAdd.equals(theSearchParamToRecover))
.findFirst()
.ifPresent(newParam -> {
theSearchParamToRecover.restoreParamName(newParam.getParamName());
theSearchParamToRecover.setResourceType(newParam.getResourceType());
});
}
}
private boolean isSearchParameterUpdateRequired(BaseResourceIndexedSearchParam theSearchParameter) {
return (myStorageSettings.isIndexStorageOptimized() && !theSearchParameter.isIndexStorageOptimized())
|| (!myStorageSettings.isIndexStorageOptimized() && theSearchParameter.isIndexStorageOptimized());
}
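The two branches above reduce to an exclusive-or on the optimized flag; an equivalent compact form, shown only as a sketch:

// Equivalent sketch: an update is required whenever the stored flag
// disagrees with the current setting.
private boolean isSearchParameterUpdateRequired(BaseResourceIndexedSearchParam theSearchParameter) {
    return myStorageSettings.isIndexStorageOptimized() != theSearchParameter.isIndexStorageOptimized();
}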
/**
* The logic here is that often times when we update a resource we are dropping
* one index row and adding another. This method tries to reuse rows that would otherwise
@ -180,4 +279,64 @@ public class DaoSearchParamSynchronizer {
}
return retVal;
}
private interface IPreSaveHook<T> {
void preSave(Collection<T> theParamsToRemove, Collection<T> theParamsToAdd);
}
private class UniqueIndexPreExistenceChecker implements IPreSaveHook<ResourceIndexedComboStringUnique> {
@Override
public void preSave(
Collection<ResourceIndexedComboStringUnique> theParamsToRemove,
Collection<ResourceIndexedComboStringUnique> theParamsToAdd) {
if (myStorageSettings.isUniqueIndexesCheckedBeforeSave()) {
for (ResourceIndexedComboStringUnique theIndex : theParamsToAdd) {
ResourceIndexedComboStringUnique existing =
myResourceIndexedCompositeStringUniqueDao.findByQueryString(theIndex.getIndexString());
if (existing != null) {
/*
* If we're reindexing, and the previous index row is being updated
* to add previously missing hashes, we may falsely detect that the index
* creation is going to fail.
*/
boolean existingIndexIsScheduledForRemoval = false;
for (var next : theParamsToRemove) {
if (existing == next) {
existingIndexIsScheduledForRemoval = true;
break;
}
}
if (existingIndexIsScheduledForRemoval) {
continue;
}
String searchParameterId = "(unknown)";
if (theIndex.getSearchParameterId() != null) {
searchParameterId = theIndex.getSearchParameterId().getValue();
}
String msg = myFhirContext
.getLocalizer()
.getMessage(
BaseHapiFhirDao.class,
"uniqueIndexConflictFailure",
existing.getResource().getResourceType(),
theIndex.getIndexString(),
existing.getResource()
.getIdDt()
.toUnqualifiedVersionless()
.getValue(),
searchParameterId);
// Use ResourceVersionConflictException here because the HapiTransactionService
// catches this and can retry it if needed
throw new ResourceVersionConflictException(Msg.code(1093) + msg);
}
}
}
}
}
}
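IPreSaveHook gives synchronize() a seam for index-type-specific checks before rows are written; the combo-string-unique checker above is the only implementation in this diff, but a minimal hypothetical one illustrates the contract:

// Hypothetical hook, for illustration only: inspect the computed delta
// before anything is persisted.
private static class LoggingPreSaveHook<T> implements IPreSaveHook<T> {
    @Override
    public void preSave(Collection<T> theParamsToRemove, Collection<T> theParamsToAdd) {
        ourLog.debug("About to remove {} and add {} index rows", theParamsToRemove.size(), theParamsToAdd.size());
    }
}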

View File

@ -19,15 +19,8 @@
*/
package ca.uhn.fhir.jpa.dao.index;
import ca.uhn.fhir.context.RuntimeSearchParam;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao;
import ca.uhn.fhir.jpa.dao.data.IResourceIndexedComboStringUniqueDao;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.model.entity.BaseResourceIndexedSearchParam;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedComboStringUnique;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.searchparam.extractor.BaseSearchParamWithInlineReferencesExtractor;
import ca.uhn.fhir.jpa.searchparam.extractor.ISearchParamExtractor;
@ -36,10 +29,7 @@ import ca.uhn.fhir.jpa.searchparam.extractor.ResourceIndexedSearchParams;
import ca.uhn.fhir.jpa.searchparam.extractor.SearchParamExtractorService;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
import ca.uhn.fhir.rest.server.exceptions.ResourceVersionConflictException;
import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
import com.google.common.annotations.VisibleForTesting;
import jakarta.annotation.Nullable;
import jakarta.persistence.EntityManager;
import jakarta.persistence.PersistenceContext;
import jakarta.persistence.PersistenceContextType;
@ -48,49 +38,22 @@ import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Lazy;
import org.springframework.stereotype.Service;
import java.util.Collection;
import java.util.stream.Collectors;
@Service
@Lazy
public class SearchParamWithInlineReferencesExtractor extends BaseSearchParamWithInlineReferencesExtractor<JpaPid>
implements ISearchParamWithInlineReferencesExtractor {
private static final org.slf4j.Logger ourLog =
org.slf4j.LoggerFactory.getLogger(SearchParamWithInlineReferencesExtractor.class);
@PersistenceContext(type = PersistenceContextType.TRANSACTION)
protected EntityManager myEntityManager;
@Autowired
private ISearchParamRegistry mySearchParamRegistry;
@Autowired
private SearchParamExtractorService mySearchParamExtractorService;
@Autowired
private DaoSearchParamSynchronizer myDaoSearchParamSynchronizer;
@Autowired
private IResourceIndexedComboStringUniqueDao myResourceIndexedCompositeStringUniqueDao;
@Autowired
private PartitionSettings myPartitionSettings;
@VisibleForTesting
public void setPartitionSettings(PartitionSettings thePartitionSettings) {
myPartitionSettings = thePartitionSettings;
}
@VisibleForTesting
public void setSearchParamExtractorService(SearchParamExtractorService theSearchParamExtractorService) {
mySearchParamExtractorService = theSearchParamExtractorService;
}
@VisibleForTesting
public void setSearchParamRegistry(ISearchParamRegistry theSearchParamRegistry) {
mySearchParamRegistry = theSearchParamRegistry;
}
public void populateFromResource(
RequestPartitionId theRequestPartitionId,
ResourceIndexedSearchParams theParams,
@ -116,103 +79,4 @@ public class SearchParamWithInlineReferencesExtractor extends BaseSearchParamWit
thePerformIndexing,
ISearchParamExtractor.ALL_PARAMS);
}
@Nullable
private Collection<? extends BaseResourceIndexedSearchParam> findParameterIndexes(
ResourceIndexedSearchParams theParams, RuntimeSearchParam nextCompositeOf) {
Collection<? extends BaseResourceIndexedSearchParam> paramsListForCompositePart = null;
switch (nextCompositeOf.getParamType()) {
case NUMBER:
paramsListForCompositePart = theParams.myNumberParams;
break;
case DATE:
paramsListForCompositePart = theParams.myDateParams;
break;
case STRING:
paramsListForCompositePart = theParams.myStringParams;
break;
case TOKEN:
paramsListForCompositePart = theParams.myTokenParams;
break;
case QUANTITY:
paramsListForCompositePart = theParams.myQuantityParams;
break;
case URI:
paramsListForCompositePart = theParams.myUriParams;
break;
case REFERENCE:
case SPECIAL:
case COMPOSITE:
case HAS:
break;
}
if (paramsListForCompositePart != null) {
paramsListForCompositePart = paramsListForCompositePart.stream()
.filter(t -> t.getParamName().equals(nextCompositeOf.getName()))
.collect(Collectors.toList());
}
return paramsListForCompositePart;
}
@VisibleForTesting
public void setDaoSearchParamSynchronizer(DaoSearchParamSynchronizer theDaoSearchParamSynchronizer) {
myDaoSearchParamSynchronizer = theDaoSearchParamSynchronizer;
}
public void storeUniqueComboParameters(
ResourceIndexedSearchParams theParams,
ResourceTable theEntity,
ResourceIndexedSearchParams theExistingParams) {
/*
* String Uniques
*/
if (myStorageSettings.isUniqueIndexesEnabled()) {
for (ResourceIndexedComboStringUnique next : DaoSearchParamSynchronizer.subtract(
theExistingParams.myComboStringUniques, theParams.myComboStringUniques)) {
ourLog.debug("Removing unique index: {}", next);
myEntityManager.remove(next);
theEntity.getParamsComboStringUnique().remove(next);
}
boolean haveNewStringUniqueParams = false;
for (ResourceIndexedComboStringUnique next : DaoSearchParamSynchronizer.subtract(
theParams.myComboStringUniques, theExistingParams.myComboStringUniques)) {
if (myStorageSettings.isUniqueIndexesCheckedBeforeSave()) {
ResourceIndexedComboStringUnique existing =
myResourceIndexedCompositeStringUniqueDao.findByQueryString(next.getIndexString());
if (existing != null) {
String searchParameterId = "(unknown)";
if (next.getSearchParameterId() != null) {
searchParameterId = next.getSearchParameterId()
.toUnqualifiedVersionless()
.getValue();
}
String msg = myFhirContext
.getLocalizer()
.getMessage(
BaseHapiFhirDao.class,
"uniqueIndexConflictFailure",
theEntity.getResourceType(),
next.getIndexString(),
existing.getResource()
.getIdDt()
.toUnqualifiedVersionless()
.getValue(),
searchParameterId);
// Use ResourceVersionConflictException here because the HapiTransactionService
// catches this and can retry it if needed
throw new ResourceVersionConflictException(Msg.code(1093) + msg);
}
}
ourLog.debug("Persisting unique index: {}", next);
myEntityManager.persist(next);
haveNewStringUniqueParams = true;
}
theEntity.setParamsComboStringUniquePresent(
theParams.myComboStringUniques.size() > 0 || haveNewStringUniqueParams);
}
}
}

View File

@ -36,6 +36,7 @@ import ca.uhn.fhir.rest.param.NumberParam;
import ca.uhn.fhir.rest.param.ParamPrefixEnum;
import ca.uhn.fhir.rest.param.QuantityParam;
import ca.uhn.fhir.rest.param.ReferenceParam;
import ca.uhn.fhir.rest.param.SpecialParam;
import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.rest.param.UriParam;
@ -122,14 +123,12 @@ public class ExtendedHSearchClauseBuilder {
}
@Nonnull
private Set<String> extractOrStringParams(List<? extends IQueryParameterType> nextAnd) {
private Set<String> extractOrStringParams(String theSearchParamName, List<? extends IQueryParameterType> nextAnd) {
Set<String> terms = new HashSet<>();
for (IQueryParameterType nextOr : nextAnd) {
String nextValueTrimmed;
if (nextOr instanceof StringParam) {
StringParam nextOrString = (StringParam) nextOr;
nextValueTrimmed =
StringUtils.defaultString(nextOrString.getValue()).trim();
if (isStringParamOrEquivalent(theSearchParamName, nextOr)) {
nextValueTrimmed = getTrimmedStringValue(nextOr);
} else if (nextOr instanceof TokenParam) {
TokenParam nextOrToken = (TokenParam) nextOr;
nextValueTrimmed = nextOrToken.getValue();
@ -150,6 +149,34 @@ public class ExtendedHSearchClauseBuilder {
return terms;
}
private String getTrimmedStringValue(IQueryParameterType nextOr) {
String value;
if (nextOr instanceof StringParam) {
value = ((StringParam) nextOr).getValue();
} else if (nextOr instanceof SpecialParam) {
value = ((SpecialParam) nextOr).getValue();
} else {
throw new IllegalArgumentException(Msg.code(2535)
+ "Failed to extract value for fulltext search from parameter. Needs to be a `string` parameter, or a `_text` or `_content` special parameter: "
+ nextOr);
}
return StringUtils.defaultString(value).trim();
}
/**
* String search params are valid, as are the two special params _content and _text.
*
* @param theSearchParamName the name of the search parameter
* @param nextOr one of the OR values of the query parameter
*
* @return whether we can treat this parameter as a string
*/
private static boolean isStringParamOrEquivalent(String theSearchParamName, IQueryParameterType nextOr) {
List<String> specialSearchParamsToTreatAsStrings = List.of(Constants.PARAM_TEXT, Constants.PARAM_CONTENT);
return (nextOr instanceof StringParam)
|| (nextOr instanceof SpecialParam && specialSearchParamsToTreatAsStrings.contains(theSearchParamName));
}
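A short illustration of the two shapes this predicate accepts; the values are hypothetical, and SpecialParam is how _text/_content values arrive from the wire:

isStringParamOrEquivalent("name", new StringParam("smith"));                        // true: plain string SP
isStringParamOrEquivalent(Constants.PARAM_TEXT, new SpecialParam().setValue("x"));  // true: _text carried as SpecialParam
isStringParamOrEquivalent("name", new SpecialParam().setValue("x"));                // false: special param on an ordinary SP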
public void addTokenUnmodifiedSearch(String theSearchParamName, List<List<IQueryParameterType>> theAndOrTerms) {
if (CollectionUtils.isEmpty(theAndOrTerms)) {
return;
@ -229,22 +256,57 @@ public class ExtendedHSearchClauseBuilder {
break;
}
for (List<? extends IQueryParameterType> nextOrList : stringAndOrTerms) {
Set<String> orTerms = TermHelper.makePrefixSearchTerm(extractOrStringParams(nextOrList));
ourLog.debug("addStringTextSearch {}, {}", theSearchParamName, orTerms);
if (!orTerms.isEmpty()) {
String query = orTerms.stream().map(s -> "( " + s + " )").collect(Collectors.joining(" | "));
myRootClause.must(myRootContext
.simpleQueryString()
.field(fieldName)
.matching(query)
.defaultOperator(BooleanOperator.AND)); // term value may contain multiple tokens. Require all of them to be present.
} else {
ourLog.warn("No Terms found in query parameter {}", nextOrList);
if (isContainsSearch(theSearchParamName, stringAndOrTerms)) {
for (List<? extends IQueryParameterType> nextOrList : stringAndOrTerms) {
addPreciseMatchClauses(theSearchParamName, nextOrList, fieldName);
}
} else {
for (List<? extends IQueryParameterType> nextOrList : stringAndOrTerms) {
addSimpleQueryMatchClauses(theSearchParamName, nextOrList, fieldName);
}
}
}
/**
* This route is used for standard string searches, and for `_text` or `_content`. For each term, we build a `simpleQueryString` element, which allows Hibernate Search to query normalized, analyzed, indexed fields.
*
* @param theSearchParamName The name of the search parameter
* @param nextOrList the list of query parameters
* @param fieldName the field name in the index document to compare with.
*/
private void addSimpleQueryMatchClauses(
String theSearchParamName, List<? extends IQueryParameterType> nextOrList, String fieldName) {
Set<String> orTerms = TermHelper.makePrefixSearchTerm(extractOrStringParams(theSearchParamName, nextOrList));
ourLog.debug("addStringTextSearch {}, {}", theSearchParamName, orTerms);
if (!orTerms.isEmpty()) {
String query = orTerms.stream().map(s -> "( " + s + " )").collect(Collectors.joining(" | "));
myRootClause.must(myRootContext
.simpleQueryString()
.field(fieldName)
.matching(query)
.defaultOperator(BooleanOperator.AND)); // term value may contain multiple tokens; require all of them to be present
} else {
ourLog.warn("No Terms found in query parameter {}", nextOrList);
}
}
/**
* Note that this `match()` operation is different from our standard behaviour, which uses `simpleQueryString()`. This `match()` forces a precise string match, whereas `simpleQueryString()` applies a looser
* check against a collection of terms. We only use this when we see a `_text:contains=` or `_content:contains=` search.
*
* @param theSearchParamName the name of the search parameter
* @param nextOrList the list of query parameters
* @param fieldName the field name in the index document to compare with.
*/
private void addPreciseMatchClauses(
String theSearchParamName, List<? extends IQueryParameterType> nextOrList, String fieldName) {
Set<String> orTerms = TermHelper.makePrefixSearchTerm(extractOrStringParams(theSearchParamName, nextOrList));
for (String orTerm : orTerms) {
myRootClause.must(myRootContext.match().field(fieldName).matching(orTerm));
}
}
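Side by side, the two Hibernate Search predicate styles this class now chooses between; the field paths below are hypothetical:

// Loose, analyzed matching: each OR term becomes "( term )", terms are joined
// with "|", and AND requires every token inside a term to be present.
f.simpleQueryString()
        .field("sp.name.string.text")
        .matching("( john smith ) | ( jon smith )")
        .defaultOperator(BooleanOperator.AND);

// Precise matching, used only for _text:contains= / _content:contains= searches.
f.match().field("myContentText").matching("john smith");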
@ -252,7 +314,7 @@ public class ExtendedHSearchClauseBuilder {
String fieldPath = joinPath(SEARCH_PARAM_ROOT, theSearchParamName, INDEX_TYPE_STRING, IDX_STRING_EXACT);
for (List<? extends IQueryParameterType> nextAnd : theStringAndOrTerms) {
Set<String> terms = extractOrStringParams(nextAnd);
Set<String> terms = extractOrStringParams(theSearchParamName, nextAnd);
ourLog.debug("addStringExactSearch {} {}", theSearchParamName, terms);
List<? extends PredicateFinalStep> orTerms = terms.stream()
.map(s -> myRootContext.match().field(fieldPath).matching(s))
@ -266,7 +328,7 @@ public class ExtendedHSearchClauseBuilder {
String theSearchParamName, List<List<IQueryParameterType>> theStringAndOrTerms) {
String fieldPath = joinPath(SEARCH_PARAM_ROOT, theSearchParamName, INDEX_TYPE_STRING, IDX_STRING_NORMALIZED);
for (List<? extends IQueryParameterType> nextAnd : theStringAndOrTerms) {
Set<String> terms = extractOrStringParams(nextAnd);
Set<String> terms = extractOrStringParams(theSearchParamName, nextAnd);
ourLog.debug("addStringContainsSearch {} {}", theSearchParamName, terms);
List<? extends PredicateFinalStep> orTerms = terms.stream()
// wildcard is a term-level query, so queries aren't analyzed. Do our own normalization first.
@ -294,7 +356,7 @@ public class ExtendedHSearchClauseBuilder {
String theSearchParamName, List<List<IQueryParameterType>> theStringAndOrTerms) {
PathContext context = contextForFlatSP(theSearchParamName);
for (List<? extends IQueryParameterType> nextOrList : theStringAndOrTerms) {
Set<String> terms = extractOrStringParams(nextOrList);
Set<String> terms = extractOrStringParams(theSearchParamName, nextOrList);
ourLog.debug("addStringUnmodifiedSearch {} {}", theSearchParamName, terms);
List<PredicateFinalStep> orTerms = terms.stream()
.map(s -> buildStringUnmodifiedClause(s, context))
@ -317,7 +379,7 @@ public class ExtendedHSearchClauseBuilder {
String theSearchParamName, List<List<IQueryParameterType>> theReferenceAndOrTerms) {
String fieldPath = joinPath(SEARCH_PARAM_ROOT, theSearchParamName, "reference", "value");
for (List<? extends IQueryParameterType> nextAnd : theReferenceAndOrTerms) {
Set<String> terms = extractOrStringParams(nextAnd);
Set<String> terms = extractOrStringParams(theSearchParamName, nextAnd);
ourLog.trace("reference unchained search {}", terms);
List<? extends PredicateFinalStep> orTerms = terms.stream()
@ -832,4 +894,17 @@ public class ExtendedHSearchClauseBuilder {
return compositeClause;
}
private boolean hasAContainsModifier(List<List<IQueryParameterType>> stringAndOrTerms) {
return stringAndOrTerms.stream()
.flatMap(List::stream)
.anyMatch(next ->
Constants.PARAMQUALIFIER_STRING_CONTAINS.equalsIgnoreCase(next.getQueryParameterQualifier()));
}
private boolean isContainsSearch(String theSearchParamName, List<List<IQueryParameterType>> stringAndOrTerms) {
return (Constants.PARAM_TEXT.equalsIgnoreCase(theSearchParamName)
|| Constants.PARAM_CONTENT.equalsIgnoreCase(theSearchParamName))
&& hasAContainsModifier(stringAndOrTerms);
}
}
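What flips the builder into precise-match mode is the :contains qualifier on _text or _content, not the parameter type; a hypothetical illustration:

StringParam text = new StringParam("penicillin allergy");
text.setContains(true); // arrives on the wire as _text:contains=penicillin%20allergy
// getQueryParameterQualifier() should now report ":contains", which is what
// hasAContainsModifier() compares against Constants.PARAMQUALIFIER_STRING_CONTAINS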

View File

@ -20,6 +20,7 @@
package ca.uhn.fhir.jpa.dao.search;
import ca.uhn.fhir.context.RuntimeSearchParam;
import ca.uhn.fhir.jpa.model.search.ExtendedHSearchBuilderConsumeAdvancedQueryClausesParams;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.searchparam.util.JpaParamUtil;
import ca.uhn.fhir.model.api.IQueryParameterType;
@ -34,6 +35,7 @@ import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.rest.param.UriParam;
import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
import ca.uhn.fhir.rest.server.util.ResourceSearchParams;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import org.apache.commons.collections4.CollectionUtils;
@ -44,6 +46,7 @@ import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import static ca.uhn.fhir.rest.api.Constants.PARAMQUALIFIER_MISSING;
@ -59,17 +62,56 @@ public class ExtendedHSearchSearchBuilder {
public static final Set<String> ourUnsafeSearchParmeters = Sets.newHashSet("_id", "_meta");
/**
* Are any of the queries supported by our indexing?
* Determine whether the ExtendedHSearchSearchBuilder can support this parameter.
* @param theParamName the param name
* @param theActiveParamsForResourceType active search parameters for the desired resource type
* @return whether this search parameter is supported by Hibernate Search
*/
public boolean isSupportsSomeOf(SearchParameterMap myParams) {
return myParams.getSort() != null
|| myParams.getLastUpdated() != null
|| myParams.entrySet().stream()
.filter(e -> !ourUnsafeSearchParmeters.contains(e.getKey()))
// each and clause may have a different modifier, so split down to the ORs
.flatMap(andList -> andList.getValue().stream())
.flatMap(Collection::stream)
.anyMatch(this::isParamTypeSupported);
public boolean supportsSearchParameter(String theParamName, ResourceSearchParams theActiveParamsForResourceType) {
if (theActiveParamsForResourceType == null) {
return false;
}
if (ourUnsafeSearchParmeters.contains(theParamName)) {
return false;
}
if (!theActiveParamsForResourceType.containsParamName(theParamName)) {
return false;
}
return true;
}
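In effect, with hypothetical values:

supportsSearchParameter("_id", activeParams);    // false: listed in ourUnsafeSearchParmeters
supportsSearchParameter("name", activeParams);   // true, provided "name" is an active SP for the type
supportsSearchParameter("bogus", activeParams);  // false: not registered for the resource type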
/**
* Are any of the queries supported by our indexing?
* -
* If not, do not use Hibernate Search, because the results
* would be incorrect.
*/
public boolean canUseHibernateSearch(
String theResourceType, SearchParameterMap myParams, ISearchParamRegistry theSearchParamRegistry) {
boolean canUseHibernate = true;
ResourceSearchParams resourceActiveSearchParams = theSearchParamRegistry.getActiveSearchParams(theResourceType);
for (String paramName : myParams.keySet()) {
// is this parameter supported?
if (!supportsSearchParameter(paramName, resourceActiveSearchParams)) {
canUseHibernate = false;
} else {
// are the parameter values supported?
canUseHibernate =
myParams.get(paramName).stream()
.flatMap(Collection::stream)
.collect(Collectors.toList())
.stream()
.anyMatch(this::isParamTypeSupported);
}
// if not supported, don't use
if (!canUseHibernate) {
return false;
}
}
return canUseHibernate;
}
/**
@ -166,86 +208,91 @@ public class ExtendedHSearchSearchBuilder {
}
public void addAndConsumeAdvancedQueryClauses(
ExtendedHSearchClauseBuilder builder,
String theResourceType,
SearchParameterMap theParams,
ISearchParamRegistry theSearchParamRegistry) {
ExtendedHSearchClauseBuilder theBuilder,
ExtendedHSearchBuilderConsumeAdvancedQueryClausesParams theMethodParams) {
SearchParameterMap searchParameterMap = theMethodParams.getSearchParameterMap();
String resourceType = theMethodParams.getResourceType();
ISearchParamRegistry searchParamRegistry = theMethodParams.getSearchParamRegistry();
// copy the keys to avoid concurrent modification error
ArrayList<String> paramNames = compileParamNames(theParams);
ArrayList<String> paramNames = compileParamNames(searchParameterMap);
ResourceSearchParams activeSearchParams = searchParamRegistry.getActiveSearchParams(resourceType);
for (String nextParam : paramNames) {
if (ourUnsafeSearchParmeters.contains(nextParam)) {
continue;
}
RuntimeSearchParam activeParam = theSearchParamRegistry.getActiveSearchParam(theResourceType, nextParam);
if (activeParam == null) {
if (!supportsSearchParameter(nextParam, activeSearchParams)) {
// ignore magic params handled in JPA
continue;
}
RuntimeSearchParam activeParam = activeSearchParams.get(nextParam);
// NOTE - keep this in sync with isParamTypeSupported() above.
switch (activeParam.getParamType()) {
case TOKEN:
List<List<IQueryParameterType>> tokenTextAndOrTerms =
theParams.removeByNameAndModifier(nextParam, Constants.PARAMQUALIFIER_TOKEN_TEXT);
builder.addStringTextSearch(nextParam, tokenTextAndOrTerms);
searchParameterMap.removeByNameAndModifier(nextParam, Constants.PARAMQUALIFIER_TOKEN_TEXT);
theBuilder.addStringTextSearch(nextParam, tokenTextAndOrTerms);
List<List<IQueryParameterType>> tokenUnmodifiedAndOrTerms =
theParams.removeByNameUnmodified(nextParam);
builder.addTokenUnmodifiedSearch(nextParam, tokenUnmodifiedAndOrTerms);
searchParameterMap.removeByNameUnmodified(nextParam);
theBuilder.addTokenUnmodifiedSearch(nextParam, tokenUnmodifiedAndOrTerms);
break;
case STRING:
List<List<IQueryParameterType>> stringTextAndOrTerms =
theParams.removeByNameAndModifier(nextParam, Constants.PARAMQUALIFIER_TOKEN_TEXT);
builder.addStringTextSearch(nextParam, stringTextAndOrTerms);
searchParameterMap.removeByNameAndModifier(nextParam, Constants.PARAMQUALIFIER_TOKEN_TEXT);
theBuilder.addStringTextSearch(nextParam, stringTextAndOrTerms);
List<List<IQueryParameterType>> stringExactAndOrTerms =
theParams.removeByNameAndModifier(nextParam, Constants.PARAMQUALIFIER_STRING_EXACT);
builder.addStringExactSearch(nextParam, stringExactAndOrTerms);
List<List<IQueryParameterType>> stringExactAndOrTerms = searchParameterMap.removeByNameAndModifier(
nextParam, Constants.PARAMQUALIFIER_STRING_EXACT);
theBuilder.addStringExactSearch(nextParam, stringExactAndOrTerms);
List<List<IQueryParameterType>> stringContainsAndOrTerms =
theParams.removeByNameAndModifier(nextParam, Constants.PARAMQUALIFIER_STRING_CONTAINS);
builder.addStringContainsSearch(nextParam, stringContainsAndOrTerms);
searchParameterMap.removeByNameAndModifier(
nextParam, Constants.PARAMQUALIFIER_STRING_CONTAINS);
theBuilder.addStringContainsSearch(nextParam, stringContainsAndOrTerms);
List<List<IQueryParameterType>> stringAndOrTerms = theParams.removeByNameUnmodified(nextParam);
builder.addStringUnmodifiedSearch(nextParam, stringAndOrTerms);
List<List<IQueryParameterType>> stringAndOrTerms =
searchParameterMap.removeByNameUnmodified(nextParam);
theBuilder.addStringUnmodifiedSearch(nextParam, stringAndOrTerms);
break;
case QUANTITY:
List<List<IQueryParameterType>> quantityAndOrTerms = theParams.removeByNameUnmodified(nextParam);
builder.addQuantityUnmodifiedSearch(nextParam, quantityAndOrTerms);
List<List<IQueryParameterType>> quantityAndOrTerms =
searchParameterMap.removeByNameUnmodified(nextParam);
theBuilder.addQuantityUnmodifiedSearch(nextParam, quantityAndOrTerms);
break;
case REFERENCE:
List<List<IQueryParameterType>> referenceAndOrTerms = theParams.removeByNameUnmodified(nextParam);
builder.addReferenceUnchainedSearch(nextParam, referenceAndOrTerms);
List<List<IQueryParameterType>> referenceAndOrTerms =
searchParameterMap.removeByNameUnmodified(nextParam);
theBuilder.addReferenceUnchainedSearch(nextParam, referenceAndOrTerms);
break;
case DATE:
List<List<IQueryParameterType>> dateAndOrTerms = nextParam.equalsIgnoreCase("_lastupdated")
? getLastUpdatedAndOrList(theParams)
: theParams.removeByNameUnmodified(nextParam);
builder.addDateUnmodifiedSearch(nextParam, dateAndOrTerms);
? getLastUpdatedAndOrList(searchParameterMap)
: searchParameterMap.removeByNameUnmodified(nextParam);
theBuilder.addDateUnmodifiedSearch(nextParam, dateAndOrTerms);
break;
case COMPOSITE:
List<List<IQueryParameterType>> compositeAndOrTerms = theParams.removeByNameUnmodified(nextParam);
List<List<IQueryParameterType>> compositeAndOrTerms =
searchParameterMap.removeByNameUnmodified(nextParam);
// RuntimeSearchParam only points to the subs by reference. Resolve here while we have
// ISearchParamRegistry
List<RuntimeSearchParam> subSearchParams =
JpaParamUtil.resolveCompositeComponentsDeclaredOrder(theSearchParamRegistry, activeParam);
builder.addCompositeUnmodifiedSearch(activeParam, subSearchParams, compositeAndOrTerms);
JpaParamUtil.resolveCompositeComponentsDeclaredOrder(searchParamRegistry, activeParam);
theBuilder.addCompositeUnmodifiedSearch(activeParam, subSearchParams, compositeAndOrTerms);
break;
case URI:
List<List<IQueryParameterType>> uriUnmodifiedAndOrTerms =
theParams.removeByNameUnmodified(nextParam);
builder.addUriUnmodifiedSearch(nextParam, uriUnmodifiedAndOrTerms);
searchParameterMap.removeByNameUnmodified(nextParam);
theBuilder.addUriUnmodifiedSearch(nextParam, uriUnmodifiedAndOrTerms);
break;
case NUMBER:
List<List<IQueryParameterType>> numberUnmodifiedAndOrTerms = theParams.remove(nextParam);
builder.addNumberUnmodifiedSearch(nextParam, numberUnmodifiedAndOrTerms);
List<List<IQueryParameterType>> numberUnmodifiedAndOrTerms = searchParameterMap.remove(nextParam);
theBuilder.addNumberUnmodifiedSearch(nextParam, numberUnmodifiedAndOrTerms);
break;
default:

View File

@ -22,6 +22,7 @@ package ca.uhn.fhir.jpa.dao.search;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.model.entity.StorageSettings;
import ca.uhn.fhir.jpa.model.search.ExtendedHSearchBuilderConsumeAdvancedQueryClausesParams;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.searchparam.util.LastNParameterHelper;
import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
@ -67,8 +68,12 @@ public class LastNOperation {
b.must(f.match().field("myResourceType").matching(OBSERVATION_RES_TYPE));
ExtendedHSearchClauseBuilder builder =
new ExtendedHSearchClauseBuilder(myFhirContext, myStorageSettings, b, f);
myExtendedHSearchSearchBuilder.addAndConsumeAdvancedQueryClauses(
builder, OBSERVATION_RES_TYPE, theParams.clone(), mySearchParamRegistry);
ExtendedHSearchBuilderConsumeAdvancedQueryClausesParams params =
new ExtendedHSearchBuilderConsumeAdvancedQueryClausesParams();
params.setResourceType(OBSERVATION_RES_TYPE)
.setSearchParameterMap(theParams.clone())
.setSearchParamRegistry(mySearchParamRegistry);
myExtendedHSearchSearchBuilder.addAndConsumeAdvancedQueryClauses(builder, params);
}))
.aggregation(observationsByCodeKey, f -> f.fromJson(lastNAggregation.toAggregation()))
.fetch(0);

View File

@ -119,13 +119,13 @@ public class Batch2JobInstanceEntity implements Serializable {
@Column(name = "WORK_CHUNKS_PURGED", nullable = false)
private boolean myWorkChunksPurged;
@Column(name = "PROGRESS_PCT")
@Column(name = "PROGRESS_PCT", nullable = false)
private double myProgress;
@Column(name = "ERROR_MSG", length = ERROR_MSG_MAX_LENGTH, nullable = true)
private String myErrorMessage;
@Column(name = "ERROR_COUNT")
@Column(name = "ERROR_COUNT", nullable = false)
private int myErrorCount;
@Column(name = "EST_REMAINING", length = TIME_REMAINING_LENGTH, nullable = true)

View File

@ -138,6 +138,10 @@ public class Batch2WorkChunkEntity implements Serializable {
/**
* The number of times the work chunk has had its state set back to POLL_WAITING.
* <p>
* TODO: Note that this column was added in 7.2.0, so it is nullable in order to
* account for rows that predate it. In the future we should make this
* column non-null.
*/
@Column(name = "POLL_ATTEMPTS", nullable = true)
private Integer myPollAttempts;
@ -145,7 +149,9 @@ public class Batch2WorkChunkEntity implements Serializable {
/**
* Default constructor for Hibernate.
*/
public Batch2WorkChunkEntity() {}
public Batch2WorkChunkEntity() {
myPollAttempts = 0;
}
/**
* Projection constructor for no-data path.
@ -184,7 +190,7 @@ public class Batch2WorkChunkEntity implements Serializable {
myRecordsProcessed = theRecordsProcessed;
myWarningMessage = theWarningMessage;
myNextPollTime = theNextPollTime;
myPollAttempts = thePollAttempts;
myPollAttempts = thePollAttempts != null ? thePollAttempts : 0;
}
public static Batch2WorkChunkEntity fromWorkChunk(WorkChunk theWorkChunk) {

View File

@ -58,7 +58,7 @@ public class HapiFhirEnversRevision implements Serializable {
@SequenceGenerator(name = "SEQ_HFJ_REVINFO", sequenceName = "SEQ_HFJ_REVINFO")
@GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_HFJ_REVINFO")
@RevisionNumber
@Column(name = "REV")
@Column(name = "REV", nullable = false)
private long myRev;
@RevisionTimestamp

View File

@ -117,11 +117,12 @@ public class TermConcept implements Serializable {
@ManyToOne(fetch = FetchType.LAZY)
@JoinColumn(
name = "CODESYSTEM_PID",
nullable = false,
referencedColumnName = "PID",
foreignKey = @ForeignKey(name = "FK_CONCEPT_PID_CS_PID"))
private TermCodeSystemVersion myCodeSystem;
@Column(name = "CODESYSTEM_PID", insertable = false, updatable = false)
@Column(name = "CODESYSTEM_PID", insertable = false, updatable = false, nullable = false)
@GenericField(name = "myCodeSystemVersionPid")
private long myCodeSystemVersionPid;

View File

@ -25,7 +25,6 @@ import ca.uhn.fhir.jpa.entity.BulkImportJobEntity;
import ca.uhn.fhir.jpa.entity.Search;
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.taskdef.ArbitrarySqlTask;
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask;
import ca.uhn.fhir.jpa.migrate.taskdef.CalculateHashesTask;
import ca.uhn.fhir.jpa.migrate.taskdef.CalculateOrdinalDatesTask;
import ca.uhn.fhir.jpa.migrate.taskdef.ColumnTypeEnum;
@ -33,9 +32,12 @@ import ca.uhn.fhir.jpa.migrate.taskdef.ForceIdMigrationCopyTask;
import ca.uhn.fhir.jpa.migrate.taskdef.ForceIdMigrationFixTask;
import ca.uhn.fhir.jpa.migrate.tasks.api.BaseMigrationTasks;
import ca.uhn.fhir.jpa.migrate.tasks.api.Builder;
import ca.uhn.fhir.jpa.migrate.tasks.api.ColumnAndNullable;
import ca.uhn.fhir.jpa.migrate.tasks.api.TaskFlagEnum;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.entity.BaseResourceIndexedSearchParam;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedComboStringUnique;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamDate;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamQuantity;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamString;
@ -127,7 +129,7 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
protected void init740() {
// Start of migrations from 7.2 to 7.4
Builder version = forVersion(VersionEnum.V7_4_0);
final Builder version = forVersion(VersionEnum.V7_4_0);
{
version.onTable("HFJ_RES_SEARCH_URL")
@ -135,6 +137,309 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
.toColumn("RES_ID")
.references("HFJ_RESOURCE", "RES_ID");
}
/*
* Make a number of columns non-nullable. Note that we won't actually apply this migration
* on a live system, as it would take a very long time to execute these changes on heavily
* loaded databases.
*/
// Skipping numbers 20240601.1 and 20240601.2, as they were found during code review
// not to be needed.
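Each migration below follows the same guarded builder pattern; a generic sketch with a hypothetical table, column, and version id (the semantics in the comments are assumptions drawn from the surrounding notes):

version.onTable("EXAMPLE_TABLE")
        .modifyColumn("20240601.x", "EXAMPLE_COL")
        .nonNullable()
        .withType(ColumnTypeEnum.BOOLEAN)
        .heavyweightSkipByDefault() // skipped unless heavyweight migrations are explicitly enabled
        .failureAllowed();          // a failure is logged but does not abort the migration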
version.onTable("HFJ_RESOURCE")
.modifyColumn("20240601.3", "SP_HAS_LINKS")
.nonNullable()
.withType(ColumnTypeEnum.BOOLEAN)
.heavyweightSkipByDefault()
.failureAllowed();
version.onTable("HFJ_RESOURCE")
.modifyColumn("20240601.4", "SP_COORDS_PRESENT")
.nonNullable()
.withType(ColumnTypeEnum.BOOLEAN)
.heavyweightSkipByDefault()
.failureAllowed();
version.onTable("HFJ_RESOURCE")
.modifyColumn("20240601.5", "SP_DATE_PRESENT")
.nonNullable()
.withType(ColumnTypeEnum.BOOLEAN)
.heavyweightSkipByDefault()
.failureAllowed();
version.onTable("HFJ_RESOURCE")
.modifyColumn("20240601.6", "SP_NUMBER_PRESENT")
.nonNullable()
.withType(ColumnTypeEnum.BOOLEAN)
.heavyweightSkipByDefault()
.failureAllowed();
version.onTable("HFJ_RESOURCE")
.modifyColumn("20240601.7", "SP_QUANTITY_PRESENT")
.nonNullable()
.withType(ColumnTypeEnum.BOOLEAN)
.heavyweightSkipByDefault()
.failureAllowed();
version.onTable("HFJ_RESOURCE")
.modifyColumn("20240601.8", "SP_QUANTITY_NRML_PRESENT")
.nonNullable()
.withType(ColumnTypeEnum.BOOLEAN)
.heavyweightSkipByDefault()
.failureAllowed();
version.onTable("HFJ_RESOURCE")
.modifyColumn("20240601.9", "SP_STRING_PRESENT")
.nonNullable()
.withType(ColumnTypeEnum.BOOLEAN)
.heavyweightSkipByDefault()
.failureAllowed();
version.onTable("HFJ_RESOURCE")
.modifyColumn("20240601.10", "SP_TOKEN_PRESENT")
.nonNullable()
.withType(ColumnTypeEnum.BOOLEAN)
.heavyweightSkipByDefault()
.failureAllowed();
version.onTable("HFJ_RESOURCE")
.modifyColumn("20240601.11", "SP_URI_PRESENT")
.nonNullable()
.withType(ColumnTypeEnum.BOOLEAN)
.heavyweightSkipByDefault()
.failureAllowed();
version.onTable("HFJ_RESOURCE")
.modifyColumn("20240601.12", "RES_VER")
.nonNullable()
.withType(ColumnTypeEnum.LONG)
.heavyweightSkipByDefault()
.failureAllowed();
version.onTable("TRM_CONCEPT")
.modifyColumn("20240601.13", "CODESYSTEM_PID")
.nonNullable()
.withType(ColumnTypeEnum.LONG)
.heavyweightSkipByDefault()
.failureAllowed();
version.onTable("BT2_JOB_INSTANCE")
.modifyColumn("20240601.14", "PROGRESS_PCT")
.nonNullable()
.withType(ColumnTypeEnum.DOUBLE)
.heavyweightSkipByDefault()
.failureAllowed();
version.onTable("BT2_JOB_INSTANCE")
.modifyColumn("20240601.15", "ERROR_COUNT")
.nonNullable()
.withType(ColumnTypeEnum.INT)
.heavyweightSkipByDefault()
.failureAllowed();
version.onTable("HFJ_BINARY_STORAGE_BLOB")
.modifyColumn("20240601.16", "BLOB_SIZE")
.nonNullable()
.withType(ColumnTypeEnum.LONG)
.heavyweightSkipByDefault()
.failureAllowed();
/*
* Add RES_ID to two indexes on HFJ_RES_VER which support history operations.
* This makes server and type level _history work properly on large databases
* on postgres. These are both marked as heavyweightSkipByDefault because the
* necessary reindexing would be very expensive for a rarely used FHIR feature.
*/
version.onTable("HFJ_RES_VER")
.dropIndex("20240601.17", "IDX_RESVER_TYPE_DATE")
.heavyweightSkipByDefault();
version.onTable("HFJ_RES_VER")
.addIndex("20240601.18", "IDX_RESVER_TYPE_DATE")
.unique(false)
.withColumns("RES_TYPE", "RES_UPDATED", "RES_ID")
.heavyweightSkipByDefault();
version.onTable("HFJ_RES_VER")
.dropIndex("20240601.19", "IDX_RESVER_DATE")
.heavyweightSkipByDefault();
version.onTable("HFJ_RES_VER")
.addIndex("20240601.20", "IDX_RESVER_DATE")
.unique(false)
.withColumns("RES_UPDATED", "RES_ID")
.heavyweightSkipByDefault();
// Allow null values in SP_NAME, RES_TYPE columns for all HFJ_SPIDX_* tables. These are marked as failure
// allowed, since SQL Server won't let us change nullability on columns with indexes pointing to them.
{
Builder.BuilderWithTableName spidxCoords = version.onTable("HFJ_SPIDX_COORDS");
spidxCoords
.modifyColumn("20240617.1", "SP_NAME")
.nullable()
.withType(ColumnTypeEnum.STRING, 100)
.failureAllowed();
spidxCoords
.modifyColumn("20240617.2", "RES_TYPE")
.nullable()
.withType(ColumnTypeEnum.STRING, 100)
.failureAllowed();
Builder.BuilderWithTableName spidxDate = version.onTable("HFJ_SPIDX_DATE");
spidxDate
.modifyColumn("20240617.3", "SP_NAME")
.nullable()
.withType(ColumnTypeEnum.STRING, 100)
.failureAllowed();
spidxDate
.modifyColumn("20240617.4", "RES_TYPE")
.nullable()
.withType(ColumnTypeEnum.STRING, 100)
.failureAllowed();
Builder.BuilderWithTableName spidxNumber = version.onTable("HFJ_SPIDX_NUMBER");
spidxNumber
.modifyColumn("20240617.5", "SP_NAME")
.nullable()
.withType(ColumnTypeEnum.STRING, 100)
.failureAllowed();
spidxNumber
.modifyColumn("20240617.6", "RES_TYPE")
.nullable()
.withType(ColumnTypeEnum.STRING, 100)
.failureAllowed();
Builder.BuilderWithTableName spidxQuantity = version.onTable("HFJ_SPIDX_QUANTITY");
spidxQuantity
.modifyColumn("20240617.7", "SP_NAME")
.nullable()
.withType(ColumnTypeEnum.STRING, 100)
.failureAllowed();
spidxQuantity
.modifyColumn("20240617.8", "RES_TYPE")
.nullable()
.withType(ColumnTypeEnum.STRING, 100)
.failureAllowed();
Builder.BuilderWithTableName spidxQuantityNorm = version.onTable("HFJ_SPIDX_QUANTITY_NRML");
spidxQuantityNorm
.modifyColumn("20240617.9", "SP_NAME")
.nullable()
.withType(ColumnTypeEnum.STRING, 100)
.failureAllowed();
spidxQuantityNorm
.modifyColumn("20240617.10", "RES_TYPE")
.nullable()
.withType(ColumnTypeEnum.STRING, 100)
.failureAllowed();
Builder.BuilderWithTableName spidxString = version.onTable("HFJ_SPIDX_STRING");
spidxString
.modifyColumn("20240617.11", "SP_NAME")
.nullable()
.withType(ColumnTypeEnum.STRING, 100)
.failureAllowed();
spidxString
.modifyColumn("20240617.12", "RES_TYPE")
.nullable()
.withType(ColumnTypeEnum.STRING, 100)
.failureAllowed();
Builder.BuilderWithTableName spidxToken = version.onTable("HFJ_SPIDX_TOKEN");
spidxToken
.modifyColumn("20240617.13", "SP_NAME")
.nullable()
.withType(ColumnTypeEnum.STRING, 100)
.failureAllowed();
spidxToken
.modifyColumn("20240617.14", "RES_TYPE")
.nullable()
.withType(ColumnTypeEnum.STRING, 100)
.failureAllowed();
Builder.BuilderWithTableName spidxUri = version.onTable("HFJ_SPIDX_URI");
spidxUri.modifyColumn("20240617.15", "SP_NAME")
.nullable()
.withType(ColumnTypeEnum.STRING, 100)
.failureAllowed();
spidxUri.modifyColumn("20240617.16", "RES_TYPE")
.nullable()
.withType(ColumnTypeEnum.STRING, 100)
.failureAllowed();
{
// Please see https://github.com/hapifhir/hapi-fhir/issues/6033 for why we're doing this
version.onTable("HFJ_RES_SEARCH_URL")
.addColumn("20240618.2", "PARTITION_ID", -1)
.nullable()
.type(ColumnTypeEnum.INT);
version.onTable("HFJ_RES_SEARCH_URL")
.addColumn("20240618.3", "PARTITION_DATE")
.nullable()
.type(ColumnTypeEnum.DATE_ONLY);
version.executeRawSql("20240618.4", "UPDATE HFJ_RES_SEARCH_URL SET PARTITION_ID = -1");
version.onTable("HFJ_RES_SEARCH_URL")
.modifyColumn("20240618.5", "PARTITION_ID")
.nonNullable()
.withType(ColumnTypeEnum.INT);
version.onTable("HFJ_RES_SEARCH_URL").dropPrimaryKey("20240618.6");
version.onTable("HFJ_RES_SEARCH_URL").addPrimaryKey("20240618.7", "RES_SEARCH_URL", "PARTITION_ID");
}
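The block above is the standard sequence for introducing a non-null key column on a live table; condensed into steps:

// 1. add the column as nullable, so existing rows remain valid
// 2. backfill every existing row with a sentinel value (-1 here)
// 3. tighten the column to non-nullable once all rows have a value
// 4. drop and re-create the primary key so it includes the new column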
}
{
// Note that these are recreations of a previous migration from 6.6.0. The original migration
// had these set as unique, which causes SQL Server to create a filtered index. See
// https://www.sqlshack.com/introduction-to-sql-server-filtered-indexes/
// What this means for Hibernate Search is that for any nullable column, the SQLServerDialect
// will omit the whole row from the index if the value of the nullable column is actually null.
// Removing the uniqueness constraint works around this problem.
Builder.BuilderWithTableName uriTable = version.onTable("HFJ_SPIDX_URI");
uriTable.dropIndex("20240620.10", "IDX_SP_URI_HASH_URI_V2");
uriTable.dropIndex("20240620.20", "IDX_SP_URI_HASH_IDENTITY_V2");
uriTable.addIndex("20240620.30", "IDX_SP_URI_HASH_URI_V2")
.unique(false)
.online(true)
.withPossibleNullableColumns(
new ColumnAndNullable("HASH_URI", true),
new ColumnAndNullable("RES_ID", false),
new ColumnAndNullable("PARTITION_ID", true));
uriTable.addIndex("20240620.40", "IDX_SP_URI_HASH_IDENTITY_V2")
.unique(false)
.online(true)
.withPossibleNullableColumns(
new ColumnAndNullable("HASH_IDENTITY", true),
new ColumnAndNullable("SP_URI", true),
new ColumnAndNullable("RES_ID", false),
new ColumnAndNullable("PARTITION_ID", true));
}
/*
* Add hash columns to the combo param index tables
*/
{
version.onTable("HFJ_IDX_CMB_TOK_NU")
.addIndex("20240625.10", "IDX_IDXCMBTOKNU_HASHC")
.unique(false)
.withColumns("HASH_COMPLETE", "RES_ID", "PARTITION_ID");
version.onTable("HFJ_IDX_CMP_STRING_UNIQ")
.addColumn("20240625.20", "HASH_COMPLETE")
.nullable()
.type(ColumnTypeEnum.LONG);
version.onTable("HFJ_IDX_CMP_STRING_UNIQ")
.addColumn("20240625.30", "HASH_COMPLETE_2")
.nullable()
.type(ColumnTypeEnum.LONG);
version.onTable("HFJ_IDX_CMP_STRING_UNIQ")
.addTask(
new CalculateHashesTask(VersionEnum.V7_4_0, "20240625.40") {
@Override
protected boolean shouldSkipTask() {
return false;
}
}.setPidColumnName("PID")
.addCalculator(
"HASH_COMPLETE",
t -> ResourceIndexedComboStringUnique.calculateHashComplete(
t.getString("IDX_STRING")))
.addCalculator(
"HASH_COMPLETE_2",
t -> ResourceIndexedComboStringUnique.calculateHashComplete2(
t.getString("IDX_STRING")))
.setColumnName("HASH_COMPLETE"));
}
}
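For illustration, the hash helpers the task wires up can be exercised directly; the index string value here is hypothetical:

String indexString = "Patient?birthdate=1970-01-01&family=SMITH"; // hypothetical combo index string
long hashComplete = ResourceIndexedComboStringUnique.calculateHashComplete(indexString);
long hashComplete2 = ResourceIndexedComboStringUnique.calculateHashComplete2(indexString);
// The migration writes these two values into HASH_COMPLETE / HASH_COMPLETE_2
// for each existing HFJ_IDX_CMP_STRING_UNIQ row, keyed by PID.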
protected void init720() {
@ -162,15 +467,15 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
binaryStorageBlobTable
.renameColumn("20240404.1", "BLOB_ID", "CONTENT_ID")
.getLastAddedTask()
.ifPresent(BaseTask::doNothing);
.ifPresent(t -> t.addFlag(TaskFlagEnum.DO_NOTHING));
binaryStorageBlobTable
.renameColumn("20240404.2", "BLOB_SIZE", "CONTENT_SIZE")
.getLastAddedTask()
.ifPresent(BaseTask::doNothing);
.ifPresent(t -> t.addFlag(TaskFlagEnum.DO_NOTHING));
binaryStorageBlobTable
.renameColumn("20240404.3", "BLOB_HASH", "CONTENT_HASH")
.getLastAddedTask()
.ifPresent(BaseTask::doNothing);
.ifPresent(t -> t.addFlag(TaskFlagEnum.DO_NOTHING));
binaryStorageBlobTable
.modifyColumn("20240404.4", "BLOB_DATA")
@ -262,7 +567,8 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
// Move forced_id constraints to hfj_resource and the new fhir_id column
// Note: we leave the HFJ_FORCED_ID.IDX_FORCEDID_TYPE_FID index in place to support old writers for a while.
version.addTask(new ForceIdMigrationCopyTask(version.getRelease(), "20231018.1").setDoNothing(true));
version.addTask(
new ForceIdMigrationCopyTask(version.getRelease(), "20231018.1").addFlag(TaskFlagEnum.DO_NOTHING));
Builder.BuilderWithTableName hfjResource = version.onTable("HFJ_RESOURCE");
// commented out to make numeric space for the fix task below.
@ -331,7 +637,8 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
}
// This fix was bad for MSSQL, it has been set to do nothing.
version.addTask(new ForceIdMigrationFixTask(version.getRelease(), "20231213.1").setDoNothing(true));
version.addTask(
new ForceIdMigrationFixTask(version.getRelease(), "20231213.1").addFlag(TaskFlagEnum.DO_NOTHING));
// This fix will work for MSSQL or Oracle.
version.addTask(new ForceIdMigrationFixTask(version.getRelease(), "20231222.1"));
@ -814,8 +1121,8 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
version.onTable("HFJ_RES_VER")
.modifyColumn("20230421.1", "RES_TEXT_VC")
.nullable()
.failureAllowed()
.withType(ColumnTypeEnum.TEXT);
.withType(ColumnTypeEnum.TEXT)
.failureAllowed();
{
// add hash_norm to res_id to speed up joins on a second string.
@ -1751,8 +2058,8 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
version.onTable("HFJ_RES_LINK")
.modifyColumn("20210505.1", "SRC_PATH")
.nonNullable()
.failureAllowed()
.withType(ColumnTypeEnum.STRING, 500);
.withType(ColumnTypeEnum.STRING, 500)
.failureAllowed();
}
private void init530() {
@ -1813,8 +2120,8 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
quantityTable
.modifyColumn("20210116.1", "SP_VALUE")
.nullable()
.failureAllowed()
.withType(ColumnTypeEnum.DOUBLE);
.withType(ColumnTypeEnum.DOUBLE)
.failureAllowed();
// HFJ_RES_LINK
version.onTable("HFJ_RES_LINK")
@ -2011,8 +2318,8 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
version.onTable("HFJ_RES_VER")
.modifyColumn("20200220.1", "RES_ID")
.nonNullable()
.failureAllowed()
.withType(ColumnTypeEnum.LONG);
.withType(ColumnTypeEnum.LONG)
.failureAllowed();
//
// Drop unused column
@ -2168,38 +2475,38 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
version.onTable("HFJ_SPIDX_STRING")
.modifyColumn("20200420.36", "SP_MISSING")
.nonNullable()
.failureAllowed()
.withType(ColumnTypeEnum.BOOLEAN);
.withType(ColumnTypeEnum.BOOLEAN)
.failureAllowed();
version.onTable("HFJ_SPIDX_COORDS")
.modifyColumn("20200420.37", "SP_MISSING")
.nonNullable()
.failureAllowed()
.withType(ColumnTypeEnum.BOOLEAN);
.withType(ColumnTypeEnum.BOOLEAN)
.failureAllowed();
version.onTable("HFJ_SPIDX_NUMBER")
.modifyColumn("20200420.38", "SP_MISSING")
.nonNullable()
.failureAllowed()
.withType(ColumnTypeEnum.BOOLEAN);
.withType(ColumnTypeEnum.BOOLEAN)
.failureAllowed();
version.onTable("HFJ_SPIDX_TOKEN")
.modifyColumn("20200420.39", "SP_MISSING")
.nonNullable()
.failureAllowed()
.withType(ColumnTypeEnum.BOOLEAN);
.withType(ColumnTypeEnum.BOOLEAN)
.failureAllowed();
version.onTable("HFJ_SPIDX_DATE")
.modifyColumn("20200420.40", "SP_MISSING")
.nonNullable()
.failureAllowed()
.withType(ColumnTypeEnum.BOOLEAN);
.withType(ColumnTypeEnum.BOOLEAN)
.failureAllowed();
version.onTable("HFJ_SPIDX_URI")
.modifyColumn("20200420.41", "SP_MISSING")
.nonNullable()
.failureAllowed()
.withType(ColumnTypeEnum.BOOLEAN);
.withType(ColumnTypeEnum.BOOLEAN)
.failureAllowed();
version.onTable("HFJ_SPIDX_QUANTITY")
.modifyColumn("20200420.42", "SP_MISSING")
.nonNullable()
.failureAllowed()
.withType(ColumnTypeEnum.BOOLEAN);
.withType(ColumnTypeEnum.BOOLEAN)
.failureAllowed();
// Add support for integer comparisons during day-precision date search.
Builder.BuilderWithTableName spidxDate = version.onTable("HFJ_SPIDX_DATE");
@ -2309,38 +2616,38 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
version.onTable("HFJ_SPIDX_NUMBER")
.modifyColumn("20190920.1", "RES_ID")
.nonNullable()
.failureAllowed()
.withType(ColumnTypeEnum.LONG);
.withType(ColumnTypeEnum.LONG)
.failureAllowed();
version.onTable("HFJ_SPIDX_COORDS")
.modifyColumn("20190920.2", "RES_ID")
.nonNullable()
.failureAllowed()
.withType(ColumnTypeEnum.LONG);
.withType(ColumnTypeEnum.LONG)
.failureAllowed();
version.onTable("HFJ_SPIDX_TOKEN")
.modifyColumn("20190920.3", "RES_ID")
.nonNullable()
.failureAllowed()
.withType(ColumnTypeEnum.LONG);
.withType(ColumnTypeEnum.LONG)
.failureAllowed();
version.onTable("HFJ_SPIDX_STRING")
.modifyColumn("20190920.4", "RES_ID")
.nonNullable()
.failureAllowed()
.withType(ColumnTypeEnum.LONG);
.withType(ColumnTypeEnum.LONG)
.failureAllowed();
version.onTable("HFJ_SPIDX_DATE")
.modifyColumn("20190920.5", "RES_ID")
.nonNullable()
.failureAllowed()
.withType(ColumnTypeEnum.LONG);
.withType(ColumnTypeEnum.LONG)
.failureAllowed();
version.onTable("HFJ_SPIDX_QUANTITY")
.modifyColumn("20190920.6", "RES_ID")
.nonNullable()
.failureAllowed()
.withType(ColumnTypeEnum.LONG);
.withType(ColumnTypeEnum.LONG)
.failureAllowed();
version.onTable("HFJ_SPIDX_URI")
.modifyColumn("20190920.7", "RES_ID")
.nonNullable()
.failureAllowed()
.withType(ColumnTypeEnum.LONG);
.withType(ColumnTypeEnum.LONG)
.failureAllowed();
// HFJ_SEARCH
version.onTable("HFJ_SEARCH")
@ -2469,33 +2776,33 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
version.onTable("HFJ_RESOURCE")
.modifyColumn("20191002.1", "RES_TYPE")
.nonNullable()
.failureAllowed()
.withType(ColumnTypeEnum.STRING, 40);
.withType(ColumnTypeEnum.STRING, 40)
.failureAllowed();
version.onTable("HFJ_RES_VER")
.modifyColumn("20191002.2", "RES_TYPE")
.nonNullable()
.failureAllowed()
.withType(ColumnTypeEnum.STRING, 40);
.withType(ColumnTypeEnum.STRING, 40)
.failureAllowed();
version.onTable("HFJ_HISTORY_TAG")
.modifyColumn("20191002.3", "RES_TYPE")
.nonNullable()
.failureAllowed()
.withType(ColumnTypeEnum.STRING, 40);
.withType(ColumnTypeEnum.STRING, 40)
.failureAllowed();
version.onTable("HFJ_RES_LINK")
.modifyColumn("20191002.4", "SOURCE_RESOURCE_TYPE")
.nonNullable()
.failureAllowed()
.withType(ColumnTypeEnum.STRING, 40);
.withType(ColumnTypeEnum.STRING, 40)
.failureAllowed();
version.onTable("HFJ_RES_LINK")
.modifyColumn("20191002.5", "TARGET_RESOURCE_TYPE")
.nonNullable()
.failureAllowed()
.withType(ColumnTypeEnum.STRING, 40);
.withType(ColumnTypeEnum.STRING, 40)
.failureAllowed();
version.onTable("HFJ_RES_TAG")
.modifyColumn("20191002.6", "RES_TYPE")
.nonNullable()
.failureAllowed()
.withType(ColumnTypeEnum.STRING, 40);
.withType(ColumnTypeEnum.STRING, 40)
.failureAllowed();
// TermConceptDesignation
version.startSectionWithMessage("Processing table: TRM_CONCEPT_DESIG");
@ -2765,18 +3072,18 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
version.onTable("HFJ_SPIDX_COORDS")
.modifyColumn("20190814.9", "RES_TYPE")
.nonNullable()
.failureAllowed()
.withType(ColumnTypeEnum.STRING, 100);
.withType(ColumnTypeEnum.STRING, 100)
.failureAllowed();
version.onTable("HFJ_SPIDX_DATE")
.modifyColumn("20190814.10", "RES_TYPE")
.nonNullable()
.failureAllowed()
.withType(ColumnTypeEnum.STRING, 100);
.withType(ColumnTypeEnum.STRING, 100)
.failureAllowed();
version.onTable("HFJ_SPIDX_STRING")
.modifyColumn("20190814.11", "RES_TYPE")
.nonNullable()
.failureAllowed()
.withType(ColumnTypeEnum.STRING, 100);
.withType(ColumnTypeEnum.STRING, 100)
.failureAllowed();
version.onTable("HFJ_SPIDX_STRING")
.addColumn("20190814.12", "HASH_IDENTITY")
.nullable()
@ -2788,50 +3095,50 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
version.onTable("HFJ_SPIDX_COORDS")
.modifyColumn("20190814.14", "RES_TYPE")
.nonNullable()
.failureAllowed()
.withType(ColumnTypeEnum.STRING, 100);
.withType(ColumnTypeEnum.STRING, 100)
.failureAllowed();
version.onTable("HFJ_SPIDX_QUANTITY")
.modifyColumn("20190814.15", "RES_TYPE")
.nonNullable()
.failureAllowed()
.withType(ColumnTypeEnum.STRING, 100);
.withType(ColumnTypeEnum.STRING, 100)
.failureAllowed();
version.onTable("HFJ_SPIDX_QUANTITY").dropColumn("20190814.16", "HASH_UNITS_AND_VALPREFIX");
version.onTable("HFJ_SPIDX_QUANTITY").dropColumn("20190814.17", "HASH_VALPREFIX");
version.onTable("HFJ_SPIDX_NUMBER")
.modifyColumn("20190814.18", "RES_TYPE")
.nonNullable()
.failureAllowed()
.withType(ColumnTypeEnum.STRING, 100);
.withType(ColumnTypeEnum.STRING, 100)
.failureAllowed();
version.onTable("HFJ_SPIDX_TOKEN")
.modifyColumn("20190814.19", "RES_TYPE")
.nonNullable()
.failureAllowed()
.withType(ColumnTypeEnum.STRING, 100);
.withType(ColumnTypeEnum.STRING, 100)
.failureAllowed();
version.onTable("HFJ_SPIDX_URI")
.modifyColumn("20190814.20", "RES_TYPE")
.nonNullable()
.failureAllowed()
.withType(ColumnTypeEnum.STRING, 100);
.withType(ColumnTypeEnum.STRING, 100)
.failureAllowed();
version.onTable("HFJ_SPIDX_URI")
.modifyColumn("20190814.21", "SP_URI")
.nullable()
.failureAllowed()
.withType(ColumnTypeEnum.STRING, 254);
.withType(ColumnTypeEnum.STRING, 254)
.failureAllowed();
version.onTable("TRM_CODESYSTEM")
.modifyColumn("20190814.22", "CODE_SYSTEM_URI")
.nonNullable()
.failureAllowed()
.withType(ColumnTypeEnum.STRING, 200);
.withType(ColumnTypeEnum.STRING, 200)
.failureAllowed();
version.onTable("TRM_CODESYSTEM")
.modifyColumn("20190814.23", "CS_NAME")
.nullable()
.failureAllowed()
.withType(ColumnTypeEnum.STRING, 200);
.withType(ColumnTypeEnum.STRING, 200)
.failureAllowed();
version.onTable("TRM_CODESYSTEM_VER")
.modifyColumn("20190814.24", "CS_VERSION_ID")
.nullable()
.failureAllowed()
.withType(ColumnTypeEnum.STRING, 200);
.withType(ColumnTypeEnum.STRING, 200)
.failureAllowed();
}
private void init360() { // 20180918 - 20181112

View File

@ -0,0 +1,73 @@
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2024 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package ca.uhn.fhir.jpa.model.search;
import ca.uhn.fhir.jpa.dao.search.ExtendedHSearchClauseBuilder;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
/**
* This is a parameter class for the
* {@link ca.uhn.fhir.jpa.dao.search.ExtendedHSearchSearchBuilder#addAndConsumeAdvancedQueryClauses(ExtendedHSearchClauseBuilder, ExtendedHSearchBuilderConsumeAdvancedQueryClausesParams)}
* method, so that the signature stays manageable (small) and can evolve without breaking
* implementers as often.
*/
public class ExtendedHSearchBuilderConsumeAdvancedQueryClausesParams {
/**
* Resource type
*/
private String myResourceType;
/**
* The search parameter map for the query
*/
private SearchParameterMap mySearchParameterMap;
/**
* Search param registry
*/
private ISearchParamRegistry mySearchParamRegistry;
public String getResourceType() {
return myResourceType;
}
public ExtendedHSearchBuilderConsumeAdvancedQueryClausesParams setResourceType(String theResourceType) {
myResourceType = theResourceType;
return this;
}
public SearchParameterMap getSearchParameterMap() {
return mySearchParameterMap;
}
public ExtendedHSearchBuilderConsumeAdvancedQueryClausesParams setSearchParameterMap(SearchParameterMap theParams) {
mySearchParameterMap = theParams;
return this;
}
public ISearchParamRegistry getSearchParamRegistry() {
return mySearchParamRegistry;
}
public ExtendedHSearchBuilderConsumeAdvancedQueryClausesParams setSearchParamRegistry(
ISearchParamRegistry theSearchParamRegistry) {
mySearchParamRegistry = theSearchParamRegistry;
return this;
}
}
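A minimal usage sketch for this parameter object (the clause builder, map, and registry instances are assumed to be supplied by the surrounding search code):
ExtendedHSearchBuilderConsumeAdvancedQueryClausesParams params =
        new ExtendedHSearchBuilderConsumeAdvancedQueryClausesParams()
                .setResourceType("Observation") // illustrative resource type
                .setSearchParameterMap(theSearchParameterMap)
                .setSearchParamRegistry(theSearchParamRegistry);
// mySearchBuilder is an assumed ExtendedHSearchSearchBuilder instance
mySearchBuilder.addAndConsumeAdvancedQueryClauses(theClauseBuilder, params);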

View File

@ -79,7 +79,7 @@ public class PersistedJpaSearchFirstPageBundleProvider extends PersistedJpaBundl
ourLog.trace("Done fetching search resource PIDs");
int countOfPids = pids.size();
int maxSize = Math.min(theToIndex - theFromIndex, countOfPids);
thePageBuilder.setTotalRequestedResourcesFetched(countOfPids);
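For example, a request for indexes 0..50 against a search that fetched only 23 PIDs clamps maxSize to 23, so the page never overruns what was actually retrieved.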

View File

@ -22,6 +22,7 @@ package ca.uhn.fhir.jpa.search;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.jpa.dao.data.IResourceSearchUrlDao;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.entity.ResourceSearchUrlEntity;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
@ -51,16 +52,19 @@ public class ResourceSearchUrlSvc {
private final MatchUrlService myMatchUrlService;
private final FhirContext myFhirContext;
private final PartitionSettings myPartitionSettings;
public ResourceSearchUrlSvc(
EntityManager theEntityManager,
IResourceSearchUrlDao theResourceSearchUrlDao,
MatchUrlService theMatchUrlService,
FhirContext theFhirContext) {
FhirContext theFhirContext,
PartitionSettings thePartitionSettings) {
myEntityManager = theEntityManager;
myResourceSearchUrlDao = theResourceSearchUrlDao;
myMatchUrlService = theMatchUrlService;
myFhirContext = theFhirContext;
myPartitionSettings = thePartitionSettings;
}
/**
@ -87,8 +91,10 @@ public class ResourceSearchUrlSvc {
String theResourceName, String theMatchUrl, ResourceTable theResourceTable) {
String canonicalizedUrlForStorage = createCanonicalizedUrlForStorage(theResourceName, theMatchUrl);
ResourceSearchUrlEntity searchUrlEntity =
ResourceSearchUrlEntity.from(canonicalizedUrlForStorage, theResourceTable);
ResourceSearchUrlEntity searchUrlEntity = ResourceSearchUrlEntity.from(
canonicalizedUrlForStorage,
theResourceTable,
myPartitionSettings.isConditionalCreateDuplicateIdentifiersEnabled());
// calling dao.save performs a merge operation which implies a trip to
// the database to see if the resource exists. Since we don't need the check, we avoid the trip by calling
// em.persist.
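A hedged sketch of the distinction the comment above draws (entity and manager as in this class; an active transaction is assumed):
// dao.save() routes through a merge, which first queries to decide INSERT vs UPDATE;
// this row is known to be new, so persist() schedules a plain INSERT with no lookup
myEntityManager.persist(searchUrlEntity);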

View File

@ -74,6 +74,7 @@ import ca.uhn.fhir.jpa.util.SqlQueryList;
import ca.uhn.fhir.model.api.IQueryParameterType;
import ca.uhn.fhir.model.api.Include;
import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum;
import ca.uhn.fhir.model.api.TemporalPrecisionEnum;
import ca.uhn.fhir.model.valueset.BundleEntrySearchModeEnum;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum;
@ -82,6 +83,7 @@ import ca.uhn.fhir.rest.api.SortOrderEnum;
import ca.uhn.fhir.rest.api.SortSpec;
import ca.uhn.fhir.rest.api.server.IPreResourceAccessDetails;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.param.DateParam;
import ca.uhn.fhir.rest.param.DateRangeParam;
import ca.uhn.fhir.rest.param.ParameterUtil;
import ca.uhn.fhir.rest.param.ReferenceParam;
@ -95,6 +97,7 @@ import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
import ca.uhn.fhir.util.StopWatch;
import ca.uhn.fhir.util.StringUtil;
import ca.uhn.fhir.util.UrlUtil;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.Streams;
import com.healthmarketscience.sqlbuilder.Condition;
import jakarta.annotation.Nonnull;
@ -165,7 +168,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
public static boolean myUseMaxPageSize50ForTest = false;
protected final IInterceptorBroadcaster myInterceptorBroadcaster;
protected final IResourceTagDao myResourceTagDao;
String myResourceName;
private String myResourceName;
private final Class<? extends IBaseResource> myResourceType;
private final HapiFhirLocalContainerEntityManagerFactoryBean myEntityManagerFactory;
private final SqlObjectFactory mySqlBuilderFactory;
@ -206,6 +209,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
/**
* Constructor
*/
@SuppressWarnings({"rawtypes", "unchecked"})
public SearchBuilder(
IDao theDao,
String theResourceName,
@ -240,6 +244,11 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
myIdHelperService = theIdHelperService;
}
@VisibleForTesting
void setResourceName(String theName) {
myResourceName = theName;
}
@Override
public void setMaxResultsToFetch(Integer theMaxResultsToFetch) {
myMaxResultsToFetch = theMaxResultsToFetch;
@ -265,8 +274,6 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
attemptComboUniqueSpProcessing(theQueryStack, theParams, theRequest);
}
SearchContainedModeEnum searchContainedMode = theParams.getSearchContainedMode();
// Handle _id and _tag last, since they can typically be tacked onto a different parameter
List<String> paramNames = myParams.keySet().stream()
.filter(t -> !t.equals(IAnyResource.SP_RES_ID))
@ -399,7 +406,8 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
}
if (fulltextExecutor == null) {
fulltextExecutor = SearchQueryExecutors.from(fulltextMatchIds);
fulltextExecutor =
SearchQueryExecutors.from(fulltextMatchIds != null ? fulltextMatchIds : new ArrayList<>());
}
if (theSearchRuntimeDetails != null) {
@ -486,7 +494,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
return fulltextEnabled
&& myParams != null
&& myParams.getSearchContainedMode() == SearchContainedModeEnum.FALSE
&& myFulltextSearchSvc.supportsSomeOf(myParams)
&& myFulltextSearchSvc.canUseHibernateSearch(myResourceName, myParams)
&& myFulltextSearchSvc.supportsAllSortTerms(myResourceName, myParams);
}
@ -538,8 +546,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
pid = myIdHelperService.resolveResourcePersistentIds(myRequestPartitionId, myResourceName, idParamValue);
}
List<JpaPid> pids = myFulltextSearchSvc.everything(myResourceName, myParams, pid, theRequestDetails);
return pids;
return myFulltextSearchSvc.everything(myResourceName, myParams, pid, theRequestDetails);
}
private void doCreateChunkedQueries(
@ -862,13 +869,8 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
theQueryStack.addSortOnLastUpdated(ascending);
} else {
RuntimeSearchParam param = null;
if (param == null) {
// do we have a composition param defined for the whole chain?
param = mySearchParamRegistry.getActiveSearchParam(myResourceName, theSort.getParamName());
}
RuntimeSearchParam param =
mySearchParamRegistry.getActiveSearchParam(myResourceName, theSort.getParamName());
/*
* If we have a sort like _sort=subject.name and we have an
@ -896,9 +898,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
mySearchParamRegistry.getActiveSearchParam(myResourceName, referenceParam);
if (outerParam == null) {
throwInvalidRequestExceptionForUnknownSortParameter(myResourceName, referenceParam);
}
if (outerParam.hasUpliftRefchain(targetParam)) {
} else if (outerParam.hasUpliftRefchain(targetParam)) {
for (String nextTargetType : outerParam.getTargets()) {
if (referenceParamTargetType != null && !referenceParamTargetType.equals(nextTargetType)) {
continue;
@ -945,6 +945,9 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
throwInvalidRequestExceptionForUnknownSortParameter(getResourceName(), paramName);
}
// param will never be null here (the above line throws if it is)
// the assert is just to prevent the warning
assert param != null;
if (isNotBlank(chainName) && param.getParamType() != RestSearchParameterTypeEnum.REFERENCE) {
throw new InvalidRequestException(
Msg.code(2285) + "Invalid chain, " + paramName + " is not a reference SearchParameter");
@ -1121,11 +1124,15 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
resourceType, next, tagMap.get(next.getId()), theForHistoryOperation);
}
if (resource == null) {
ourLog.warn(
"Unable to find resource {}/{}/_history/{} in database",
next.getResourceType(),
next.getIdDt().getIdPart(),
next.getVersion());
if (next != null) {
ourLog.warn(
"Unable to find resource {}/{}/_history/{} in database",
next.getResourceType(),
next.getIdDt().getIdPart(),
next.getVersion());
} else {
ourLog.warn("Unable to find resource in database.");
}
continue;
}
@ -1196,7 +1203,6 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
RequestDetails theDetails) {
if (thePids.isEmpty()) {
ourLog.debug("The include pids are empty");
// return;
}
// Dupes will cause a crash later anyhow, but this is expensive so only do it
@ -1256,10 +1262,9 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
// only impl
// to handle lastN?
if (myStorageSettings.isAdvancedHSearchIndexing() && myStorageSettings.isStoreResourceInHSearchIndex()) {
List<Long> pidList = thePids.stream().map(pid -> (pid).getId()).collect(Collectors.toList());
List<Long> pidList = thePids.stream().map(JpaPid::getId).collect(Collectors.toList());
List<IBaseResource> resources = myFulltextSearchSvc.getResources(pidList);
return resources;
return myFulltextSearchSvc.getResources(pidList);
} else if (!Objects.isNull(myParams) && myParams.isLastN()) {
// legacy LastN implementation
return myIElasticsearchSvc.getObservationResources(thePids);
@ -1344,7 +1349,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
for (Iterator<Include> iter = includes.iterator(); iter.hasNext(); ) {
Include nextInclude = iter.next();
if (nextInclude.isRecurse() == false) {
if (!nextInclude.isRecurse()) {
iter.remove();
}
@ -1707,6 +1712,8 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
}
/**
* Calls Performance Trace Hook
* @param request the request details
* Sends a raw SQL query to the Pointcut for raw SQL queries.
*/
private void callRawSqlHookWithCurrentThreadQueries(RequestDetails request) {
@ -1874,12 +1881,12 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
}
private void attemptComboUniqueSpProcessing(
QueryStack theQueryStack3, @Nonnull SearchParameterMap theParams, RequestDetails theRequest) {
QueryStack theQueryStack, @Nonnull SearchParameterMap theParams, RequestDetails theRequest) {
RuntimeSearchParam comboParam = null;
List<String> comboParamNames = null;
List<RuntimeSearchParam> exactMatchParams =
mySearchParamRegistry.getActiveComboSearchParams(myResourceName, theParams.keySet());
if (exactMatchParams.size() > 0) {
if (!exactMatchParams.isEmpty()) {
comboParam = exactMatchParams.get(0);
comboParamNames = new ArrayList<>(theParams.keySet());
}
@ -1890,7 +1897,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
for (RuntimeSearchParam nextCandidate : candidateComboParams) {
List<String> nextCandidateParamNames =
JpaParamUtil.resolveComponentParameters(mySearchParamRegistry, nextCandidate).stream()
.map(t -> t.getName())
.map(RuntimeSearchParam::getName)
.collect(Collectors.toList());
if (theParams.keySet().containsAll(nextCandidateParamNames)) {
comboParam = nextCandidate;
@ -1901,98 +1908,138 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
}
if (comboParam != null) {
// Since we're going to remove elements below
theParams.values().forEach(nextAndList -> ensureSubListsAreWritable(nextAndList));
StringBuilder sb = new StringBuilder();
sb.append(myResourceName);
sb.append("?");
boolean first = true;
Collections.sort(comboParamNames);
for (String nextParamName : comboParamNames) {
List<List<IQueryParameterType>> nextValues = theParams.get(nextParamName);
// TODO Hack to fix weird IOOB on the next stanza until James comes back and makes sense of this.
if (nextValues.isEmpty()) {
ourLog.error(
"query parameter {} is unexpectedly empty. Encountered while considering {} index for {}",
nextParamName,
comboParam.getName(),
theRequest.getCompleteUrl());
sb = null;
break;
}
if (nextValues.get(0).size() != 1) {
sb = null;
break;
}
// Reference params are only eligible for using a composite index if they
// are qualified
RuntimeSearchParam nextParamDef =
mySearchParamRegistry.getActiveSearchParam(myResourceName, nextParamName);
if (nextParamDef.getParamType() == RestSearchParameterTypeEnum.REFERENCE) {
ReferenceParam param = (ReferenceParam) nextValues.get(0).get(0);
if (isBlank(param.getResourceType())) {
sb = null;
break;
}
}
List<? extends IQueryParameterType> nextAnd = nextValues.remove(0);
IQueryParameterType nextOr = nextAnd.remove(0);
String nextOrValue = nextOr.getValueAsQueryToken(myContext);
if (comboParam.getComboSearchParamType() == ComboSearchParamType.NON_UNIQUE) {
if (nextParamDef.getParamType() == RestSearchParameterTypeEnum.STRING) {
nextOrValue = StringUtil.normalizeStringForSearchIndexing(nextOrValue);
}
}
if (first) {
first = false;
} else {
sb.append('&');
}
nextParamName = UrlUtil.escapeUrlParam(nextParamName);
nextOrValue = UrlUtil.escapeUrlParam(nextOrValue);
sb.append(nextParamName).append('=').append(nextOrValue);
if (!validateParamValuesAreValidForComboParam(theParams, comboParamNames)) {
return;
}
if (sb != null) {
String indexString = sb.toString();
ourLog.debug(
"Checking for {} combo index for query: {}", comboParam.getComboSearchParamType(), indexString);
applyComboSearchParam(theQueryStack, theParams, theRequest, comboParamNames, comboParam);
}
}
// Interceptor broadcast: JPA_PERFTRACE_INFO
StorageProcessingMessage msg = new StorageProcessingMessage()
.setMessage("Using " + comboParam.getComboSearchParamType() + " index for query for search: "
+ indexString);
HookParams params = new HookParams()
.add(RequestDetails.class, theRequest)
.addIfMatchesType(ServletRequestDetails.class, theRequest)
.add(StorageProcessingMessage.class, msg);
CompositeInterceptorBroadcaster.doCallHooks(
myInterceptorBroadcaster, theRequest, Pointcut.JPA_PERFTRACE_INFO, params);
private void applyComboSearchParam(
QueryStack theQueryStack,
@Nonnull SearchParameterMap theParams,
RequestDetails theRequest,
List<String> theComboParamNames,
RuntimeSearchParam theComboParam) {
// Since we're going to remove elements below
theParams.values().forEach(this::ensureSubListsAreWritable);
switch (comboParam.getComboSearchParamType()) {
case UNIQUE:
theQueryStack3.addPredicateCompositeUnique(indexString, myRequestPartitionId);
break;
case NON_UNIQUE:
theQueryStack3.addPredicateCompositeNonUnique(indexString, myRequestPartitionId);
break;
StringBuilder theSearchBuilder = new StringBuilder();
theSearchBuilder.append(myResourceName);
theSearchBuilder.append("?");
boolean first = true;
for (String nextParamName : theComboParamNames) {
List<List<IQueryParameterType>> nextValues = theParams.get(nextParamName);
// This should never happen, but this safety check was added along the way and
// presumably must save us in some specific race condition. I am preserving it
// in a refactor of this code base. 20240429
if (nextValues.isEmpty()) {
ourLog.error(
"query parameter {} is unexpectedly empty. Encountered while considering {} index for {}",
nextParamName,
theComboParam.getName(),
theRequest.getCompleteUrl());
continue;
}
List<? extends IQueryParameterType> nextAnd = nextValues.remove(0);
IQueryParameterType nextOr = nextAnd.remove(0);
String nextOrValue = nextOr.getValueAsQueryToken(myContext);
RuntimeSearchParam nextParamDef = mySearchParamRegistry.getActiveSearchParam(myResourceName, nextParamName);
if (theComboParam.getComboSearchParamType() == ComboSearchParamType.NON_UNIQUE) {
if (nextParamDef.getParamType() == RestSearchParameterTypeEnum.STRING) {
nextOrValue = StringUtil.normalizeStringForSearchIndexing(nextOrValue);
}
}
// Remove any empty parameters remaining after this
theParams.clean();
if (first) {
first = false;
} else {
theSearchBuilder.append('&');
}
nextParamName = UrlUtil.escapeUrlParam(nextParamName);
nextOrValue = UrlUtil.escapeUrlParam(nextOrValue);
theSearchBuilder.append(nextParamName).append('=').append(nextOrValue);
}
if (theSearchBuilder != null) {
String indexString = theSearchBuilder.toString();
ourLog.debug(
"Checking for {} combo index for query: {}", theComboParam.getComboSearchParamType(), indexString);
// Interceptor broadcast: JPA_PERFTRACE_INFO
StorageProcessingMessage msg = new StorageProcessingMessage()
.setMessage("Using " + theComboParam.getComboSearchParamType() + " index for query for search: "
+ indexString);
HookParams params = new HookParams()
.add(RequestDetails.class, theRequest)
.addIfMatchesType(ServletRequestDetails.class, theRequest)
.add(StorageProcessingMessage.class, msg);
CompositeInterceptorBroadcaster.doCallHooks(
myInterceptorBroadcaster, theRequest, Pointcut.JPA_PERFTRACE_INFO, params);
switch (theComboParam.getComboSearchParamType()) {
case UNIQUE:
theQueryStack.addPredicateCompositeUnique(indexString, myRequestPartitionId);
break;
case NON_UNIQUE:
theQueryStack.addPredicateCompositeNonUnique(indexString, myRequestPartitionId);
break;
}
// Remove any empty parameters remaining after this
theParams.clean();
}
}
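For reference, the index string assembled above is a canonical mini search URL built from the escaped parameter names and values; for a combo parameter on Patient covering birthdate and family it would look something like this (values illustrative; string values are normalized first for NON_UNIQUE params):
String indexString = "Patient?birthdate=1974-12-25&family=smith"; // illustrative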
private boolean validateParamValuesAreValidForComboParam(
@Nonnull SearchParameterMap theParams, List<String> comboParamNames) {
boolean paramValuesAreValidForCombo = true;
for (String nextParamName : comboParamNames) {
List<List<IQueryParameterType>> nextValues = theParams.get(nextParamName);
// Multiple AND parameters are not supported for unique combo params
if (nextValues.get(0).size() != 1) {
ourLog.debug(
"Search is not a candidate for unique combo searching - Multiple AND expressions found for the same parameter");
paramValuesAreValidForCombo = false;
break;
}
List<IQueryParameterType> nextAndValue = nextValues.get(0);
for (IQueryParameterType nextOrValue : nextAndValue) {
if (nextOrValue instanceof DateParam) {
if (((DateParam) nextOrValue).getPrecision() != TemporalPrecisionEnum.DAY) {
ourLog.debug(
"Search is not a candidate for unique combo searching - Date search with non-DAY precision");
paramValuesAreValidForCombo = false;
break;
}
}
}
// Reference params are only eligible for using a composite index if they
// are qualified
RuntimeSearchParam nextParamDef = mySearchParamRegistry.getActiveSearchParam(myResourceName, nextParamName);
if (nextParamDef.getParamType() == RestSearchParameterTypeEnum.REFERENCE) {
ReferenceParam param = (ReferenceParam) nextValues.get(0).get(0);
if (isBlank(param.getResourceType())) {
ourLog.debug(
"Search is not a candidate for unique combo searching - Reference with no type specified");
paramValuesAreValidForCombo = false;
break;
}
}
}
return paramValuesAreValidForCombo;
}
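To make the date-precision rule concrete, a small sketch (values illustrative; DateParam infers precision from the literal):
DateParam eligible = new DateParam("2024-04-29");             // DAY precision: combo-index eligible
DateParam ineligible = new DateParam("2024-04-29T10:15:00Z"); // finer than DAY: disqualifies the combo index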
private <T> void ensureSubListsAreWritable(List<List<T>> theListOfLists) {

View File

@ -98,10 +98,19 @@ public abstract class BaseSearchParamPredicateBuilder extends BaseJoiningPredica
public Condition createPredicateParamMissingForNonReference(
String theResourceName, String theParamName, Boolean theMissing, RequestPartitionId theRequestPartitionId) {
ComboCondition condition = ComboCondition.and(
BinaryCondition.equalTo(getResourceTypeColumn(), generatePlaceholder(theResourceName)),
BinaryCondition.equalTo(getColumnParamName(), generatePlaceholder(theParamName)),
BinaryCondition.equalTo(getMissingColumn(), generatePlaceholder(theMissing)));
List<Condition> conditions = new ArrayList<>();
if (getStorageSettings().isIndexStorageOptimized()) {
Long hashIdentity = BaseResourceIndexedSearchParam.calculateHashIdentity(
getPartitionSettings(), getRequestPartitionId(), theResourceName, theParamName);
conditions.add(BinaryCondition.equalTo(getColumnHashIdentity(), generatePlaceholder(hashIdentity)));
} else {
conditions.add(BinaryCondition.equalTo(getResourceTypeColumn(), generatePlaceholder(theResourceName)));
conditions.add(BinaryCondition.equalTo(getColumnParamName(), generatePlaceholder(theParamName)));
}
conditions.add(BinaryCondition.equalTo(getMissingColumn(), generatePlaceholder(theMissing)));
ComboCondition condition = ComboCondition.and(conditions.toArray());
return combineWithRequestPartitionIdPredicate(theRequestPartitionId, condition);
}
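The net effect is that in index-storage-optimized mode the missing-parameter predicate compares a single precomputed hash column instead of the RES_TYPE and PARAM_NAME string pair, presumably trading readability for a smaller index and a cheaper equality test.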

View File

@ -20,6 +20,8 @@
package ca.uhn.fhir.jpa.search.builder.predicate;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.model.entity.PartitionablePartitionId;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedComboTokenNonUnique;
import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryBuilder;
import com.healthmarketscience.sqlbuilder.BinaryCondition;
import com.healthmarketscience.sqlbuilder.Condition;
@ -27,7 +29,7 @@ import com.healthmarketscience.sqlbuilder.dbspec.basic.DbColumn;
public class ComboNonUniqueSearchParameterPredicateBuilder extends BaseSearchParamPredicateBuilder {
private final DbColumn myColumnIndexString;
private final DbColumn myColumnHashComplete;
/**
* Constructor
@ -35,11 +37,15 @@ public class ComboNonUniqueSearchParameterPredicateBuilder extends BaseSearchPar
public ComboNonUniqueSearchParameterPredicateBuilder(SearchQueryBuilder theSearchSqlBuilder) {
super(theSearchSqlBuilder, theSearchSqlBuilder.addTable("HFJ_IDX_CMB_TOK_NU"));
myColumnIndexString = getTable().addColumn("IDX_STRING");
myColumnHashComplete = getTable().addColumn("HASH_COMPLETE");
}
public Condition createPredicateHashComplete(RequestPartitionId theRequestPartitionId, String theIndexString) {
BinaryCondition predicate = BinaryCondition.equalTo(myColumnIndexString, generatePlaceholder(theIndexString));
PartitionablePartitionId partitionId =
PartitionablePartitionId.toStoragePartition(theRequestPartitionId, getPartitionSettings());
long hash = ResourceIndexedComboTokenNonUnique.calculateHashComplete(
getPartitionSettings(), partitionId, theIndexString);
BinaryCondition predicate = BinaryCondition.equalTo(myColumnHashComplete, generatePlaceholder(hash));
return combineWithRequestPartitionIdPredicate(theRequestPartitionId, predicate);
}
}
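With this change the non-unique combo predicate matches on the precomputed, partition-aware HASH_COMPLETE value rather than the raw IDX_STRING, keeping the query consistent with how ResourceIndexedComboTokenNonUnique hashes rows at write time.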

View File

@ -370,7 +370,8 @@ public class ResourceLinkPredicateBuilder extends BaseJoiningPredicateBuilder im
.collect(Collectors.joining(" or ")));
} else {
builder.append("If you know what you're looking for, try qualifying it using the form: '");
builder.append(theParamName).append(":[resourceType]");
builder.append(theParamName).append(":[resourceType]=[id] or ");
builder.append(theParamName).append("=[resourceType]/[id]");
builder.append("'");
}
String message = builder.toString();
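For a parameter named subject, the hint now renders as: try qualifying it using the form: 'subject:[resourceType]=[id] or subject=[resourceType]/[id]'.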

View File

@ -524,8 +524,12 @@ public class InstanceReindexServiceImpl implements IInstanceReindexService {
String theParamTypeName) {
Parameters.ParametersParameterComponent retVal =
super.addIndexValue(theAction, theParent, theParam, theParamTypeName);
retVal.addPart().setName("Latitude").setValue(new DecimalType(theParam.getLatitude()));
retVal.addPart().setName("Longitude").setValue(new DecimalType(theParam.getLongitude()));
if (theParam.getLatitude() != null) {
retVal.addPart().setName("Latitude").setValue(new DecimalType(theParam.getLatitude()));
}
if (theParam.getLongitude() != null) {
retVal.addPart().setName("Longitude").setValue(new DecimalType(theParam.getLongitude()));
}
return retVal;
}
}

View File

@ -50,8 +50,10 @@ import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException;
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import ca.uhn.fhir.util.ObjectUtil;
import ca.uhn.fhir.util.UrlUtil;
import ca.uhn.fhir.util.ValidateUtil;
import jakarta.annotation.Nonnull;
import jakarta.persistence.EntityManager;
@ -294,6 +296,8 @@ public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc {
theResourceEntity.getIdDt().getValue(),
theCodeSystem.getContentElement().getValueAsString());
detectDuplicatesInCodeSystem(theCodeSystem);
Long pid = (Long) theCodeSystem.getUserData(RESOURCE_PID_KEY);
assert pid != null;
JpaPid codeSystemResourcePid = JpaPid.fromId(pid);
@ -339,6 +343,30 @@ public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc {
}
}
private static void detectDuplicatesInCodeSystem(CodeSystem theCodeSystem) {
detectDuplicatesInCodeSystem(theCodeSystem.getConcept(), new HashSet<>());
}
private static void detectDuplicatesInCodeSystem(
List<CodeSystem.ConceptDefinitionComponent> theCodeList, Set<String> theFoundCodesBuffer) {
for (var next : theCodeList) {
if (isNotBlank(next.getCode())) {
if (!theFoundCodesBuffer.add(next.getCode())) {
/*
* Note: We could possibly modify this behaviour to be forgiving, and just
* ignore duplicates. The only issue is that concepts can have properties,
* designations, etc. and it could be dangerous to just pick one and ignore the
* other. So the safer thing seems to be to just throw an error.
*/
throw new PreconditionFailedException(Msg.code(2528) + "Duplicate concept detected in CodeSystem: "
+ UrlUtil.sanitizeUrlPart(next.getCode()));
}
}
// Test child concepts within the parent concept
detectDuplicatesInCodeSystem(next.getConcept(), theFoundCodesBuffer);
}
}
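A hedged sketch of the failure mode this guard catches: a CodeSystem whose concept tree repeats a code anywhere, including in nested children, is now rejected up front (content below is illustrative):
CodeSystem cs = new CodeSystem();
cs.addConcept().setCode("A");
CodeSystem.ConceptDefinitionComponent parent = cs.addConcept().setCode("B");
parent.addConcept().setCode("A"); // duplicate of the root-level "A"
// storing this CodeSystem fails with PreconditionFailedException (Msg.code 2528)
// before any concepts are written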
@Override
@Transactional
public IIdType storeNewCodeSystemVersion(

View File

@ -1,5 +1,6 @@
package ca.uhn.fhir.jpa.dao.index;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.entity.BaseResourceIndex;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamNumber;
@ -61,6 +62,7 @@ public class DaoSearchParamSynchronizerTest {
THE_SEARCH_PARAM_NUMBER.setResource(resourceTable);
subject.setEntityManager(entityManager);
subject.setStorageSettings(new JpaStorageSettings());
}
@Test

View File

@ -39,7 +39,7 @@ class SearchBuilderTest {
@BeforeEach
public void beforeEach() {
mySearchBuilder.myResourceName = "QuestionnaireResponse";
mySearchBuilder.setResourceName("QuestionnaireResponse");
when(myDaoRegistry.getRegisteredDaoTypes()).thenReturn(ourCtx.getResourceTypes());
}

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.3.7-SNAPSHOT</version>
<version>7.3.8-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -26,6 +26,10 @@ import ca.uhn.fhir.jpa.model.search.StorageProcessingMessage;
import ca.uhn.fhir.jpa.search.BaseSourceSearchParameterTestCases;
import ca.uhn.fhir.jpa.search.CompositeSearchParameterTestCases;
import ca.uhn.fhir.jpa.search.QuantitySearchParameterTestCases;
import ca.uhn.fhir.jpa.search.builder.SearchBuilder;
import ca.uhn.fhir.jpa.search.lastn.ElasticsearchRestClientFactory;
import ca.uhn.fhir.jpa.search.lastn.ElasticsearchSvcImpl;
import ca.uhn.fhir.jpa.search.lastn.json.ObservationJson;
import ca.uhn.fhir.jpa.search.reindex.IResourceReindexingSvc;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.sp.ISearchParamPresenceSvc;
@ -41,6 +45,7 @@ import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId;
import ca.uhn.fhir.rest.param.ReferenceParam;
import ca.uhn.fhir.rest.param.SpecialParam;
import ca.uhn.fhir.rest.param.StringOrListParam;
import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.param.TokenParam;
@ -54,8 +59,18 @@ import ca.uhn.fhir.test.utilities.LogbackLevelOverrideExtension;
import ca.uhn.fhir.test.utilities.docker.RequiresDocker;
import ca.uhn.fhir.validation.FhirValidator;
import ca.uhn.fhir.validation.ValidationResult;
import ca.uhn.test.util.LogbackTestExtension;
import ch.qos.logback.classic.Level;
import ch.qos.logback.classic.spi.ILoggingEvent;
import co.elastic.clients.elasticsearch.ElasticsearchClient;
import co.elastic.clients.elasticsearch.core.SearchRequest;
import co.elastic.clients.elasticsearch.core.SearchResponse;
import com.fasterxml.jackson.databind.node.ObjectNode;
import jakarta.annotation.Nonnull;
import jakarta.json.JsonValue;
import jakarta.persistence.EntityManager;
import org.apache.commons.lang3.RandomStringUtils;
import org.elasticsearch.client.RequestOptions;
import org.hl7.fhir.instance.model.api.IBaseCoding;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
@ -103,6 +118,7 @@ import org.springframework.test.context.support.DirtiesContextTestExecutionListe
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.web.util.UriComponents;
import org.springframework.web.util.UriComponentsBuilder;
import org.testcontainers.elasticsearch.ElasticsearchContainer;
import java.io.IOException;
import java.net.URLEncoder;
@ -118,6 +134,7 @@ import static ca.uhn.fhir.jpa.model.util.UcumServiceUtil.UCUM_CODESYSTEM_URL;
import static ca.uhn.fhir.rest.api.Constants.CHARSET_UTF8;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
@ -168,6 +185,9 @@ public class FhirResourceDaoR4SearchWithElasticSearchIT extends BaseJpaTest impl
TestDaoSearch myTestDaoSearch;
@RegisterExtension
LogbackLevelOverrideExtension myLogbackLevelOverrideExtension = new LogbackLevelOverrideExtension();
@RegisterExtension
LogbackTestExtension myLogbackTestExtension = new LogbackTestExtension();
@Autowired
@Qualifier("myCodeSystemDaoR4")
private IFhirResourceDao<CodeSystem> myCodeSystemDao;
@ -203,9 +223,13 @@ public class FhirResourceDaoR4SearchWithElasticSearchIT extends BaseJpaTest impl
private IFhirResourceDao<QuestionnaireResponse> myQuestionnaireResponseDao;
@Autowired
private TestHSearchEventDispatcher myHSearchEventDispatcher;
@Autowired
ElasticsearchContainer myElasticsearchContainer;
@Mock
private IHSearchEventListener mySearchEventListener;
@Autowired
private ElasticsearchSvcImpl myElasticsearchSvc;
@BeforeEach
@ -294,7 +318,7 @@ public class FhirResourceDaoR4SearchWithElasticSearchIT extends BaseJpaTest impl
}
@Test
public void testResourceTextSearch() {
public void testResourceContentSearch() {
Observation obs1 = new Observation();
obs1.getCode().setText("Systolic Blood Pressure");
@ -311,15 +335,182 @@ public class FhirResourceDaoR4SearchWithElasticSearchIT extends BaseJpaTest impl
SearchParameterMap map;
map = new SearchParameterMap();
map.add(ca.uhn.fhir.rest.api.Constants.PARAM_CONTENT, new StringParam("systolic"));
assertThat(toUnqualifiedVersionlessIdValues(myObservationDao.search(map))).containsExactlyInAnyOrder(toValues(id1));
{ //Content works as String Param
map = new SearchParameterMap();
map.add(ca.uhn.fhir.rest.api.Constants.PARAM_CONTENT, new StringParam("systolic"));
assertThat(toUnqualifiedVersionlessIdValues(myObservationDao.search(map))).containsExactlyInAnyOrder(toValues(id1));
map = new SearchParameterMap();
map.add(Constants.PARAM_CONTENT, new StringParam("blood"));
assertThat(toUnqualifiedVersionlessIdValues(myObservationDao.search(map))).containsExactlyInAnyOrder(toValues(id1, id2));
map = new SearchParameterMap();
map.add(Constants.PARAM_CONTENT, new StringParam("blood"));
assertThat(toUnqualifiedVersionlessIdValues(myObservationDao.search(map))).containsExactlyInAnyOrder(toValues(id1, id2));
}
{ //_content works as Special Param
map = new SearchParameterMap();
map.add(ca.uhn.fhir.rest.api.Constants.PARAM_CONTENT, new SpecialParam().setValue("systolic"));
assertThat(toUnqualifiedVersionlessIdValues(myObservationDao.search(map))).containsExactlyInAnyOrder(toValues(id1));
map = new SearchParameterMap();
map.add(Constants.PARAM_CONTENT, new SpecialParam().setValue("blood"));
assertThat(toUnqualifiedVersionlessIdValues(myObservationDao.search(map))).containsExactlyInAnyOrder(toValues(id1, id2));
}
}
@Test
public void testResourceTextSearch() {
Observation obs1 = new Observation();
obs1.getCode().setText("Systolic Blood Pressure");
obs1.setStatus(Observation.ObservationStatus.FINAL);
obs1.setValue(new Quantity(123));
obs1.getNoteFirstRep().setText("obs1");
obs1.getText().setDivAsString("systolic blood pressure");
obs1.getText().setStatus(Narrative.NarrativeStatus.ADDITIONAL);
IIdType id1 = myObservationDao.create(obs1, mySrd).getId().toUnqualifiedVersionless();
Observation obs2 = new Observation();
obs2.getCode().setText("Diastolic Blood Pressure");
obs2.setStatus(Observation.ObservationStatus.FINAL);
obs2.setValue(new Quantity(81));
obs2.getText().setDivAsString("diastolic blood pressure");
obs2.getText().setStatus(Narrative.NarrativeStatus.ADDITIONAL);
IIdType id2 = myObservationDao.create(obs2, mySrd).getId().toUnqualifiedVersionless();
SearchParameterMap map;
{ //_text works as a string param
map = new SearchParameterMap();
map.add(Constants.PARAM_TEXT, new StringParam("systolic"));
assertThat(toUnqualifiedVersionlessIdValues(myObservationDao.search(map))).containsExactlyInAnyOrder(toValues(id1));
map = new SearchParameterMap();
map.add(Constants.PARAM_TEXT, new StringParam("blood"));
assertThat(toUnqualifiedVersionlessIdValues(myObservationDao.search(map))).containsExactlyInAnyOrder(toValues(id1, id2));
}
{ //_text works as a special param
map = new SearchParameterMap();
map.add(Constants.PARAM_TEXT, new SpecialParam().setValue("systolic"));
assertThat(toUnqualifiedVersionlessIdValues(myObservationDao.search(map))).containsExactlyInAnyOrder(toValues(id1));
map = new SearchParameterMap();
map.add(Constants.PARAM_TEXT, new SpecialParam().setValue("blood"));
assertThat(toUnqualifiedVersionlessIdValues(myObservationDao.search(map))).containsExactlyInAnyOrder(toValues(id1, id2));
}
}
@Test
public void testTextContainsFunctionality() {
String slug = "my-special-@char!";
Observation obs1 = new Observation();
obs1.getCode().setText("Systolic Blood Pressure");
obs1.setStatus(Observation.ObservationStatus.FINAL);
obs1.setValue(new Quantity(123));
obs1.getNoteFirstRep().setText("obs1");
obs1.getText().setDivAsString(slug);
obs1.getText().setStatus(Narrative.NarrativeStatus.ADDITIONAL);
IIdType id1 = myObservationDao.create(obs1, mySrd).getId().toUnqualifiedVersionless();
Observation obs2 = new Observation();
obs2.getCode().setText("Diastolic Blood Pressure");
obs2.setStatus(Observation.ObservationStatus.FINAL);
obs2.setValue(new Quantity(81));
obs2.getText().setDivAsString("diastolic blood pressure");
obs2.getText().setStatus(Narrative.NarrativeStatus.ADDITIONAL);
myObservationDao.create(obs2, mySrd).getId().toUnqualifiedVersionless();
SearchParameterMap map;
{ //_text
//With :contains
map = new SearchParameterMap();
map.add(Constants.PARAM_TEXT, new SpecialParam().setValue(slug).setContains(true));
assertThat(toUnqualifiedVersionlessIdValues(myObservationDao.search(map))).containsExactlyInAnyOrder(toValues(id1));
//Without :contains
map = new SearchParameterMap();
map.add(Constants.PARAM_TEXT, new SpecialParam().setValue(slug));
assertThat(toUnqualifiedVersionlessIdValues(myObservationDao.search(map))).isEmpty();
}
}
@Test
public void testLudicrouslyLongNarrative() throws IOException {
String slug = "myveryveryveryveryveryveryveryveryveryeryveryveryveryveryveryveryveryveryeryveryveryveryveryveryveryveryveryeryveryveryveryveryveryveryveryveryeryveryveryveryveryveryveryveryverylongemailaddress@hotmail.com";
Observation obs1 = new Observation();
obs1.getCode().setText("Systolic Blood Pressure");
obs1.setStatus(Observation.ObservationStatus.FINAL);
obs1.setValue(new Quantity(123));
obs1.getNoteFirstRep().setText("obs1");
obs1.getText().setDivAsString(get15000CharacterNarrativeIncludingSlugAtStart(slug));
obs1.getText().setStatus(Narrative.NarrativeStatus.ADDITIONAL);
IIdType id1 = myObservationDao.create(obs1, mySrd).getId().toUnqualifiedVersionless();
Observation obs2 = new Observation();
obs2.getCode().setText("Diastolic Blood Pressure");
obs2.setStatus(Observation.ObservationStatus.FINAL);
obs2.setValue(new Quantity(81));
obs2.getText().setDivAsString("diastolic blood pressure");
obs2.getText().setStatus(Narrative.NarrativeStatus.ADDITIONAL);
IIdType id2 = myObservationDao.create(obs2, mySrd).getId().toUnqualifiedVersionless();
Observation obs3 = new Observation();
obs3.getCode().setText("Systolic Blood Pressure");
obs3.setStatus(Observation.ObservationStatus.FINAL);
obs3.setValue(new Quantity(323));
obs3.getNoteFirstRep().setText("obs3");
obs3.getText().setDivAsString(get15000CharacterNarrativeIncludingSlugAtEnd(slug));
obs3.getText().setStatus(Narrative.NarrativeStatus.ADDITIONAL);
IIdType id3 = myObservationDao.create(obs3, mySrd).getId().toUnqualifiedVersionless();
SearchParameterMap map;
{ //_text works as a special param
map = new SearchParameterMap();
map.add(Constants.PARAM_TEXT, new SpecialParam().setValue(slug).setContains(true));
assertThat(toUnqualifiedVersionlessIdValues(myObservationDao.search(map))).containsExactlyInAnyOrder(toValues(id1, id3));
map = new SearchParameterMap();
map.add(Constants.PARAM_TEXT, new SpecialParam().setValue("blood").setContains(true));
assertThat(toUnqualifiedVersionlessIdValues(myObservationDao.search(map))).containsExactlyInAnyOrder(toValues(id2));
}
{ //_text works as a string param
map = new SearchParameterMap();
map.add(Constants.PARAM_TEXT, new StringParam(slug).setContains(true));
assertThat(toUnqualifiedVersionlessIdValues(myObservationDao.search(map))).containsExactlyInAnyOrder(toValues(id1, id3));
map = new SearchParameterMap();
map.add(Constants.PARAM_TEXT, new StringParam("blood").setContains(true));
assertThat(toUnqualifiedVersionlessIdValues(myObservationDao.search(map))).containsExactlyInAnyOrder(toValues(id2));
}
}
private String get15000CharacterNarrativeIncludingSlugAtEnd(String theSlug) {
StringBuilder builder = new StringBuilder();
int remainingNarrativeLength = 15000 - theSlug.length();
builder.append(RandomStringUtils.randomAlphanumeric(remainingNarrativeLength));
builder.append(" ");
builder.append(theSlug);
return builder.toString();
}
private String get15000CharacterNarrativeIncludingSlugAtStart(String theSlug) {
StringBuilder builder = new StringBuilder();
int remainingNarrativeLength = 15000 - theSlug.length();
builder.append(theSlug);
builder.append(" ");
builder.append(RandomStringUtils.randomAlphanumeric(remainingNarrativeLength));
return builder.toString();
}
@Test
public void testResourceReferenceSearch() {
IIdType patId, encId, obsId;
@ -742,19 +933,21 @@ public class FhirResourceDaoR4SearchWithElasticSearchIT extends BaseJpaTest impl
*/
@Test
public void testDirectPathWholeResourceNotIndexedWorks() {
// setup
myLogbackLevelOverrideExtension.setLogLevel(SearchBuilder.class, Level.WARN);
IIdType id1 = myTestDataBuilder.createObservation(List.of(myTestDataBuilder.withObservationCode("http://example.com/", "theCode")));
// set it after creating resource, so search doesn't find it in the index
myStorageSettings.setStoreResourceInHSearchIndex(true);
myCaptureQueriesListener.clear();
List<IBaseResource> result = searchForFastResources("Observation?code=theCode");
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
List<IBaseResource> result = searchForFastResources("Observation?code=theCode&_count=10&_total=accurate");
assertThat(result).hasSize(1);
assertEquals(((Observation) result.get(0)).getIdElement().getIdPart(), id1.getIdPart());
assertThat(myCaptureQueriesListener.getSelectQueriesForCurrentThread().size()).as("JPA search for IDs and for resources").isEqualTo(2);
List<ILoggingEvent> events = myLogbackTestExtension.filterLoggingEventsWithPredicate(e -> e.getLevel() == Level.WARN);
assertFalse(events.isEmpty());
assertTrue(events.stream().anyMatch(e -> e.getFormattedMessage().contains("Some resources were not found in index. Make sure all resources were indexed. Resorting to database search.")));
// restore changed property
JpaStorageSettings defaultConfig = new JpaStorageSettings();

View File

@ -9,9 +9,8 @@ import ca.uhn.fhir.test.utilities.docker.RequiresDocker;
import org.apache.commons.io.IOUtils;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.hamcrest.Description;
import org.hamcrest.Matcher;
import org.hamcrest.TypeSafeDiagnosingMatcher;
import org.assertj.core.api.AbstractAssert;
import org.assertj.core.api.AbstractIterableAssert;
import org.hl7.fhir.instance.model.api.IBaseCoding;
import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.Coding;
@ -33,6 +32,7 @@ import org.springframework.test.context.junit.jupiter.SpringExtension;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.time.Instant;
import java.util.Collection;
import java.util.Date;
import java.util.List;
import java.util.Objects;
@ -91,9 +91,8 @@ public class ResourceProviderR4ElasticTest extends BaseResourceProviderR4Test {
ourLog.info("testAutocompleteDirectionExisting {}", text);
assertNotNull(valueSet);
List<ValueSet.ValueSetExpansionContainsComponent> expansions = valueSet.getExpansion().getContains();
// TODO CHECKSTYLE KHS
// assertThat(expansions, hasItem(valueSetExpansionMatching(mean_blood_pressure)));
// assertThat(expansions).doesNotContain(valueSetExpansionMatching(blood_count));
ValueSetExpansionIterableAssert.assertThat(expansions).hasExpansionWithCoding(mean_blood_pressure);
ValueSetExpansionIterableAssert.assertThat(expansions).doesNotHaveExpansionWithCoding(blood_count);
}
}
@ -110,19 +109,51 @@ public class ResourceProviderR4ElasticTest extends BaseResourceProviderR4Test {
myObservationDao.create(observation, mySrd).getId().toUnqualifiedVersionless();
}
public static Matcher<ValueSet.ValueSetExpansionContainsComponent> valueSetExpansionMatching(IBaseCoding theTarget) {
return new TypeSafeDiagnosingMatcher<ValueSet.ValueSetExpansionContainsComponent>() {
@Override
public void describeTo(Description description) {
description.appendText("ValueSetExpansionContainsComponent matching ").appendValue(theTarget.getSystem() + "|" + theTarget.getCode());
}
public static class ValueSetExpansionAssert extends AbstractAssert<ValueSetExpansionAssert, ValueSet.ValueSetExpansionContainsComponent> {
@Override
protected boolean matchesSafely(ValueSet.ValueSetExpansionContainsComponent theItem, Description mismatchDescription) {
return Objects.equals(theItem.getSystem(), theTarget.getSystem()) &&
Objects.equals(theItem.getCode(), theTarget.getCode());
protected ValueSetExpansionAssert(ValueSet.ValueSetExpansionContainsComponent valueSetExpansionContainsComponent) {
super(valueSetExpansionContainsComponent, ValueSetExpansionAssert.class);
}
}
public static class ValueSetExpansionIterableAssert extends AbstractIterableAssert<ValueSetExpansionIterableAssert, Collection<ValueSet.ValueSetExpansionContainsComponent>, ValueSet.ValueSetExpansionContainsComponent, ValueSetExpansionAssert> {
protected ValueSetExpansionIterableAssert(Collection<ValueSet.ValueSetExpansionContainsComponent> actual) {
super(actual, ValueSetExpansionIterableAssert.class);
}
@Override
protected ValueSetExpansionAssert toAssert(ValueSet.ValueSetExpansionContainsComponent value, String description) {
return new ValueSetExpansionAssert(value).as(description);
}
public static ValueSetExpansionIterableAssert assertThat(Collection<ValueSet.ValueSetExpansionContainsComponent> actual) {
return new ValueSetExpansionIterableAssert(actual);
}
@Override
protected ValueSetExpansionIterableAssert newAbstractIterableAssert(Iterable<? extends ValueSet.ValueSetExpansionContainsComponent> iterable) {
return new ValueSetExpansionIterableAssert((Collection<ValueSet.ValueSetExpansionContainsComponent>) iterable);
}
public ValueSetExpansionIterableAssert hasExpansionWithCoding(IBaseCoding theCoding) {
String otherSystem = theCoding.getSystem();
String otherCode = theCoding.getCode();
boolean hasMatchingExpansion = actual.stream().anyMatch(item -> Objects.equals(item.getSystem(), otherSystem) && Objects.equals(item.getCode(), otherCode));
if (!hasMatchingExpansion) {
failWithMessage("Expansion list should contain an expansion with system " + otherSystem + " and code " + otherCode);
}
return this;
}
public ValueSetExpansionIterableAssert doesNotHaveExpansionWithCoding(IBaseCoding theCoding) {
String otherSystem = theCoding.getSystem();
String otherCode = theCoding.getCode();
boolean hasMatchingExpansion = actual.stream().anyMatch(expansion -> Objects.equals(expansion.getCode(), otherCode) && Objects.equals(expansion.getSystem(), otherSystem));
if (hasMatchingExpansion) {
failWithMessage("Expected not to find a matching expansion, but we found one!");
}
return this;
}
}
@Test

View File

@ -8,5 +8,17 @@
<root level="info">
<appender-ref ref="STDOUT" />
</root>
<!--Uncomment the below if you are doing Elasticsearch debugging -->
<!-- <logger name="org.hibernate.search.elasticsearch.request" additivity="false" level="trace">-->
<!-- <appender-ref ref="STDOUT" />-->
<!-- </logger>-->
<!-- <logger name="org.elasticsearch.client" level="debug" additivity="false">-->
<!-- <appender-ref ref="STDOUT" />-->
<!-- </logger>-->
<!-- <logger name="tracer" level="TRACE" additivity="false">-->
<!-- <appender-ref ref="STDOUT" />-->
<!-- </logger>-->
</configuration>

View File

@ -3,7 +3,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.3.7-SNAPSHOT</version>
<version>7.3.8-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -3,7 +3,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.3.7-SNAPSHOT</version>
<version>7.3.8-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.3.7-SNAPSHOT</version>
<version>7.3.8-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -12,12 +12,7 @@ import ca.uhn.fhir.jpa.mdm.config.MdmSubmitterConfig;
import ca.uhn.fhir.jpa.mdm.config.TestMdmConfigR4;
import ca.uhn.fhir.jpa.mdm.dao.MdmLinkDaoSvc;
import ca.uhn.fhir.jpa.mdm.helper.MdmLinkHelper;
import ca.uhn.fhir.jpa.mdm.matcher.IsLinkedTo;
import ca.uhn.fhir.jpa.mdm.matcher.IsMatchedToAGoldenResource;
import ca.uhn.fhir.jpa.mdm.matcher.IsPossibleDuplicateOf;
import ca.uhn.fhir.jpa.mdm.matcher.IsPossibleLinkedTo;
import ca.uhn.fhir.jpa.mdm.matcher.IsPossibleMatchWith;
import ca.uhn.fhir.jpa.mdm.matcher.IsSameGoldenResourceAs;
import ca.uhn.fhir.jpa.mdm.matcher.GoldenResourceMatchingAssert;
import ca.uhn.fhir.jpa.mdm.svc.MdmMatchLinkSvc;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
@ -45,8 +40,6 @@ import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import org.apache.commons.lang3.NotImplementedException;
import org.apache.commons.lang3.StringUtils;
import org.hamcrest.Description;
import org.hamcrest.Matcher;
import org.hl7.fhir.instance.model.api.IAnyResource;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.r4.model.*;
@ -159,6 +152,9 @@ abstract public class BaseMdmR4Test extends BaseJpaR4Test {
myMdmLinkDaoSvc.save(theMdmLink);
}
protected GoldenResourceMatchingAssert mdmAssertThat(IAnyResource theResource) {
return GoldenResourceMatchingAssert.assertThat(theResource, myIdHelperService, myMdmLinkDaoSvc);
}
@Nonnull
protected Patient createGoldenPatient() {
return createPatient(new Patient(), true, false);
@ -504,54 +500,6 @@ abstract public class BaseMdmR4Test extends BaseJpaR4Test {
return thePractitioner;
}
private Matcher<IAnyResource> wrapMatcherInTransaction(Supplier<Matcher<IAnyResource>> theFunction) {
return new Matcher<IAnyResource>() {
@Override
public boolean matches(Object actual) {
return runInTransaction(() -> theFunction.get().matches(actual));
}
@Override
public void describeMismatch(Object actual, Description mismatchDescription) {
runInTransaction(() -> theFunction.get().describeMismatch(actual, mismatchDescription));
}
@Override
public void _dont_implement_Matcher___instead_extend_BaseMatcher_() {
}
@Override
public void describeTo(Description description) {
runInTransaction(() -> theFunction.get().describeTo(description));
}
};
}
protected Matcher<IAnyResource> sameGoldenResourceAs(IAnyResource... theBaseResource) {
return wrapMatcherInTransaction(() -> IsSameGoldenResourceAs.sameGoldenResourceAs(myIdHelperService, myMdmLinkDaoSvc, theBaseResource));
}
protected Matcher<IAnyResource> linkedTo(IAnyResource... theBaseResource) {
return wrapMatcherInTransaction(() -> IsLinkedTo.linkedTo(myIdHelperService, myMdmLinkDaoSvc, theBaseResource));
}
protected Matcher<IAnyResource> possibleLinkedTo(IAnyResource... theBaseResource) {
return wrapMatcherInTransaction(() -> IsPossibleLinkedTo.possibleLinkedTo(myIdHelperService, myMdmLinkDaoSvc, theBaseResource));
}
protected Matcher<IAnyResource> possibleMatchWith(IAnyResource... theBaseResource) {
return wrapMatcherInTransaction(() -> IsPossibleMatchWith.possibleMatchWith(myIdHelperService, myMdmLinkDaoSvc, theBaseResource));
}
protected Matcher<IAnyResource> possibleDuplicateOf(IAnyResource... theBaseResource) {
return wrapMatcherInTransaction(() -> IsPossibleDuplicateOf.possibleDuplicateOf(myIdHelperService, myMdmLinkDaoSvc, theBaseResource));
}
protected Matcher<IAnyResource> matchedToAGoldenResource() {
return wrapMatcherInTransaction(() -> IsMatchedToAGoldenResource.matchedToAGoldenResource(myIdHelperService, myMdmLinkDaoSvc));
}
protected Patient getOnlyGoldenPatient() {
List<IBaseResource> resources = getAllGoldenPatients();
assertEquals(1, resources.size());

View File

@ -6,6 +6,7 @@ import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
import ca.uhn.fhir.mdm.model.mdmevents.MdmLinkEvent;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import ca.uhn.fhir.rest.server.TransactionLogMessages;
import ca.uhn.fhir.rest.server.messaging.ResourceOperationMessage;
import ca.uhn.test.concurrency.PointcutLatch;
@ -51,7 +52,7 @@ public class MdmHelperR4 extends BaseMdmHelper {
String resourceType = myFhirContext.getResourceType(theResource);
IFhirResourceDao<IBaseResource> dao = myDaoRegistry.getResourceDao(resourceType);
return isExternalHttpRequest ? dao.create(theResource, myMockSrd): dao.create(theResource);
return isExternalHttpRequest ? dao.create(theResource, myMockSrd): dao.create(theResource, new SystemRequestDetails());
}
public DaoMethodOutcome doUpdateResource(IBaseResource theResource, boolean isExternalHttpRequest) {

View File

@ -2,6 +2,7 @@ package ca.uhn.fhir.jpa.mdm.interceptor;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
import ca.uhn.fhir.jpa.api.model.DeleteMethodOutcome;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
import ca.uhn.fhir.jpa.entity.MdmLink;
import ca.uhn.fhir.jpa.mdm.BaseMdmR4Test;
@ -16,6 +17,7 @@ import ca.uhn.fhir.mdm.model.CanonicalEID;
import ca.uhn.fhir.mdm.model.MdmCreateOrUpdateParams;
import ca.uhn.fhir.mdm.model.MdmTransactionContext;
import ca.uhn.fhir.mdm.rules.config.MdmSettings;
import ca.uhn.fhir.model.api.TemporalPrecisionEnum;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
@ -27,6 +29,8 @@ import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import org.hl7.fhir.instance.model.api.IAnyResource;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.ContactPoint;
import org.hl7.fhir.r4.model.DateType;
import org.hl7.fhir.r4.model.Enumerations;
import org.hl7.fhir.r4.model.Medication;
import org.hl7.fhir.r4.model.Organization;
@ -34,11 +38,14 @@ import org.hl7.fhir.r4.model.Patient;
import org.hl7.fhir.r4.model.SearchParameter;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.RegisterExtension;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;
import org.slf4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Example;
import org.springframework.test.context.ContextConfiguration;
import java.util.Collections;
import java.util.Date;
import java.util.List;
@ -50,6 +57,7 @@ import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
import static org.slf4j.LoggerFactory.getLogger;
@ -101,6 +109,70 @@ public class MdmStorageInterceptorIT extends BaseMdmR4Test {
assertLinkCount(0);
}
@ParameterizedTest
@ValueSource(booleans = { true, false })
public void deleteResourcesByUrl_withMultipleDeleteCatchingSourceAndGoldenResource_deletesWithoutThrowing(boolean theIncludeOtherResources) throws InterruptedException {
// setup
boolean allowMultipleDelete = myStorageSettings.isAllowMultipleDelete();
myStorageSettings.setAllowMultipleDelete(true);
int linkCount = 0;
int resourceCount = 0;
myMdmHelper.createWithLatch(buildJanePatient());
resourceCount += 2; // patient + golden
linkCount++;
// add some other resources to make it more complex
if (theIncludeOtherResources) {
Date birthday = new Date();
Patient patient = new Patient();
patient.getNameFirstRep().addGiven("yui");
patient.setBirthDate(birthday);
patient.setTelecom(Collections.singletonList(new ContactPoint()
.setSystem(ContactPoint.ContactPointSystem.PHONE)
.setValue("555-567-5555")));
DateType dateType = new DateType(birthday);
patient.addIdentifier().setSystem(TEST_ID_SYSTEM).setValue("ID.YUI.123");
dateType.setPrecision(TemporalPrecisionEnum.DAY);
patient.setBirthDateElement(dateType);
patient.setActive(true);
for (int i = 0; i < 2; i++) {
String familyName = i == 0 ? "hirasawa" : "kotegawa";
patient.getNameFirstRep().setFamily(familyName);
myMdmHelper.createWithLatch(patient);
resourceCount++;
linkCount++; // every resource creation creates 1 link
}
resourceCount++; // for the Golden Resource
// verify we have at least this many resources
SearchParameterMap map = new SearchParameterMap();
map.setLoadSynchronous(true);
IBundleProvider provider = myPatientDao.search(map, new SystemRequestDetails());
assertEquals(resourceCount, provider.size());
// verify we have the links
assertEquals(linkCount, myMdmLinkDao.count());
}
try {
// test
// filter will delete everything
DeleteMethodOutcome outcome = myPatientDao.deleteByUrl("Patient?_lastUpdated=ge2024-01-01", new SystemRequestDetails());
// validation
assertNotNull(outcome);
List<MdmLink> links = myMdmLinkDao.findAll();
assertTrue(links.isEmpty());
SearchParameterMap map = new SearchParameterMap();
map.setLoadSynchronous(true);
IBundleProvider provider = myPatientDao.search(map, new SystemRequestDetails());
assertTrue(provider.getAllResources().isEmpty());
} finally {
myStorageSettings.setAllowMultipleDelete(allowMultipleDelete);
}
}
@Test
public void testGoldenResourceDeleted_whenOnlyMatchedResourceDeleted() throws InterruptedException {
// Given


@ -1,82 +0,0 @@
package ca.uhn.fhir.jpa.mdm.matcher;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
import ca.uhn.fhir.jpa.mdm.dao.MdmLinkDaoSvc;
import ca.uhn.fhir.mdm.api.IMdmLink;
import ca.uhn.fhir.mdm.api.MdmMatchResultEnum;
import ca.uhn.fhir.mdm.util.MdmResourceUtil;
import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId;
import jakarta.annotation.Nullable;
import org.hamcrest.TypeSafeMatcher;
import org.hl7.fhir.instance.model.api.IAnyResource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.stream.Collectors;
public abstract class BaseGoldenResourceMatcher extends TypeSafeMatcher<IAnyResource> {
private static final Logger ourLog = LoggerFactory.getLogger(BaseGoldenResourceMatcher.class);
protected IIdHelperService myIdHelperService;
protected MdmLinkDaoSvc myMdmLinkDaoSvc;
protected Collection<IAnyResource> myBaseResources;
protected String myTargetType;
protected BaseGoldenResourceMatcher(IIdHelperService theIdHelperService, MdmLinkDaoSvc theMdmLinkDaoSvc, IAnyResource... theBaseResource) {
myIdHelperService = theIdHelperService;
myMdmLinkDaoSvc = theMdmLinkDaoSvc;
myBaseResources = Arrays.stream(theBaseResource).collect(Collectors.toList());
}
@Nullable
protected IResourcePersistentId getMatchedResourcePidFromResource(IAnyResource theResource) {
IResourcePersistentId retval;
boolean isGoldenRecord = MdmResourceUtil.isMdmManaged(theResource);
if (isGoldenRecord) {
return myIdHelperService.getPidOrNull(RequestPartitionId.allPartitions(), theResource);
}
IMdmLink matchLink = getMatchedMdmLink(theResource);
if (matchLink == null) {
return null;
} else {
retval = matchLink.getGoldenResourcePersistenceId();
myTargetType = matchLink.getMdmSourceType();
}
return retval;
}
protected List<IResourcePersistentId> getPossibleMatchedGoldenResourcePidsFromTarget(IAnyResource theBaseResource) {
return getMdmLinksForTarget(theBaseResource, MdmMatchResultEnum.POSSIBLE_MATCH)
.stream()
.map(IMdmLink::getGoldenResourcePersistenceId).collect(Collectors.toList());
}
protected IMdmLink getMatchedMdmLink(IAnyResource thePatientOrPractitionerResource) {
List<? extends IMdmLink> mdmLinks = getMdmLinksForTarget(thePatientOrPractitionerResource, MdmMatchResultEnum.MATCH);
if (mdmLinks.size() == 0) {
return null;
} else if (mdmLinks.size() == 1) {
return mdmLinks.get(0);
} else {
throw new IllegalStateException("Its illegal to have more than 1 match for a given target! we found " + mdmLinks.size() + " for resource with id: " + thePatientOrPractitionerResource.getIdElement().toUnqualifiedVersionless());
}
}
protected List<? extends IMdmLink> getMdmLinksForTarget(IAnyResource theTargetResource, MdmMatchResultEnum theMatchResult) {
IResourcePersistentId pidOrNull = myIdHelperService.getPidOrNull(RequestPartitionId.allPartitions(), theTargetResource);
List<? extends IMdmLink> matchLinkForTarget = myMdmLinkDaoSvc.getMdmLinksBySourcePidAndMatchResult(pidOrNull, theMatchResult);
if (!matchLinkForTarget.isEmpty()) {
return matchLinkForTarget;
} else {
return new ArrayList<>();
}
}
}


@ -0,0 +1,215 @@
package ca.uhn.fhir.jpa.mdm.matcher;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
import ca.uhn.fhir.jpa.mdm.dao.MdmLinkDaoSvc;
import ca.uhn.fhir.mdm.api.IMdmLink;
import ca.uhn.fhir.mdm.api.MdmMatchResultEnum;
import ca.uhn.fhir.mdm.util.MdmResourceUtil;
import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId;
import jakarta.annotation.Nullable;
import org.assertj.core.api.AbstractAssert;
import org.hl7.fhir.instance.model.api.IAnyResource;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
/**
* Assertion class for asserting matching of golden resources.
*/
public class GoldenResourceMatchingAssert extends AbstractAssert<GoldenResourceMatchingAssert, IAnyResource> {
private IResourcePersistentId actualGoldenResourcePid;
private IResourcePersistentId actualSourceResourcePid;
private IIdHelperService myIdHelperService;
private MdmLinkDaoSvc myMdmLinkDaoSvc;
protected GoldenResourceMatchingAssert(IAnyResource actual, IIdHelperService theIdHelperService, MdmLinkDaoSvc theMdmLinkDaoSvc) {
super(actual, GoldenResourceMatchingAssert.class);
myIdHelperService = theIdHelperService;
myMdmLinkDaoSvc = theMdmLinkDaoSvc;
actualGoldenResourcePid = getGoldenResourcePid(actual);
actualSourceResourcePid = myIdHelperService.getPidOrNull(RequestPartitionId.allPartitions(), actual);
}
public static GoldenResourceMatchingAssert assertThat(IAnyResource actual, IIdHelperService theIdHelperService, MdmLinkDaoSvc theMdmLinkDaoSvc) {
return new GoldenResourceMatchingAssert(actual, theIdHelperService, theMdmLinkDaoSvc);
}
// Asserts that this resource and the other resource are MATCHed to the same golden resource.
public GoldenResourceMatchingAssert is_MATCH_to(IAnyResource other) {
IResourcePersistentId otherGoldenPid = getGoldenResourcePid(other);
if (actualGoldenResourcePid == null || !actualGoldenResourcePid.equals(otherGoldenPid)) {
failWithActualExpectedAndMessage(actualGoldenResourcePid, otherGoldenPid, "Did not match golden resource pids!");
}
return this;
}
public GoldenResourceMatchingAssert is_not_MATCH_to(IAnyResource other) {
IResourcePersistentId otherGoldenPid = getGoldenResourcePid(other);
if (actualGoldenResourcePid != null && actualGoldenResourcePid.equals(otherGoldenPid)) {
failWithActualExpectedAndMessage(actualGoldenResourcePid, otherGoldenPid, "Matched when it should not have!");
}
return this;
}
public GoldenResourceMatchingAssert is_NO_MATCH_to(IAnyResource other) {
IResourcePersistentId otherGoldenPid = getGoldenResourcePid(other);
if (actualGoldenResourcePid != null && actualGoldenResourcePid.equals(otherGoldenPid)) {
failWithActualExpectedAndMessage(actualGoldenResourcePid, otherGoldenPid, "Both resources are linked to the same Golden pid!");
}
return this;
}
public GoldenResourceMatchingAssert is_POSSIBLE_MATCH_to(IAnyResource other) {
boolean possibleMatch = hasPossibleMatchWith(other);
if (!possibleMatch) {
failWithMessage("No POSSIBLE_MATCH between these two resources.");
}
return this;
}
private boolean hasPossibleMatchWith(IAnyResource other) {
IResourcePersistentId otherSourcePid = myIdHelperService.getPidOrNull(RequestPartitionId.allPartitions(), other);
IResourcePersistentId otherGoldenPid = getGoldenResourcePid(other);
//Check for direct matches in either direction.
// A POSSIBLE_MATCH -> B
if (actualGoldenResourcePid != null) {
Optional directForwardLink = myMdmLinkDaoSvc.getMdmLinksByGoldenResourcePidSourcePidAndMatchResult(actualGoldenResourcePid, otherSourcePid, MdmMatchResultEnum.POSSIBLE_MATCH);
if (directForwardLink.isPresent()) {
return true;
}
}
// B -> POSSIBLE_MATCH -> A
if (otherGoldenPid != null) {
Optional directBackwardLink = myMdmLinkDaoSvc.getMdmLinksByGoldenResourcePidSourcePidAndMatchResult(otherGoldenPid, actualSourceResourcePid, MdmMatchResultEnum.POSSIBLE_MATCH);
if (directBackwardLink.isPresent()) {
return true;
}
}
// Check for indirect possible matches, e.g.
// A -> POSSIBLE_MATCH -> B
// C -> POSSIBLE_MATCH -> B
// this implies
// A -> POSSIBLE_MATCH -> C
Set<IResourcePersistentId> goldenPids = new HashSet<>();
List<? extends IMdmLink> possibleLinksForOther = myMdmLinkDaoSvc.getMdmLinksBySourcePidAndMatchResult(otherSourcePid, MdmMatchResultEnum.POSSIBLE_MATCH);
Set<IResourcePersistentId> otherPossibles = possibleLinksForOther.stream().map(IMdmLink::getGoldenResourcePersistenceId).collect(Collectors.toSet());
goldenPids.addAll(otherPossibles);
// Compare against all possible matches from the actual. If the two sets of golden pids collide, we know the implied POSSIBLE_MATCH exists.
List<? extends IMdmLink> possibleLinksForActual = myMdmLinkDaoSvc.getMdmLinksBySourcePidAndMatchResult(actualSourceResourcePid, MdmMatchResultEnum.POSSIBLE_MATCH);
Set<IResourcePersistentId> actualPossiblePids = possibleLinksForActual.stream().map(IMdmLink::getGoldenResourcePersistenceId).collect(Collectors.toSet());
return isPossibleMatch(actualPossiblePids, goldenPids);
}
private static boolean isPossibleMatch(Set<IResourcePersistentId> matchedPids, Set<IResourcePersistentId> goldenPids) {
// The implied POSSIBLE_MATCH exists if the two sets of golden resource pids intersect.
for (IResourcePersistentId pid : matchedPids) {
if (goldenPids.contains(pid)) {
return true;
}
}
return false;
}
public boolean possibleDuplicateLinkExistsBetween(IResourcePersistentId goldenPid1, IResourcePersistentId goldenPid2) {
Optional possibleForwardsLink = myMdmLinkDaoSvc.getMdmLinksByGoldenResourcePidSourcePidAndMatchResult(goldenPid1, goldenPid2, MdmMatchResultEnum.POSSIBLE_DUPLICATE);
Optional possibleBackwardsLink = myMdmLinkDaoSvc.getMdmLinksByGoldenResourcePidSourcePidAndMatchResult(goldenPid2, goldenPid1, MdmMatchResultEnum.POSSIBLE_DUPLICATE);
return possibleBackwardsLink.isPresent() || possibleForwardsLink.isPresent();
}
public GoldenResourceMatchingAssert is_POSSIBLE_DUPLICATE_to(IAnyResource other) {
IResourcePersistentId otherGoldenPid = getGoldenResourcePid(other);
if (actualGoldenResourcePid == null || otherGoldenPid == null) {
failWithMessage("For a POSSIBLE_DUPLICATE, both resources must have a MATCH. This is not the case for these resources.");
}
boolean possibleDuplicateExists = possibleDuplicateLinkExistsBetween(actualGoldenResourcePid, otherGoldenPid);
if (!possibleDuplicateExists) {
failWithActualExpectedAndMessage("No POSSIBLE_DUPLICATE found between " + actualGoldenResourcePid + " and " + otherGoldenPid,
"POSSIBLE_DUPLICATE found between " + actualGoldenResourcePid + " and " + otherGoldenPid,
"No POSSIBLE_DUPLICATE links were found between golden resources");
}
return this;
}
@Nullable
protected IResourcePersistentId getGoldenResourcePid(IAnyResource theResource) {
IResourcePersistentId retval;
boolean isGoldenRecord = MdmResourceUtil.isMdmManaged(theResource);
if (isGoldenRecord) {
return myIdHelperService.getPidOrNull(RequestPartitionId.allPartitions(), theResource);
}
IMdmLink matchLink = getMatchedMdmLink(theResource);
if (matchLink == null) {
return null;
} else {
retval = matchLink.getGoldenResourcePersistenceId();
}
return retval;
}
protected IMdmLink getMatchedMdmLink(IAnyResource thePatientOrPractitionerResource) {
List<? extends IMdmLink> mdmLinks = getMdmLinksForTarget(thePatientOrPractitionerResource, MdmMatchResultEnum.MATCH);
if (mdmLinks.size() == 0) {
return null;
} else if (mdmLinks.size() == 1) {
return mdmLinks.get(0);
} else {
throw new IllegalStateException("Its illegal to have more than 1 match for a given target! we found " + mdmLinks.size() + " for resource with id: " + thePatientOrPractitionerResource.getIdElement().toUnqualifiedVersionless());
}
}
protected List<? extends IMdmLink> getMdmLinksForTarget(IAnyResource theTargetResource, MdmMatchResultEnum theMatchResult) {
IResourcePersistentId pidOrNull = myIdHelperService.getPidOrNull(RequestPartitionId.allPartitions(), theTargetResource);
List<? extends IMdmLink> matchLinkForTarget = myMdmLinkDaoSvc.getMdmLinksBySourcePidAndMatchResult(pidOrNull, theMatchResult);
if (!matchLinkForTarget.isEmpty()) {
return matchLinkForTarget;
} else {
return new ArrayList<>();
}
}
public GoldenResourceMatchingAssert hasGoldenResourceMatch() {
IResourcePersistentId goldenPid = getGoldenResourcePid(actual);
if (goldenPid == null) {
failWithMessage("Expected resource to be matched to a golden resource. Found no such matches.");
}
return this;
}
public GoldenResourceMatchingAssert doesNotHaveGoldenResourceMatch() {
IResourcePersistentId goldenPid = getGoldenResourcePid(actual);
if (goldenPid != null) {
failWithMessage("Expected resource to have no golden resource match, but it did.");
}
return this;
}
public GoldenResourceMatchingAssert is_not_POSSIBLE_DUPLICATE_to(IAnyResource other) {
IResourcePersistentId otherGoldenResourcePid = getGoldenResourcePid(other);
boolean possibleDuplicateExists = possibleDuplicateLinkExistsBetween(actualGoldenResourcePid, otherGoldenResourcePid);
if (possibleDuplicateExists) {
failWithMessage("Possible duplicate exists between both resources!");
}
return this;
}
}
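A minimal usage sketch of the new fluent assert, assuming a test class with an injected myIdHelperService and myMdmLinkDaoSvc and two previously created patients (the patient variable names are hypothetical):

GoldenResourceMatchingAssert.assertThat(patient1, myIdHelperService, myMdmLinkDaoSvc)
.hasGoldenResourceMatch()
.is_MATCH_to(patient2)
.is_not_POSSIBLE_DUPLICATE_to(patient2);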


@ -1,48 +0,0 @@
package ca.uhn.fhir.jpa.mdm.matcher;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
import ca.uhn.fhir.jpa.mdm.dao.MdmLinkDaoSvc;
import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId;
import org.hamcrest.Description;
import org.hamcrest.Matcher;
import org.hl7.fhir.instance.model.api.IAnyResource;
import java.util.List;
import java.util.stream.Collectors;
/**
* A Matcher which allows us to check that a target patient/practitioner at a given link level
* is linked to a set of patients/practitioners via a golden resource.
*/
public class IsLinkedTo extends BaseGoldenResourceMatcher {
private List<IResourcePersistentId> baseResourceGoldenResourcePids;
private IResourcePersistentId incomingResourceGoldenResourcePid;
protected IsLinkedTo(IIdHelperService theIdHelperService, MdmLinkDaoSvc theMdmLinkDaoSvc, IAnyResource... theBaseResource) {
super(theIdHelperService, theMdmLinkDaoSvc, theBaseResource);
}
@Override
protected boolean matchesSafely(IAnyResource theIncomingResource) {
incomingResourceGoldenResourcePid = getMatchedResourcePidFromResource(theIncomingResource);
//OK, let's grab all the golden resource pids of the resources passed in via the constructor.
baseResourceGoldenResourcePids = myBaseResources.stream()
.map(this::getMatchedResourcePidFromResource)
.collect(Collectors.toList());
//The resources are linked if all golden resource pids match the incoming golden resource pid.
return baseResourceGoldenResourcePids.stream()
.allMatch(pid -> pid.equals(incomingResourceGoldenResourcePid));
}
@Override
public void describeTo(Description theDescription) {
}
public static Matcher<IAnyResource> linkedTo(IIdHelperService theIdHelperService, MdmLinkDaoSvc theMdmLinkDaoSvc, IAnyResource... theBaseResource) {
return new IsLinkedTo(theIdHelperService, theMdmLinkDaoSvc, theBaseResource);
}
}


@ -1,38 +0,0 @@
package ca.uhn.fhir.jpa.mdm.matcher;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
import ca.uhn.fhir.jpa.mdm.dao.MdmLinkDaoSvc;
import ca.uhn.fhir.mdm.api.IMdmLink;
import org.hamcrest.Description;
import org.hamcrest.Matcher;
import org.hamcrest.TypeSafeMatcher;
import org.hl7.fhir.instance.model.api.IAnyResource;
import java.util.Optional;
public class IsMatchedToAGoldenResource extends TypeSafeMatcher<IAnyResource> {
private final IIdHelperService myIdHelperService;
private final MdmLinkDaoSvc myMdmLinkDaoSvc;
public IsMatchedToAGoldenResource(IIdHelperService theIdHelperService, MdmLinkDaoSvc theMdmLinkDaoSvc) {
myIdHelperService = theIdHelperService;
myMdmLinkDaoSvc = theMdmLinkDaoSvc;
}
@Override
protected boolean matchesSafely(IAnyResource theIncomingResource) {
Optional<? extends IMdmLink> matchedLinkForTargetPid = myMdmLinkDaoSvc.getMatchedLinkForSourcePid(myIdHelperService.getPidOrNull(RequestPartitionId.allPartitions(), theIncomingResource));
return matchedLinkForTargetPid.isPresent();
}
@Override
public void describeTo(Description theDescription) {
theDescription.appendText("target was not linked to a Golden Resource.");
}
public static Matcher<IAnyResource> matchedToAGoldenResource(IIdHelperService theIdHelperService, MdmLinkDaoSvc theMdmLinkDaoSvc) {
return new IsMatchedToAGoldenResource(theIdHelperService, theMdmLinkDaoSvc);
}
}


@ -1,62 +0,0 @@
package ca.uhn.fhir.jpa.mdm.matcher;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
import ca.uhn.fhir.jpa.mdm.dao.MdmLinkDaoSvc;
import ca.uhn.fhir.mdm.api.IMdmLink;
import ca.uhn.fhir.mdm.api.MdmMatchResultEnum;
import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId;
import org.hamcrest.Description;
import org.hamcrest.Matcher;
import org.hl7.fhir.instance.model.api.IAnyResource;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
public class IsPossibleDuplicateOf extends BaseGoldenResourceMatcher {
/**
* Matcher which tells us if there is an MdmLink between these two resources that is considered POSSIBLE_DUPLICATE.
* For use only on GoldenResource.
*/
private IResourcePersistentId incomingGoldenResourcePid;
protected IsPossibleDuplicateOf(IIdHelperService theIdHelperService, MdmLinkDaoSvc theMdmLinkDaoSvc, IAnyResource... theBaseResource) {
super(theIdHelperService, theMdmLinkDaoSvc, theBaseResource);
}
@Override
protected boolean matchesSafely(IAnyResource theIncomingResource) {
incomingGoldenResourcePid = getMatchedResourcePidFromResource(theIncomingResource);
List<IResourcePersistentId> goldenResourcePidsToMatch = myBaseResources.stream()
.map(this::getMatchedResourcePidFromResource)
.collect(Collectors.toList());
//Returns true if there is a POSSIBLE_DUPLICATE between the incoming resource and all of the resources passed in via the constructor.
return goldenResourcePidsToMatch.stream()
.map(baseResourcePid -> {
Optional<? extends IMdmLink> duplicateLink = myMdmLinkDaoSvc.getMdmLinksByGoldenResourcePidSourcePidAndMatchResult(baseResourcePid, incomingGoldenResourcePid, MdmMatchResultEnum.POSSIBLE_DUPLICATE);
if (!duplicateLink.isPresent()) {
duplicateLink = myMdmLinkDaoSvc.getMdmLinksByGoldenResourcePidSourcePidAndMatchResult(incomingGoldenResourcePid, baseResourcePid, MdmMatchResultEnum.POSSIBLE_DUPLICATE);
}
return duplicateLink;
}).allMatch(Optional::isPresent);
}
@Override
public void describeTo(Description theDescription) {
theDescription.appendText("Resource was not duplicate of Resource/" + incomingGoldenResourcePid);
}
@Override
protected void describeMismatchSafely(IAnyResource item, Description mismatchDescription) {
super.describeMismatchSafely(item, mismatchDescription);
mismatchDescription.appendText("No MdmLink With POSSIBLE_DUPLICATE was found");
}
public static Matcher<IAnyResource> possibleDuplicateOf(IIdHelperService theIdHelperService, MdmLinkDaoSvc theMdmLinkDaoSvc, IAnyResource... theBaseResource) {
return new IsPossibleDuplicateOf(theIdHelperService, theMdmLinkDaoSvc, theBaseResource);
}
}


@ -1,48 +0,0 @@
package ca.uhn.fhir.jpa.mdm.matcher;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
import ca.uhn.fhir.jpa.mdm.dao.MdmLinkDaoSvc;
import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId;
import org.hamcrest.Description;
import org.hamcrest.Matcher;
import org.hl7.fhir.instance.model.api.IAnyResource;
import java.util.List;
import java.util.stream.Collectors;
/**
* A Matcher which allows us to check that a target resource at a given link level
* is linked to a set of target resources via a golden resource.
*/
public class IsPossibleLinkedTo extends BaseGoldenResourceMatcher {
private List<IResourcePersistentId> baseResourceGoldenResourcePids;
private IResourcePersistentId incomingResourceGoldenResourcePid;
protected IsPossibleLinkedTo(IIdHelperService theIdHelperService, MdmLinkDaoSvc theMdmLinkDaoSvc, IAnyResource... theTargetResources) {
super(theIdHelperService, theMdmLinkDaoSvc, theTargetResources);
}
@Override
protected boolean matchesSafely(IAnyResource theGoldenResource) {
incomingResourceGoldenResourcePid = myIdHelperService.getPidOrNull(RequestPartitionId.allPartitions(), theGoldenResource);
//OK, let's grab all the golden resource pids of the resources passed in via the constructor.
baseResourceGoldenResourcePids = myBaseResources.stream()
.flatMap(iBaseResource -> getPossibleMatchedGoldenResourcePidsFromTarget(iBaseResource).stream())
.collect(Collectors.toList());
//The resources are linked if all golden resource pids match the incoming golden resource pid.
return baseResourceGoldenResourcePids.stream()
.allMatch(pid -> pid.equals(incomingResourceGoldenResourcePid));
}
@Override
public void describeTo(Description theDescription) {
}
public static Matcher<IAnyResource> possibleLinkedTo(IIdHelperService theIdHelperService, MdmLinkDaoSvc theMdmLinkDaoSvc, IAnyResource... theBaseResource) {
return new IsPossibleLinkedTo(theIdHelperService, theMdmLinkDaoSvc, theBaseResource);
}
}


@ -1,61 +0,0 @@
package ca.uhn.fhir.jpa.mdm.matcher;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
import ca.uhn.fhir.jpa.mdm.dao.MdmLinkDaoSvc;
import ca.uhn.fhir.mdm.api.IMdmLink;
import ca.uhn.fhir.mdm.api.MdmMatchResultEnum;
import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId;
import org.hamcrest.Description;
import org.hamcrest.Matcher;
import org.hl7.fhir.instance.model.api.IAnyResource;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;
/**
* Matcher which tells us if there is an MdmLink between these two resources that is considered POSSIBLE_MATCH.
*/
public class IsPossibleMatchWith extends BaseGoldenResourceMatcher {
protected IsPossibleMatchWith(IIdHelperService theIdHelperService, MdmLinkDaoSvc theMdmLinkDaoSvc, IAnyResource... theBaseResource) {
super(theIdHelperService, theMdmLinkDaoSvc, theBaseResource);
}
@Override
protected boolean matchesSafely(IAnyResource theIncomingResource) {
List<? extends IMdmLink> mdmLinks = getMdmLinksForTarget(theIncomingResource, MdmMatchResultEnum.POSSIBLE_MATCH);
List<IResourcePersistentId> goldenResourcePidsToMatch = myBaseResources.stream()
.map(this::getMatchedResourcePidFromResource)
.filter(Objects::nonNull)
.collect(Collectors.toList());
if (goldenResourcePidsToMatch.isEmpty()) {
goldenResourcePidsToMatch = myBaseResources.stream()
.flatMap(iBaseResource -> getPossibleMatchedGoldenResourcePidsFromTarget(iBaseResource).stream())
.collect(Collectors.toList());
}
List<IResourcePersistentId> mdmLinkGoldenResourcePids = mdmLinks
.stream().map(IMdmLink::getGoldenResourcePersistenceId)
.collect(Collectors.toList());
return mdmLinkGoldenResourcePids.containsAll(goldenResourcePidsToMatch);
}
@Override
public void describeTo(Description theDescription) {
theDescription.appendText(" no link found with POSSIBLE_MATCH to the requested PIDS");
}
@Override
protected void describeMismatchSafely(IAnyResource item, Description mismatchDescription) {
super.describeMismatchSafely(item, mismatchDescription);
mismatchDescription.appendText("No MDM Link With POSSIBLE_MATCH was found");
}
public static Matcher<IAnyResource> possibleMatchWith(IIdHelperService theIdHelperService, MdmLinkDaoSvc theMdmLinkDaoSvc, IAnyResource... theBaseResource) {
return new IsPossibleMatchWith(theIdHelperService, theMdmLinkDaoSvc, theBaseResource);
}
}


@ -1,47 +0,0 @@
package ca.uhn.fhir.jpa.mdm.matcher;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
import ca.uhn.fhir.jpa.mdm.dao.MdmLinkDaoSvc;
import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId;
import org.hamcrest.Description;
import org.hamcrest.Matcher;
import org.hl7.fhir.instance.model.api.IAnyResource;
import java.util.List;
import java.util.stream.Collectors;
public class IsSameGoldenResourceAs extends BaseGoldenResourceMatcher {
private List<IResourcePersistentId> goldenResourcePidsToMatch;
private IResourcePersistentId incomingGoldenResourcePid;
public IsSameGoldenResourceAs(IIdHelperService theIdHelperService, MdmLinkDaoSvc theMdmLinkDaoSvc, IAnyResource... theBaseResource) {
super(theIdHelperService, theMdmLinkDaoSvc, theBaseResource);
}
@Override
protected boolean matchesSafely(IAnyResource theIncomingResource) {
incomingGoldenResourcePid = getMatchedResourcePidFromResource(theIncomingResource);
goldenResourcePidsToMatch = myBaseResources.stream().map(this::getMatchedResourcePidFromResource).collect(Collectors.toList());
boolean allToCheckAreSame = goldenResourcePidsToMatch.stream().allMatch(pid -> pid.equals(goldenResourcePidsToMatch.get(0)));
if (!allToCheckAreSame) {
throw new IllegalStateException("You wanted to do a source resource comparison, but the pool of source resources you submitted for checking don't match! We won't even check the incoming source resource against them.");
}
return goldenResourcePidsToMatch.contains(incomingGoldenResourcePid);
}
@Override
public void describeTo(Description theDescription) {
theDescription.appendText(String.format(" %s linked to source resource %s/%s", myTargetType, myTargetType, goldenResourcePidsToMatch));
}
@Override
protected void describeMismatchSafely(IAnyResource item, Description mismatchDescription) {
super.describeMismatchSafely(item, mismatchDescription);
mismatchDescription.appendText(String.format(" was actually linked to %s/%s", myTargetType, incomingGoldenResourcePid));
}
public static Matcher<IAnyResource> sameGoldenResourceAs(IIdHelperService theIdHelperService, MdmLinkDaoSvc theMdmLinkDaoSvc, IAnyResource... theBaseResource) {
return new IsSameGoldenResourceAs(theIdHelperService, theMdmLinkDaoSvc, theBaseResource);
}
}
