Merge remote-tracking branch 'origin/master' into ng_20201218_survivorship_poc
commit cfc798cb2b
@@ -358,9 +358,9 @@ This operation returns the merged Golden Resource (`toGoldenResourceId`).
 
 ## Querying the Patient Resource
 
-When MDM is enabled, the [$match operation](http://hl7.org/fhir/patient-operation-match.html) will be enabled on the JPA Server.
+When MDM is enabled, the [$match operation](http://hl7.org/fhir/patient-operation-match.html) will be enabled on the JPA Server for Patient and Practitioner resources.
 
-This operation allows a Patient resource to be submitted to the endpoint, and the system will attempt to find and return any Patient resources that match it according to the matching rules. The response includes a search score field that is calculated by averaging the number of matched rules against total rules checked for the Patient resource. Appropriate match grade extension is also included.
+This operation allows a Patient or Practitioner resource to be submitted to the endpoint, and the system will attempt to find and return any Patient resources that match it according to the matching rules. The response includes a search score field that is calculated by averaging the number of matched rules against total rules checked for the Patient resource. An appropriate match grade extension is also included.
 
 For example, the following request may be submitted:
 
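For illustration only (not part of this commit), a minimal sketch of what such a request looks like when driven through the HAPI fluent client; the base URL, class name, and patient details here are assumptions:

```java
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.Parameters;
import org.hl7.fhir.r4.model.Patient;

public class MatchExample {
	public static void main(String[] args) {
		FhirContext ctx = FhirContext.forR4();
		// Assumed base URL - point this at your own MDM-enabled JPA server
		IGenericClient client = ctx.newRestfulGenericClient("http://localhost:8080/fhir");

		// Candidate resource to match against existing Patients (invented details)
		Patient probe = new Patient();
		probe.addName().setFamily("Doe").addGiven("Jane");

		// Equivalent to POST [base]/Patient/$match with the candidate passed as
		// the "resource" input parameter; the server returns a Bundle of matches
		Bundle matches = client.operation()
			.onType(Patient.class)
			.named("match")
			.withParameter(Parameters.class, "resource", probe)
			.returnResourceType(Bundle.class)
			.execute();

		System.out.println(ctx.newJsonParser().setPrettyPrint(true)
			.encodeResourceToString(matches));
	}
}
```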
@@ -428,12 +428,12 @@ Sample response for the Patient match is included below:
 
 ## Querying the Other Supported MDM Resources via `/$mdm-match`
 
-Query operations on any other supported MDM type is also allowed. This operation will find resources that match the provided parameters according to the matching rules. The response includes a search score field that is calculated by averaging the number of matched rules against total rules checked for the Patient resource. Appropriate match grade extension is also included in the response.
+Query operations on any other supported MDM type are also allowed via the server-level operation `/$mdm-match`. This operation will find resources that match the provided parameters according to the matching rules. The response includes a search score field that is calculated by averaging the number of matched rules against total rules checked for the Patient resource. An appropriate match grade extension is also included in the response.
 
-The request below may be submitted to search for `Orgaization` in case it defined as a supported MDM type:
+The request below may be submitted to search for `Organization` in case it is defined as a supported MDM type:
 
 ```http
-POST /Organization/$mdm-match
+POST /$mdm-match
 Content-Type: application/fhir+json; charset=UTF-8
 
 {
@@ -449,8 +449,9 @@ Content-Type: application/fhir+json; charset=UTF-8
   ]
 }
 ```
 
 MDM will respond with the appropriate resource bundle.
+Note that the request goes to the root of the FHIR server, and not the `Organization` endpoint. Since this is not in the FHIR spec directly, it was decided that this would be a separate operation from the Patient/Practitioner `/$match` operation.
 
 ## Clearing MDM Links
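As a further illustration (again not part of the commit), the server-level `$mdm-match` call from the note above, sketched with the HAPI fluent client. It assumes `Organization` is configured as a supported MDM type; the base URL and organization details are invented:

```java
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.Organization;
import org.hl7.fhir.r4.model.Parameters;
import org.hl7.fhir.r4.model.StringType;

public class MdmMatchExample {
	public static void main(String[] args) {
		FhirContext ctx = FhirContext.forR4();
		// Assumed base URL - point this at your own MDM-enabled server
		IGenericClient client = ctx.newRestfulGenericClient("http://localhost:8080/fhir");

		Organization probe = new Organization();
		probe.setName("Acme Hospital"); // illustrative candidate

		// onServer() targets the server root, matching the note above that
		// $mdm-match is registered server-wide rather than per resource type.
		// The "resource" and "resourceType" parameter names come from
		// ProviderConstants in this commit.
		Bundle matches = client.operation()
			.onServer()
			.named("mdm-match")
			.withParameter(Parameters.class, "resource", probe)
			.andParameter("resourceType", new StringType("Organization"))
			.returnResourceType(Bundle.class)
			.execute();
	}
}
```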
@@ -1191,13 +1191,13 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 		return entity;
 	}
 
-	@NotNull
+	@Nonnull
 	protected ResourceTable readEntityLatestVersion(IIdType theId, RequestDetails theRequestDetails) {
 		RequestPartitionId requestPartitionId = myRequestPartitionHelperService.determineReadPartitionForRequest(theRequestDetails, getResourceName());
 		return readEntityLatestVersion(theId, requestPartitionId);
 	}
 
-	@NotNull
+	@Nonnull
 	private ResourceTable readEntityLatestVersion(IIdType theId, @Nullable RequestPartitionId theRequestPartitionId) {
 		validateResourceTypeAndThrowInvalidRequestException(theId);
@@ -1302,7 +1302,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 		if (theRequest.isSubRequest()) {
 			Integer max = myDaoConfig.getMaximumSearchResultCountInTransaction();
 			if (max != null) {
-				Validate.inclusiveBetween(1, Integer.MAX_VALUE, max.intValue(), "Maximum search result count in transaction ust be a positive integer");
+				Validate.inclusiveBetween(1, Integer.MAX_VALUE, max, "Maximum search result count in transaction must be a positive integer");
 				theParams.setLoadSynchronousUpTo(myDaoConfig.getMaximumSearchResultCountInTransaction());
 			}
 		}
@@ -1311,7 +1311,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 		if (offset != null || !isPagingProviderDatabaseBacked(theRequest)) {
 			theParams.setLoadSynchronous(true);
 			if (offset != null) {
-				Validate.inclusiveBetween(0, Integer.MAX_VALUE, offset.intValue(), "Offset must be a positive integer");
+				Validate.inclusiveBetween(0, Integer.MAX_VALUE, offset, "Offset must be a positive integer");
 			}
 			theParams.setOffset(offset);
 		}
@@ -1320,7 +1320,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 		if (count != null) {
 			Integer maxPageSize = theRequest.getServer().getMaximumPageSize();
 			if (maxPageSize != null) {
-				Validate.inclusiveBetween(1, theRequest.getServer().getMaximumPageSize(), count.intValue(), "Count must be positive integer and less than " + maxPageSize);
+				Validate.inclusiveBetween(1, theRequest.getServer().getMaximumPageSize(), count, "Count must be positive integer and less than " + maxPageSize);
 			}
 			theParams.setCount(count);
 		} else if (theRequest.getServer().getDefaultPageSize() != null) {
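Aside (not part of the commit): the three hunks above drop explicit `.intValue()` calls before `Validate.inclusiveBetween`. A minimal sketch of why this is behavior-preserving, assuming commons-lang3's `Validate`: auto-unboxing already selects the primitive overload.

```java
import org.apache.commons.lang3.Validate;

class UnboxingSketch {
	public static void main(String[] args) {
		// Stands in for myDaoConfig.getMaximumSearchResultCountInTransaction()
		Integer max = 50;
		// The Integer unboxes to int and widens to long, so overload resolution
		// picks inclusiveBetween(long, long, long, String) either way; the
		// explicit intValue() call was redundant.
		Validate.inclusiveBetween(1, Integer.MAX_VALUE, max,
			"Maximum search result count in transaction must be a positive integer");
	}
}
```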
@@ -174,7 +174,8 @@ public class SearchBuilder implements ISearchBuilder {
 	private SqlObjectFactory mySqlBuilderFactory;
 	@Autowired
 	private HibernatePropertiesProvider myDialectProvider;
 
+	private boolean hasNextIteratorQuery = false;
 	/**
 	 * Constructor
 	 */
@@ -424,6 +425,10 @@ public class SearchBuilder implements ISearchBuilder {
 			sqlBuilder.addPredicate(lastUpdatedPredicates);
 		}
 
+		//-- exclude the pids already in the previous iterator
+		if (hasNextIteratorQuery)
+			sqlBuilder.excludeResourceIdsPredicate(myPidSet);
+
 		/*
 		 * Sort
 		 *
@@ -436,7 +441,6 @@ public class SearchBuilder implements ISearchBuilder {
 			createSort(queryStack3, sort);
 		}
 
-
 		/*
 		 * Now perform the search
 		 */
@@ -444,7 +448,7 @@ public class SearchBuilder implements ISearchBuilder {
 		if (generatedSql.isMatchNothing()) {
 			return Optional.empty();
 		}
 
 		SearchQueryExecutor executor = mySqlBuilderFactory.newSearchQueryExecutor(generatedSql, myMaxResultsToFetch);
 		return Optional.of(executor);
 	}
@@ -1232,8 +1236,10 @@ public class SearchBuilder implements ISearchBuilder {
 			close();
 			if (myQueryList != null && myQueryList.size() > 0) {
 				myResultsIterator = myQueryList.remove(0);
+				hasNextIteratorQuery = true;
 			} else {
 				myResultsIterator = SearchQueryExecutor.emptyExecutor();
+				hasNextIteratorQuery = false;
 			}
 
 		}
@@ -1243,7 +1249,7 @@ public class SearchBuilder implements ISearchBuilder {
 			if (myNext == null) {
 				fetchNext();
 			}
 			return !NO_MORE.equals(myNext);
 		}
 
 		@Override
@@ -45,6 +45,7 @@ import ca.uhn.fhir.jpa.search.builder.predicate.TokenPredicateBuilder;
 import ca.uhn.fhir.jpa.search.builder.predicate.UriPredicateBuilder;
 import ca.uhn.fhir.rest.param.DateRangeParam;
 import ca.uhn.fhir.rest.param.ParamPrefixEnum;
 import com.healthmarketscience.sqlbuilder.BinaryCondition;
 import com.healthmarketscience.sqlbuilder.ComboCondition;
 import com.healthmarketscience.sqlbuilder.Condition;
+import com.healthmarketscience.sqlbuilder.InCondition;
@@ -66,11 +67,13 @@ import org.hibernate.engine.spi.RowSelection;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
+
 import javax.annotation.Nonnull;
 import javax.annotation.Nullable;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.List;
 import java.util.Set;
 import java.util.UUID;
 import java.util.stream.Collectors;
@@ -542,6 +545,21 @@ public class SearchQueryBuilder {
 		addPredicate(predicate);
 	}
 
+	public void excludeResourceIdsPredicate(Set<ResourcePersistentId> theExistingPidSetToExclude) {
+
+		// Do nothing if it's empty
+		if (theExistingPidSetToExclude == null || theExistingPidSetToExclude.isEmpty())
+			return;
+
+		List<Long> excludePids = ResourcePersistentId.toLongList(theExistingPidSetToExclude);
+
+		ourLog.trace("excludePids = " + excludePids);
+
+		DbColumn resourceIdColumn = getOrCreateFirstPredicateBuilder().getResourceIdColumn();
+		InCondition predicate = new InCondition(resourceIdColumn, generatePlaceholders(excludePids));
+		predicate.setNegate(true);
+		addPredicate(predicate);
+	}
+
 	public BinaryCondition createConditionForValueWithComparator(ParamPrefixEnum theComparator, DbColumn theColumn, Object theValue) {
 		switch (theComparator) {
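For context, an illustrative sketch of the SQL shape this new predicate produces; the table name and alias are assumptions based on HAPI's JPA schema, not output captured from this commit:

```java
// Illustrative only - the negated InCondition renders to a NOT IN clause over
// the resource id column, e.g. for three already-returned pids something like:
//
//   SELECT t0.RES_ID FROM HFJ_RESOURCE t0
//   WHERE ... AND (t0.RES_ID NOT IN (?, ?, ?))
//
// with the excluded pids bound to the JDBC placeholders produced by
// generatePlaceholders(excludePids). The new test later in this commit asserts
// on exactly this shape by scanning captured queries for "RES_ID NOT IN".
```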
@@ -3,10 +3,13 @@ package ca.uhn.fhir.jpa.provider.r4;
 import static org.hamcrest.MatcherAssert.assertThat;
 import static org.hamcrest.Matchers.contains;
 import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotEquals;
 
 import java.io.IOException;
 import java.nio.charset.StandardCharsets;
+import java.util.ArrayList;
 import java.util.List;
+import java.util.stream.Collectors;
 
 import org.apache.commons.io.IOUtils;
 import org.apache.http.client.methods.CloseableHttpResponse;
@@ -19,6 +22,7 @@ import org.hl7.fhir.r4.model.Device;
 import org.hl7.fhir.r4.model.Encounter;
 import org.hl7.fhir.r4.model.Observation;
 import org.hl7.fhir.r4.model.Observation.ObservationComponentComponent;
+import org.hl7.fhir.r4.model.Observation.ObservationStatus;
 import org.hl7.fhir.r4.model.Patient;
 import org.hl7.fhir.r4.model.Period;
 import org.hl7.fhir.r4.model.Quantity;
@@ -51,7 +55,7 @@ public class ResourceProviderHasParamR4Test extends BaseResourceProviderR4Test {
 		myDaoConfig.setSearchPreFetchThresholds(new DaoConfig().getSearchPreFetchThresholds());
 		myDaoConfig.setAllowContainsSearches(new DaoConfig().isAllowContainsSearches());
 		myDaoConfig.setIndexMissingFields(new DaoConfig().getIndexMissingFields());
 
 		myClient.unregisterInterceptor(myCapturingInterceptor);
 	}
@@ -587,11 +591,36 @@ public class ResourceProviderHasParamR4Test extends BaseResourceProviderR4Test {
 		String uri = ourServerBase + "/Patient?_has:Observation:subject:code-value-quantity=http://" + UrlUtil.escapeUrlParam("loinc.org|2345-7$gt180") + "&_has:Encounter:subject:date=gt1950" + "&_has:Encounter:subject:class=" + UrlUtil.escapeUrlParam("urn:system|IMP");
 
 		ourLog.info("uri = " + uri);
 
 		List<String> ids = searchAndReturnUnqualifiedVersionlessIdValues(uri);
 
 		assertThat(ids, contains(pid0.getValue()));
 	}
 
+	@Test
+	public void testMultipleHasParameter_NOT_IN() throws Exception {
+
+		for (int i = 0; i < 10; i++) {
+			createPatientWithObs(10);
+		}
+
+		String uri = ourServerBase + "/Patient?_has:Observation:subject:code-value-quantity=http://" + UrlUtil.escapeUrlParam("loinc.org|2345-7$gt180") + "&_has:Observation:subject:date=gt1950" + "&_has:Observation:subject:status=final&_count=4";
+
+		ourLog.info("uri = " + uri);
+		myCaptureQueriesListener.clear();
+
+		searchAndReturnUnqualifiedVersionlessIdValues(uri);
+
+		List<String> queries = myCaptureQueriesListener.getSelectQueries().stream().map(t -> t.getSql(true, false)).collect(Collectors.toList());
+
+		List<String> notInListQueries = new ArrayList<>();
+		for (String query : queries) {
+			if (query.contains("RES_ID NOT IN"))
+				notInListQueries.add(query);
+		}
+
+		assertNotEquals(0, notInListQueries.size());
+	}
+
 	private List<String> searchAndReturnUnqualifiedVersionlessIdValues(String uri) throws IOException {
 		List<String> ids;
 		HttpGet get = new HttpGet(uri);
@@ -605,5 +634,26 @@ public class ResourceProviderHasParamR4Test extends BaseResourceProviderR4Test {
 		return ids;
 	}
 
+	private void createPatientWithObs(int obsNum) {
+		Patient patient = new Patient();
+		patient.addIdentifier().setSystem("urn:system").setValue("001");
+		patient.addName().setFamily("Tester").addGiven("Joe");
+		IIdType pid = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless();
+
+		Observation o1 = new Observation();
+		o1.setStatus(ObservationStatus.FINAL);
+		o1.getSubject().setReferenceElement(pid);
+		o1.setEffective(new DateTimeType("2001-02-01"));
+		CodeableConcept cc = o1.getCode();
+		cc.addCoding().setCode("2345-7").setSystem("http://loinc.org");
+		o1.setValue(new Quantity().setValue(200));
+		cc = new CodeableConcept();
+		cc.addCoding().setCode("2345-7").setSystem("http://loinc.org");
+		o1.addCategory(cc);
+
+		for (int i = 0; i < obsNum; i++) {
+			myObservationDao.create(o1).getId().toUnqualifiedVersionless();
+		}
+	}
 
 }
|
@@ -65,17 +65,14 @@ public class ProviderConstants {
 	public static final String MDM_MATCH = "$mdm-match";
 	public static final String MDM_MATCH_RESOURCE = "resource";
 	public static final String MDM_RESOURCE_TYPE = "resourceType";
 
 	public static final String MDM_MERGE_GOLDEN_RESOURCES = "$mdm-merge-golden-resources";
 	public static final String MDM_MERGE_GR_FROM_GOLDEN_RESOURCE_ID = "fromGoldenResourceId";
 	public static final String MDM_MERGE_GR_TO_GOLDEN_RESOURCE_ID = "toGoldenResourceId";
 	public static final String MDM_MERGE_RESOURCE = "resource";
 	public static final String MDM_MERGE_RESOURCE_ID = "resource.id";
 
 	public static final String MDM_UPDATE_LINK = "$mdm-update-link";
 	public static final String MDM_UPDATE_LINK_GOLDEN_RESOURCE_ID = "goldenResourceId";
 	public static final String MDM_UPDATE_LINK_RESOURCE_ID = "resourceId";
 	public static final String MDM_UPDATE_LINK_RESOURCE = "resource";
 	public static final String MDM_UPDATE_LINK_MATCH_RESULT = "matchResult";
 
 	public static final String MDM_QUERY_LINKS = "$mdm-query-links";
pom.xml
@@ -763,7 +763,7 @@
 			<httpcore_version>4.4.13</httpcore_version>
 			<httpclient_version>4.5.13</httpclient_version>
 			<jackson_version>2.12.1</jackson_version>
-			<jackson_databind_version>2.11.2</jackson_databind_version>
+			<jackson_databind_version>2.11.3</jackson_databind_version>
 			<maven_assembly_plugin_version>3.1.0</maven_assembly_plugin_version>
 			<maven_license_plugin_version>1.8</maven_license_plugin_version>
 			<resteasy_version>4.0.0.Beta3</resteasy_version>
@@ -0,0 +1,89 @@
+# This is manually run to deploy SNAPSHOT versions of HAPI to oss.sonatype.org
+# We don't need to trigger on any pull request or branch change, so we disable such behavior
+pr: none
+trigger: none
+
+# We'll run the process on the latest version of ubuntu because they tend to be the fastest
+pool:
+  vmImage: 'ubuntu-latest'
+
+# We cannot store things like gpg passwords and sonatype credentials as plain text within the
+# pipeline's yaml file, so we've created variable groups in our library to store sensitive variables.
+# Pipelines do not load these groups by default, and we need to define which groups to load before
+# running any steps.
+variables:
+- group: GPG_VARIABLE_GROUP
+- group: SONATYPE_VARIABLE_GROUP
+
+steps:
+
+# We need a valid signing key to sign our builds for deployment to sonatype. We have uploaded
+# both our private and public keys to Azure as 'secure files' that we load into individual pipelines.
+
+# 1. Load the public key file
+- task: DownloadSecureFile@1
+  displayName: 'Load public key from secure files.'
+  inputs:
+    secureFile: public.pgp
+
+# 2. Load the private key file
+- task: DownloadSecureFile@1
+  displayName: 'Load private key from secure files.'
+  inputs:
+    secureFile: private.pgp
+
+# Although we have imported the key files into our workspace, GPG has no knowledge that these keys exist.
+# We use a bash script to import both the private and public keys into gpg for future signing.
+
+# 3. Import keys into gpg
+- bash: |
+    gpg --import --no-tty --batch --yes $(Agent.TempDirectory)/public.pgp
+    gpg --import --no-tty --batch --yes $(Agent.TempDirectory)/private.pgp
+    gpg --list-keys --keyid-format LONG
+    gpg --list-secret-keys --keyid-format LONG
+  displayName: 'Import signing keys into gpg.'
+
+# For creating a snapshot release with maven, we need to build a fake settings.xml file locally
+# where we can set our credentials for both sonatype and gpg, which maven can then read from.
+# This is done for master branch merges only.
+
+# 4. Create local settings.xml file
+- bash: |
+    cat >$(System.DefaultWorkingDirectory)/settings.xml <<EOL
+    <settings xmlns="http://maven.apache.org/SETTINGS/1.0.0"
+      xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+      xsi:schemaLocation="http://maven.apache.org/SETTINGS/1.0.0
+      https://maven.apache.org/xsd/settings-1.0.0.xsd">
+      <servers>
+        <server>
+          <id>ossrh</id>
+          <username>$(SONATYPE_USERNAME)</username>
+          <password>$(SONATYPE_PASSWORD)</password>
+        </server>
+      </servers>
+      <profiles>
+        <profile>
+          <id>SIGN_ARTIFACTS</id>
+          <activation>
+            <activeByDefault>true</activeByDefault>
+          </activation>
+          <properties>
+            <gpg.passphrase>$(GPG_PASSPHRASE)</gpg.passphrase>
+          </properties>
+        </profile>
+      </profiles>
+    </settings>
+    EOL
+  displayName: 'Create .mvn/settings.xml'
+
+# With our settings.xml created locally, we can now run maven (pointing to our created settings.xml file) to deploy
+# the HAPI SNAPSHOT build.
+
+# 5. Deploy SNAPSHOT build to sonatype
+- task: Maven@3
+  displayName: 'Deploy to Sonatype staging'
+  inputs:
+    mavenPomFile: '$(System.DefaultWorkingDirectory)/pom.xml'
+    goals: deploy
+    options: '--settings $(System.DefaultWorkingDirectory)/settings.xml -P DIST,ALLMODULES'
+    publishJUnitResults: false