Batch option processing cleanup (#4466)

* BulkDataExportOptions null safety

parent 8c287f0269
commit 10060bef7f
@@ -738,7 +738,6 @@ public class ExtendedHSearchClauseBuilder {
 subMatch = buildNumericClause(value, componentContext);
 break;
 case REFERENCE:
-// wipmb Can we use reference in composite?
 
 default:
 break;

@@ -84,7 +84,7 @@ public class ExtendedHSearchIndexExtractor {
 
 retVal.setForcedId(theResource.getIdElement().getIdPart());
 
-// wipmb mb add a flag ot DaoConfig to suppress this
+// todo add a flag ot DaoConfig to suppress this
 extractAutocompleteTokens(theResource, retVal);
 
 theNewParams.myStringParams.stream()

@@ -163,7 +163,6 @@ class HSearchCompositeSearchIndexDataImpl implements CompositeSearchIndexData {
 assert false: "composite components can't be composite";
 break;
 
-// wipmb Can any of these be part of a composite?
 case REFERENCE:
 break;
 

@@ -143,9 +143,7 @@ public class MemberMatcherR4Helper {
 updateConsentPatientAndPerformer(theConsent, thePatient);
 myConsentModifier.accept(theConsent);
 
-// WIPMB REVIEW QUESTION Which partition should we target?
-// Will RequestTenantPartitionInterceptor or PatientIdPartitionInterceptor do the right thing?
-// Can we use the userdata field to hint at target partition?
+// Trust RequestTenantPartitionInterceptor or PatientIdPartitionInterceptor to assign the partition.
 myConsentDao.create(theConsent, theRequestDetails);
 }
 

@@ -376,7 +376,6 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
 CompositeInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequest, Pointcut.JPA_PERFTRACE_INDEXSEARCH_QUERY_COMPLETE, params);
 }
 
-// todo MB extract this and move to FullText svc
 // can we skip the database entirely and return the pid list from here?
 boolean canSkipDatabase =
 // if we processed an AND clause, and it returned nothing, then nothing can match.

@@ -429,7 +428,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
 failIfUsed(Constants.PARAM_CONTENT);
 }
 
-// TODO MB someday we'll want a query planner to figure out if we _should_ or _must_ use the ft index, not just if we can.
+// someday we'll want a query planner to figure out if we _should_ or _must_ use the ft index, not just if we can.
 return fulltextEnabled && myParams != null &&
 myParams.getSearchContainedMode() == SearchContainedModeEnum.FALSE &&
 myFulltextSearchSvc.supportsSomeOf(myParams);

@@ -113,7 +113,6 @@ public class ExtendedHSearchIndexData {
 * Add if not already present.
 */
 public void addTokenIndexDataIfNotPresent(String theSpName, String theSystem, String theValue) {
-// todo MB create a BaseCodingDt that respects equals
 boolean isPresent = mySearchParamTokens.get(theSpName).stream()
 .anyMatch(c -> Objects.equals(c.getSystem(), theSystem) && Objects.equals(c.getCode(), theValue));
 if (!isPresent) {

@@ -114,10 +114,8 @@ public class HSearchIndexWriter {
 }
 
 DocumentElement tokenIndexNode = getSearchParamIndexNode(theSearchParam, INDEX_TYPE_TOKEN);
-// TODO mb we can use a token_filter with pattern_capture to generate all three off a single value. Do this next, after merge.
 writeTokenFields(tokenIndexNode, theValue);
 ourLog.debug("Adding Search Param Token: {} -- {}", theSearchParam, theValue);
-// TODO mb should we write the strings here too? Or leave it to the old spidx indexing?
 }
 
 public void writeTokenFields(DocumentElement theDocumentElement, IBaseCoding theValue) {

@@ -151,11 +151,11 @@ public class SearchParamTextPropertyBinder implements PropertyBinder, PropertyBr
 IndexSchemaObjectField nestedSpField = indexSchemaElement.objectField(HSearchIndexWriter.NESTED_SEARCH_PARAM_ROOT, ObjectStructure.FLATTENED);
 nestedSpField.toReference();
 
-// TODO MB: the lucene/elastic independent api is hurting a bit here.
+// Note: the lucene/elastic independent api is hurting a bit here.
 // For lucene, we need a separate field for each analyzer. So we'll add string (for :exact), and text (for :text).
 // They aren't marked stored, so there's no space cost beyond the index for each.
-// But for elastic, I'd rather have a single field defined, with multi-field sub-fields. The index cost is the same,
-// but elastic will actually store all fields in the source document.
+// But for elastic, we'd rather have a single field defined, with multi-field sub-fields. The index cost is the same,
+// but elastic will actually store all fields in the source document and consume disk.
 
 // So triplicate the storage for now. :-(
 String stringPathGlob = "*.string";

@@ -386,8 +386,7 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor
 // Bug workaround: the component expressions are null in the FhirContextSearchParamRegistry. We can't do anything with them.
 c.getExpression() == null ||
 
-// wipmb hack hack alert:
-// Bug workaround: we don't support the %resource variable, but standard SPs on MolecularSequence use it.
+// TODO mb Bug workaround: we don't support the %resource variable, but standard SPs on MolecularSequence use it.
 // Skip them for now.
 c.getExpression().contains("%resource");
 }

@@ -265,7 +265,6 @@ public class SearchParamExtractorService {
 }
 
 // Composites
-// wipmb should we have config to skip this cpu work? Check to see if HSearch is enabled?
 // dst2 composites use stuff like value[x] , and we don't support them.
 if (myContext.getVersion().getVersion().isEqualOrNewerThan(FhirVersionEnum.DSTU3)) {
 ISearchParamExtractor.SearchParamSet<ResourceIndexedSearchParamComposite> composites = extractSearchParamComposites(theResource);

@@ -150,7 +150,7 @@ public enum JpaParamUtil {
 public static List<RuntimeSearchParam> resolveComponentParameters(ISearchParamRegistry theSearchParamRegistry, RuntimeSearchParam theParamDef) {
 List<RuntimeSearchParam> compositeList = resolveCompositeComponentsDeclaredOrder(theSearchParamRegistry, theParamDef);
 
-// wipmb why is this sorted? Is the param order flipped too during query-time?
+// todo mb why is this sorted? Is the param order flipped too during query-time?
 compositeList.sort((Comparator.comparing(RuntimeSearchParam::getName)));
 
 return compositeList;

@@ -53,7 +53,7 @@ public class FhirResourceDaoR4SearchFtTest extends BaseJpaR4Test {
 }
 
 /**
-* TODO mb Extract these tests and run on all: jpa, lucene, es, and mongo. {@link FhirResourceDaoR4SearchWithElasticSearchIT}
+* TODO mb Extract these tests and run on all: jpa, lucene, es, etc. {@link FhirResourceDaoR4SearchWithElasticSearchIT}
 * {@link FhirResourceDaoR4SearchWithElasticSearchIT#testStringSearch}
 */
 @Test

@@ -413,7 +413,7 @@ public class FhirResourceDaoR4StandardQueriesNoFTTest extends BaseJpaTest {
 
 }
 
-// wipmb re-enable this. Some of these fail!
+// todo mb re-enable this. Some of these fail!
 @Disabled
 @Nested
 class QuantityAndNormalizedQuantitySearch extends QuantitySearchParameterTestCases {

@@ -5,6 +5,7 @@ import ca.uhn.fhir.jpa.api.model.BulkExportParameters;
 import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
 import ca.uhn.fhir.util.Batch2JobDefinitionConstants;
 import ca.uhn.fhir.util.SearchParameterUtil;
+import org.apache.commons.collections4.CollectionUtils;
 
 import java.util.ArrayList;
 import java.util.List;

@@ -33,7 +34,7 @@ public class BulkExportBatch2TestUtils {
 // if none are provided, the job submitter adds them
 // but we cannot manually start the job without correct parameters
 // so we "correct" them here
-if (theOptions.getResourceTypes() == null || theOptions.getResourceTypes().isEmpty()) {
+if (CollectionUtils.isEmpty(theOptions.getResourceTypes())) {
 addAllResourceTypes(parameters, theCtx);
 }
 else {

@@ -22,13 +22,11 @@ package ca.uhn.fhir.rest.api.server.bulk;
 
 import org.hl7.fhir.instance.model.api.IIdType;
 
+import javax.annotation.Nonnull;
 import java.util.Date;
 import java.util.Set;
 
 public class BulkDataExportOptions {
-public BulkDataExportOptions() {
-
-}
 
 public enum ExportStyle {
 PATIENT,

@@ -73,7 +71,11 @@
 return myOutputFormat;
 }
 
+@Nonnull
 public Set<String> getResourceTypes() {
+if (myResourceTypes == null) {
+myResourceTypes = Set.of();
+}
 return myResourceTypes;
 }
 

@@ -81,7 +83,11 @@
 return mySince;
 }
 
+@Nonnull
 public Set<String> getFilters() {
+if (myFilters == null) {
+myFilters = Set.of();
+}
 return myFilters;
 }
 
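
These two hunks are the heart of the null-safety change: getResourceTypes() and getFilters() are now annotated @Nonnull and lazily replace a null field with an empty set, so callers no longer need their own null guards. A minimal sketch of the pattern (an illustration, not the HAPI FHIR source; the field name is taken from the diff):

    import java.util.Set;
    import javax.annotation.Nonnull;

    class NullSafeOptionsSketch {
        private Set<String> myResourceTypes;

        @Nonnull
        public Set<String> getResourceTypes() {
            if (myResourceTypes == null) {
                // Set.of() is an immutable empty set, so this assumes
                // callers treat the returned collection as read-only.
                myResourceTypes = Set.of();
            }
            return myResourceTypes;
        }
    }

One consequence shows up in later hunks of this diff: call sites that used to test != null switch to emptiness checks, because the null case can no longer occur.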

@@ -32,6 +32,8 @@ import java.util.Collection;
 import java.util.Objects;
 import java.util.Set;
 
+import static org.apache.commons.collections4.CollectionUtils.isEmpty;
+import static org.apache.commons.collections4.CollectionUtils.isNotEmpty;
 import static org.apache.commons.lang3.StringUtils.isNotBlank;
 
 public class RuleBulkExportImpl extends BaseRule {

@@ -60,8 +62,8 @@ public class RuleBulkExportImpl extends BaseRule {
 return null;
 }
 
-if (myResourceTypes != null && !myResourceTypes.isEmpty()) {
-if (options.getResourceTypes() == null) {
+if (isNotEmpty(myResourceTypes)) {
+if (isEmpty(options.getResourceTypes())) {
 return null;
 }
 for (String next : options.getResourceTypes()) {
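
The authorization rule above replaces hand-rolled null-and-empty tests with the commons-collections4 helpers it just imported. Their value is that a null reference and an empty collection are treated identically, which pairs well with options objects that may still hand back null in older code paths. A small self-contained demo:

    import java.util.Set;

    import static org.apache.commons.collections4.CollectionUtils.isEmpty;
    import static org.apache.commons.collections4.CollectionUtils.isNotEmpty;

    class CollectionNullSafetyDemo {
        public static void main(String[] args) {
            Set<String> nullSet = null;
            // Both a null reference and an empty collection count as "empty".
            System.out.println(isEmpty(nullSet));               // true
            System.out.println(isEmpty(Set.of()));              // true
            System.out.println(isNotEmpty(Set.of("Patient")));  // true
        }
    }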

@@ -23,6 +23,7 @@ package ca.uhn.fhir.batch2.jobs.export;
 import ca.uhn.fhir.batch2.api.IJobParametersValidator;
 import ca.uhn.fhir.batch2.jobs.export.models.BulkExportJobParameters;
 import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
+import ca.uhn.fhir.jpa.bulk.export.provider.BulkDataExportProvider;
 import ca.uhn.fhir.rest.api.Constants;
 import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
 import org.springframework.beans.factory.annotation.Autowired;

@@ -31,11 +32,12 @@ import javax.annotation.Nonnull;
 import javax.annotation.Nullable;
 import java.util.ArrayList;
 import java.util.List;
-import java.util.Set;
 
 public class BulkExportJobParametersValidator implements IJobParametersValidator<BulkExportJobParameters> {
 
-public static final String UNSUPPORTED_BINARY_TYPE = "Binary";
+/** @deprecated use BulkDataExportProvider.UNSUPPORTED_BINARY_TYPE instead */
+@Deprecated(since="6.3.10")
+public static final String UNSUPPORTED_BINARY_TYPE = BulkDataExportProvider.UNSUPPORTED_BINARY_TYPE;
 @Autowired
 private DaoRegistry myDaoRegistry;
 
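
The validator keeps its UNSUPPORTED_BINARY_TYPE constant but turns it into a deprecated alias for the new home on BulkDataExportProvider (declared in a later hunk), so existing references keep compiling. The shape of the pattern, with illustrative class names:

    class NewHome {
        /** Bulk data $export does not include the Binary type. */
        public static final String UNSUPPORTED_BINARY_TYPE = "Binary";
    }

    class OldHome {
        /** @deprecated use NewHome.UNSUPPORTED_BINARY_TYPE instead */
        @Deprecated
        public static final String UNSUPPORTED_BINARY_TYPE = NewHome.UNSUPPORTED_BINARY_TYPE;
    }

Since a static final String is a compile-time constant, the alias is inlined into already-compiled callers; that is harmless here because the value is unchanged.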

@@ -278,7 +278,6 @@ public class DaoConfig {
 * queried within Hibernate Search.
 *
 * @since 5.6.0
-* TODO mb test more with this true
 */
 private boolean myAdvancedHSearchIndexing = false;
 /**

@@ -78,10 +78,11 @@ import java.util.Date;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
-import java.util.NoSuchElementException;
 import java.util.Set;
 import java.util.stream.Collectors;
 
+import static org.apache.commons.collections4.CollectionUtils.isEmpty;
+import static org.apache.commons.collections4.CollectionUtils.isNotEmpty;
 import static org.apache.commons.lang3.StringUtils.isEmpty;
 import static org.slf4j.LoggerFactory.getLogger;
 
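
Note that the file now static-imports isEmpty from both CollectionUtils and StringUtils. That is legal Java: single-static imports of the same method name from different classes simply add overloads, and each call site resolves by argument type. A tiny demo:

    import java.util.List;

    import static org.apache.commons.collections4.CollectionUtils.isEmpty;
    import static org.apache.commons.lang3.StringUtils.isEmpty;

    class OverloadedImportDemo {
        public static void main(String[] args) {
            System.out.println(isEmpty(List.of())); // CollectionUtils.isEmpty(Collection<?>) -> true
            System.out.println(isEmpty(""));        // StringUtils.isEmpty(CharSequence) -> true
        }
    }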

@@ -89,6 +90,8 @@ import static org.slf4j.LoggerFactory.getLogger;
 public class BulkDataExportProvider {
 public static final String FARM_TO_TABLE_TYPE_FILTER_REGEX = "(?:,)(?=[A-Z][a-z]+\\?)";
 public static final List<String> PATIENT_BULK_EXPORT_FORWARD_REFERENCE_RESOURCE_TYPES = List.of("Practitioner", "Organization");
+/** Bulk data $export does not include the Binary type */
+public static final String UNSUPPORTED_BINARY_TYPE = "Binary";
 private static final Logger ourLog = getLogger(BulkDataExportProvider.class);
 
 @Autowired

@@ -118,7 +121,7 @@ public class BulkDataExportProvider {
 @OperationParam(name = JpaConstants.PARAM_EXPORT_SINCE, min = 0, max = 1, typeName = "instant") IPrimitiveType<Date> theSince,
 @OperationParam(name = JpaConstants.PARAM_EXPORT_TYPE_FILTER, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "string") List<IPrimitiveType<String>> theTypeFilter,
 ServletRequestDetails theRequestDetails
-) throws Exception {
+) {
 // JPA export provider
 validatePreferAsyncHeader(theRequestDetails, JpaConstants.OPERATION_EXPORT);
 

@@ -145,9 +148,9 @@ public class BulkDataExportProvider {
 parameters.setOriginalRequestUrl(theRequestDetails.getCompleteUrl());
 
 // If no _type parameter is provided, default to all resource types except Binary
-if (theOptions.getResourceTypes() == null || theOptions.getResourceTypes().isEmpty()) {
+if (isEmpty(theOptions.getResourceTypes())) {
 List<String> resourceTypes = new ArrayList<>(myDaoRegistry.getRegisteredDaoTypes());
-resourceTypes.remove("Binary");
+resourceTypes.remove(UNSUPPORTED_BINARY_TYPE);
 parameters.setResourceTypes(resourceTypes);
 }
 

@@ -204,18 +207,18 @@ public class BulkDataExportProvider {
 @OperationParam(name = JpaConstants.PARAM_EXPORT_TYPE_FILTER, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "string") List<IPrimitiveType<String>> theTypeFilter,
 @OperationParam(name = JpaConstants.PARAM_EXPORT_MDM, min = 0, max = 1, typeName = "boolean") IPrimitiveType<Boolean> theMdm,
 ServletRequestDetails theRequestDetails
-) throws Exception {
+) {
 ourLog.debug("Received Group Bulk Export Request for Group {}", theIdParam);
 ourLog.debug("_type={}", theIdParam);
 ourLog.debug("_since={}", theSince);
 ourLog.debug("_typeFilter={}", theTypeFilter);
-ourLog.debug("_mdm=", theMdm);
+ourLog.debug("_mdm={}", theMdm);
 
 validatePreferAsyncHeader(theRequestDetails, JpaConstants.OPERATION_EXPORT);
 
 BulkDataExportOptions bulkDataExportOptions = buildGroupBulkExportOptions(theOutputFormat, theType, theSince, theTypeFilter, theIdParam, theMdm);
 
-if (bulkDataExportOptions.getResourceTypes() != null && !bulkDataExportOptions.getResourceTypes().isEmpty()) {
+if (isNotEmpty(bulkDataExportOptions.getResourceTypes())) {
 validateResourceTypesAllContainPatientSearchParams(bulkDataExportOptions.getResourceTypes());
 } else {
 // all patient resource types
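
The _mdm change in this hunk is a genuine bug fix, not a cleanup: the format string had no {} placeholder, and SLF4J silently drops arguments that have no placeholder (a trailing Throwable is the only special case). A minimal before/after, assuming an SLF4J binding with debug logging enabled:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class PlaceholderDemo {
        private static final Logger ourLog = LoggerFactory.getLogger(PlaceholderDemo.class);

        public static void main(String[] args) {
            Boolean mdm = Boolean.TRUE;
            ourLog.debug("_mdm=", mdm);   // logs "_mdm=" -- the value is silently dropped
            ourLog.debug("_mdm={}", mdm); // logs "_mdm=true"
        }
    }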

@@ -257,7 +260,7 @@ public class BulkDataExportProvider {
 @OperationParam(name = JpaConstants.PARAM_EXPORT_TYPE_FILTER, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "string") List<IPrimitiveType<String>> theTypeFilter,
 @OperationParam(name = JpaConstants.PARAM_EXPORT_PATIENT, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "string") List<IPrimitiveType<String>> thePatient,
 ServletRequestDetails theRequestDetails
-) throws Exception {
+) {
 validatePreferAsyncHeader(theRequestDetails, JpaConstants.OPERATION_EXPORT);
 BulkDataExportOptions bulkDataExportOptions = buildPatientBulkExportOptions(theOutputFormat, theType, theSince, theTypeFilter, thePatient);
 validateResourceTypesAllContainPatientSearchParams(bulkDataExportOptions.getResourceTypes());

@@ -276,7 +279,7 @@ public class BulkDataExportProvider {
 @OperationParam(name = JpaConstants.PARAM_EXPORT_SINCE, min = 0, max = 1, typeName = "instant") IPrimitiveType<Date> theSince,
 @OperationParam(name = JpaConstants.PARAM_EXPORT_TYPE_FILTER, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "string") List<IPrimitiveType<String>> theTypeFilter,
 ServletRequestDetails theRequestDetails
-) throws Exception {
+) {
 validatePreferAsyncHeader(theRequestDetails, JpaConstants.OPERATION_EXPORT);
 BulkDataExportOptions bulkDataExportOptions = buildPatientBulkExportOptions(theOutputFormat, theType, theSince, theTypeFilter, theIdParam);
 validateResourceTypesAllContainPatientSearchParams(bulkDataExportOptions.getResourceTypes());

@@ -23,6 +23,7 @@ package ca.uhn.fhir.jpa.util;
 import ca.uhn.fhir.jpa.api.model.BulkExportParameters;
 import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
 import ca.uhn.fhir.util.Batch2JobDefinitionConstants;
+import org.apache.commons.collections4.CollectionUtils;
 import org.hl7.fhir.instance.model.api.IIdType;
 
 import java.util.ArrayList;

@@ -40,16 +41,16 @@ public class BulkExportUtils {
 parameters.setStartDate(theOptions.getSince());
 parameters.setOutputFormat(theOptions.getOutputFormat());
 parameters.setExportStyle(theOptions.getExportStyle());
-if (theOptions.getFilters() != null) {
+if (CollectionUtils.isNotEmpty(theOptions.getFilters())) {
 parameters.setFilters(new ArrayList<>(theOptions.getFilters()));
 }
 if (theOptions.getGroupId() != null) {
 parameters.setGroupId(theOptions.getGroupId().getValue());
 }
-if (theOptions.getResourceTypes() != null) {
+if (CollectionUtils.isNotEmpty(theOptions.getResourceTypes())) {
 parameters.setResourceTypes(new ArrayList<>(theOptions.getResourceTypes()));
 }
-if (theOptions.getPatientIds() != null) {
+if (CollectionUtils.isNotEmpty(theOptions.getPatientIds())) {
 parameters.setPatientIds(theOptions.getPatientIds().stream().map(IIdType::getValue).collect(Collectors.toList()));
 }
 parameters.setExpandMdm(theOptions.isExpandMdm());
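
These call-site changes follow from the new @Nonnull getters: a bare theOptions.getFilters() != null would now always be true, so the checks switch to CollectionUtils.isNotEmpty, which also skips copying empty collections into the parameters object (the getPatientIds() getter is not shown in this diff; the same helper is applied there for consistency). A sketch of the effect, using a stand-in getter rather than the HAPI class:

    import java.util.ArrayList;
    import java.util.List;
    import java.util.Set;
    import org.apache.commons.collections4.CollectionUtils;

    class EmptinessCheckDemo {
        // Stand-in for BulkDataExportOptions#getFilters() after this commit:
        // never null, possibly empty.
        static Set<String> getFilters() {
            return Set.of();
        }

        public static void main(String[] args) {
            List<String> filters = null;
            // A null check would always pass now; checking emptiness avoids
            // allocating a copy of an empty set.
            if (CollectionUtils.isNotEmpty(getFilters())) {
                filters = new ArrayList<>(getFilters());
            }
            System.out.println(filters); // null -- nothing to copy
        }
    }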

@@ -359,7 +359,6 @@ public interface ITestDataBuilder {
 return withReference("managingOrganization", theHasMember);
 }
 
-// todo mb extract these to something like TestDataBuilderBacking. Maybe split out create* into child interface since people skip it.
 /**
 * Users of this API must implement this method
 */

@@ -393,7 +392,6 @@ public interface ITestDataBuilder {
 IIdType doUpdateResource(IBaseResource theResource);
 }
 
-// todo mb make this the norm.
 interface WithSupport extends ITestDataBuilder {
 Support getTestDataBuilderSupport();
 

@@ -416,7 +414,6 @@ public interface ITestDataBuilder {
 
 /**
 * Dummy support to use ITestDataBuilder as just a builder, not a DAO
-* todo mb Maybe we should split out the builder into a super-interface and drop this?
 */
 class SupportNoDao implements Support {
 final FhirContext myFhirContext;