Don't unescape plus in match URLs (#2658)
* Don't unescape plus in match URLs
* Perf improvements
* Add changelog
* Test fix
* Work on tests
* Fixes
* Test fix

This commit is contained in:
parent b31bb945ad
commit 40ecafcd31
@@ -9,6 +9,7 @@ import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import com.google.common.escape.Escaper;
import com.google.common.net.PercentEscaper;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.text.StringSubstitutor;
import org.apache.http.NameValuePair;
import org.apache.http.client.utils.URLEncodedUtils;
import org.apache.http.message.BasicNameValuePair;

@@ -527,11 +528,24 @@ public class UrlUtil {
if (questionMarkIndex != -1) {
matchUrl = matchUrl.substring(questionMarkIndex + 1);
}
matchUrl = matchUrl.replace("|", "%7C");
matchUrl = matchUrl.replace("=>=", "=%3E%3D");
matchUrl = matchUrl.replace("=<=", "=%3C%3D");
matchUrl = matchUrl.replace("=>", "=%3E");
matchUrl = matchUrl.replace("=<", "=%3C");

final String[] searchList = new String[]{
"+",
"|",
"=>=",
"=<=",
"=>",
"=<"
};
final String[] replacementList = new String[]{
"%2B",
"%7C",
"=%3E%3D",
"=%3C%3D",
"=%3E",
"=%3C"
};
matchUrl = StringUtils.replaceEach(matchUrl, searchList, replacementList);
if (matchUrl.contains(" ")) {
throw new InvalidRequestException("Failed to parse match URL[" + theMatchUrl + "] - URL is invalid (must not contain spaces)");
}
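The replaceEach table above is the heart of the fix: characters that Apache's URLEncodedUtils would otherwise reinterpret (a bare + is decoded as a space under legacy form-decoding rules) are pre-escaped before the query string is parsed. A minimal, self-contained sketch of the same idea follows; the helper name parseMatchUrl and the wrapper class are hypothetical, but the escape table comes from the hunk above and the Apache HttpClient / commons-lang3 calls are standard.

import java.nio.charset.StandardCharsets;
import java.util.List;

import org.apache.commons.lang3.StringUtils;
import org.apache.http.NameValuePair;
import org.apache.http.client.utils.URLEncodedUtils;

public class PlusPreservingParseSketch {

	// Hypothetical helper mirroring the change: escape characters that
	// URLEncodedUtils.parse() would otherwise decode, then parse normally.
	static List<NameValuePair> parseMatchUrl(String theQueryString) {
		String[] searchList = {"+", "|", "=>=", "=<=", "=>", "=<"};
		String[] replacementList = {"%2B", "%7C", "=%3E%3D", "=%3C%3D", "=%3E", "=%3C"};
		String escaped = StringUtils.replaceEach(theQueryString, searchList, replacementList);
		return URLEncodedUtils.parse(escaped, StandardCharsets.UTF_8);
	}

	public static void main(String[] args) {
		// Parsed directly, the + is decoded as a space per legacy form rules.
		System.out.println(URLEncodedUtils.parse(
			"identifier=20210427133226.444+0800", StandardCharsets.UTF_8).get(0).getValue());
		// Prints: 20210427133226.444 0800

		// Pre-escaped, the + survives as a literal plus character.
		System.out.println(parseMatchUrl("identifier=20210427133226.444+0800").get(0).getValue());
		// Prints: 20210427133226.444+0800
	}
}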
@@ -1,8 +1,13 @@
package ca.uhn.fhir.util;

import org.apache.http.message.BasicNameValuePair;
import org.junit.jupiter.api.Test;

import static org.junit.jupiter.api.Assertions.*;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;

public class UrlUtilTest {

@@ -82,4 +87,23 @@ public class UrlUtilTest {
assertEquals(" ", UrlUtil.sanitizeUrlPart(" \0 "));
}

@Test
public void testTranslateMatchUrl_UrlWithSpaces() {
// Real space
assertThat(UrlUtil.translateMatchUrl("Observation?names=homer%20simpson"),
containsInAnyOrder(new BasicNameValuePair("names", "homer simpson")));

// Treat + as an actual + and not a space
assertThat(UrlUtil.translateMatchUrl("Observation?names=homer+simpson"),
containsInAnyOrder(new BasicNameValuePair("names", "homer+simpson")));

}

@Test
public void testTranslateMatchUrl_UrlWithPipe() {
// Real space
assertThat(UrlUtil.translateMatchUrl("Observation?names=homer|simpson"),
containsInAnyOrder(new BasicNameValuePair("names", "homer|simpson")));
}

}
@@ -0,0 +1,7 @@
---
type: change
issue: 2653
title: "When performing a conditional create/update/delete on a JPA server, if the match URL
contained a plus character, this character was interpreted as a space (per legacy URL encoding
rules) even though this has proven to not be the intended behaviour in real life applications.
Plus characters will now be treated literally as a plus character in these URLs."
@@ -0,0 +1,6 @@
---
type: add
issue: 2653
title: "When performing a conditional create operation on a JPA server, the system will now
verify that the conditional URL actually matches the data supplied in the resource body,
and aborts the conditional create if it does not."
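As a rough illustration of this new guard, a conditional create whose body contradicts its own conditional URL should now be rejected with an HTTP 400. The sketch below uses the standard HAPI FHIR generic client against an assumed local server base URL; it is an illustrative usage pattern, not code from the commit.

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import org.hl7.fhir.r4.model.Observation;

public class ConditionalCreateMismatchSketch {
	public static void main(String[] args) {
		FhirContext ctx = FhirContext.forR4();
		// Placeholder base URL for a JPA server that includes this change.
		IGenericClient client = ctx.newRestfulGenericClient("http://localhost:8080/fhir");

		Observation obs = new Observation();
		obs.addIdentifier().setValue("A+B");

		try {
			// The conditional URL asks for "A B" (escaped space) while the body
			// carries "A+B", so the server is expected to abort the create.
			client.create()
				.resource(obs)
				.conditionalByUrl("Observation?identifier=A%20B")
				.execute();
		} catch (InvalidRequestException e) {
			System.out.println(e.getMessage());
		}
	}
}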
@@ -53,6 +53,8 @@ import ca.uhn.fhir.jpa.search.PersistedJpaBundleProviderFactory;
import ca.uhn.fhir.jpa.search.cache.ISearchCacheSvc;
import ca.uhn.fhir.jpa.searchparam.extractor.LogicalReferenceHelper;
import ca.uhn.fhir.jpa.searchparam.extractor.ResourceIndexedSearchParams;
import ca.uhn.fhir.jpa.searchparam.matcher.InMemoryMatchResult;
import ca.uhn.fhir.jpa.searchparam.matcher.InMemoryResourceMatcher;
import ca.uhn.fhir.jpa.sp.ISearchParamPresenceSvc;
import ca.uhn.fhir.jpa.term.api.ITermReadSvc;
import ca.uhn.fhir.jpa.util.AddRemoveCount;

@@ -77,6 +79,7 @@ import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor.ActionRequestDetails;

@@ -92,7 +95,6 @@ import com.google.common.hash.HashFunction;
import com.google.common.hash.Hashing;
import org.apache.commons.lang3.NotImplementedException;
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.tuple.Pair;
import org.hl7.fhir.instance.model.api.IAnyResource;
import org.hl7.fhir.instance.model.api.IBase;
import org.hl7.fhir.instance.model.api.IBaseCoding;

@@ -179,10 +181,10 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
public static final String OO_SEVERITY_ERROR = "error";
public static final String OO_SEVERITY_INFO = "information";
public static final String OO_SEVERITY_WARN = "warning";
public static final String XACT_USERDATA_KEY_RESOLVED_TAG_DEFINITIONS = BaseHapiFhirDao.class.getName() + "_RESOLVED_TAG_DEFINITIONS";
private static final Logger ourLog = LoggerFactory.getLogger(BaseHapiFhirDao.class);
private static final Map<FhirVersionEnum, FhirContext> ourRetrievalContexts = new HashMap<>();
private static final String PROCESSING_SUB_REQUEST = "BaseHapiFhirDao.processingSubRequest";
public static final String XACT_USERDATA_KEY_RESOLVED_TAG_DEFINITIONS = BaseHapiFhirDao.class.getName() + "_RESOLVED_TAG_DEFINITIONS";
private static boolean ourValidationDisabledForUnitTest;
private static boolean ourDisableIncrementOnUpdateForUnitTest = false;

@@ -211,6 +213,8 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
@Autowired
protected DaoRegistry myDaoRegistry;
@Autowired
protected InMemoryResourceMatcher myInMemoryResourceMatcher;
@Autowired
ExpungeService myExpungeService;
@Autowired
private HistoryBuilderFactory myHistoryBuilderFactory;

@@ -1082,6 +1086,15 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
myDaoSearchParamSynchronizer = theDaoSearchParamSynchronizer;
}

private void verifyMatchUrlForConditionalCreate(IBaseResource theResource, String theIfNoneExist, ResourceTable entity, ResourceIndexedSearchParams theParams) {
// Make sure that the match URL was actually appropriate for the supplied resource
InMemoryMatchResult outcome = myInMemoryResourceMatcher.match(theIfNoneExist, theResource, theParams);
if (outcome.supported() && !outcome.matched()) {
throw new InvalidRequestException("Failed to process conditional create. The supplied resource did not satisfy the conditional URL.");
}
}

@SuppressWarnings("unchecked")
@Override
public ResourceTable updateEntity(RequestDetails theRequest, final IBaseResource theResource, IBasePersistedResource

@@ -1148,6 +1161,17 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
}

if (changed.isChanged()) {

// Make sure that the match URL was actually appropriate for the supplied
// resource. We only do this for version 1 right now since technically it
// is possible (and legal) for someone to be using a conditional update
// to match a resource and then update it in a way that it no longer
// matches. We could certainly make this configurable though in the
// future.
if (entity.getVersion() <= 1L && entity.getCreatedByMatchUrl() != null) {
verifyMatchUrlForConditionalCreate(theResource, entity.getCreatedByMatchUrl(), entity, newParams);
}

entity.setUpdated(theTransactionDetails.getTransactionDate());
if (theResource instanceof IResource) {
entity.setLanguage(((IResource) theResource).getLanguage().getValue());
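One detail worth calling out in verifyMatchUrlForConditionalCreate above: the create is only rejected when the in-memory matcher both supports the match URL and reports a non-match, so match URLs that cannot be evaluated in memory are let through rather than refused. A small sketch of that contract, using only the accessors shown in the hunk (the wrapper class and method name are hypothetical, not code from the commit):

import ca.uhn.fhir.jpa.searchparam.matcher.InMemoryMatchResult;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;

public class ConditionalCreateGuardSketch {

	// Sketch of the supported()/matched() decision used above.
	static void verifyConditionalCreate(InMemoryMatchResult theOutcome) {
		if (!theOutcome.supported()) {
			// The match URL could not be evaluated in memory; let it through rather than reject.
			return;
		}
		if (!theOutcome.matched()) {
			throw new InvalidRequestException(
				"Failed to process conditional create. The supplied resource did not satisfy the conditional URL.");
		}
	}
}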
@@ -56,6 +56,7 @@ import ca.uhn.fhir.jpa.search.cache.SearchCacheStatusEnum;
import ca.uhn.fhir.jpa.search.reindex.IResourceReindexingSvc;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.searchparam.extractor.ResourceIndexedSearchParams;
import ca.uhn.fhir.jpa.util.JpaInterceptorBroadcaster;
import ca.uhn.fhir.jpa.util.MemoryCacheService;
import ca.uhn.fhir.model.api.IQueryParameterType;

@@ -257,6 +258,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
ResourceTable entity = new ResourceTable();
entity.setResourceType(toResourceName(theResource));
entity.setPartitionId(theRequestPartitionId);
entity.setCreatedByMatchUrl(theIfNoneExist);

if (isNotBlank(theIfNoneExist)) {
Set<ResourcePersistentId> match = myMatchResourceUrlService.processMatchUrl(theIfNoneExist, myResourceType, theRequest);

@@ -274,7 +276,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
};

Supplier<IIdType> idSupplier = () -> {
return myTxTemplate.execute(tx-> {
return myTxTemplate.execute(tx -> {
IIdType retVal = myIdHelperService.translatePidIdToForcedId(myFhirContext, myResourceName, pid);
if (!retVal.hasVersionIdPart()) {
IIdType idWithVersion = myMemoryCacheService.getIfPresent(MemoryCacheService.CacheEnum.RESOURCE_CONDITIONAL_CREATE_VERSION, pid.getIdAsLong());

@@ -356,6 +358,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
}

if (theIfNoneExist != null) {
// Pre-cache the match URL
myMatchResourceUrlService.matchUrlResolved(theIfNoneExist, new ResourcePersistentId(entity.getResourceId()));
}

@@ -659,7 +662,6 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
}
}

private <MT extends IBaseMetaType> void doMetaAdd(MT theMetaAdd, BaseHasResource theEntity, RequestDetails theRequestDetails, TransactionDetails theTransactionDetails) {
IBaseResource oldVersion = toResource(theEntity, false);

@@ -809,7 +811,6 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
return myResourceName;
}

@Override
public Class<T> getResourceType() {
return myResourceType;

@@ -1810,4 +1811,8 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B

}

private static ResourceIndexedSearchParams toResourceIndexedSearchParams(ResourceTable theEntity) {
return new ResourceIndexedSearchParams(theEntity);
}

}
@@ -68,6 +68,7 @@ public class MemoryCacheService {
case CONCEPT_TRANSLATION_REVERSE:
timeoutSeconds = myDaoConfig.getTranslationCachesExpireAfterWriteInMinutes() * 1000;
break;
case HISTORY_COUNT:
case TAG_DEFINITION:
case PERSISTENT_ID:
case RESOURCE_LOOKUP:

@@ -103,6 +104,17 @@ public class MemoryCacheService {
getCache(theCache).put(theKey, theValue);
}

/**
* This method registers a transaction synchronization that puts an entry in the cache
* if and when the current database transaction successfully commits. If the
* transaction is rolled back, the key+value passed into this method will
* not be added to the cache.
*
* This is useful for situations where you want to store something that has been
* resolved in the DB during the current transaction, but it's not yet guaranteed
* that this item will successfully save to the DB. Use this method in that case
* in order to avoid cache poisoning.
*/
public <K, V> void putAfterCommit(CacheEnum theCache, K theKey, V theValue) {
if (TransactionSynchronizationManager.isSynchronizationActive()) {
TransactionSynchronizationManager.registerSynchronization(new TransactionSynchronization() {

@@ -116,6 +128,7 @@ public class MemoryCacheService {
}
}

@SuppressWarnings("unchecked")
public <K, V> Map<K, V> getAllPresent(CacheEnum theCache, Iterable<K> theKeys) {
return (Map<K, V>) getCache(theCache).getAllPresent(theKeys);
}
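The Javadoc above explains the intent of putAfterCommit, but the hunk is cut off inside the anonymous TransactionSynchronization. The following is only a sketch of the general pattern (assuming Spring 5's TransactionSynchronization with default methods), with the cache stood in by a plain ConcurrentHashMap rather than the service's real cache implementation:

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

import org.springframework.transaction.support.TransactionSynchronization;
import org.springframework.transaction.support.TransactionSynchronizationManager;

public class AfterCommitCacheSketch<K, V> {

	private final Map<K, V> myCache = new ConcurrentHashMap<>();

	// Publish the entry only once the surrounding database transaction commits,
	// so a rollback cannot leave the cache pointing at data that was never saved.
	public void putAfterCommit(K theKey, V theValue) {
		if (TransactionSynchronizationManager.isSynchronizationActive()) {
			TransactionSynchronizationManager.registerSynchronization(new TransactionSynchronization() {
				@Override
				public void afterCommit() {
					myCache.put(theKey, theValue);
				}
			});
		} else {
			// No transaction in progress, so there is nothing that can roll back.
			myCache.put(theKey, theValue);
		}
	}

	public V get(K theKey) {
		return myCache.get(theKey);
	}
}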
@@ -11,6 +11,8 @@ import ca.uhn.fhir.jpa.api.model.ExpungeOptions;
import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc;
import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc;
import ca.uhn.fhir.jpa.config.BaseConfig;
import ca.uhn.fhir.jpa.dao.data.IResourceIndexedSearchParamTokenDao;
import ca.uhn.fhir.jpa.dao.data.IResourceLinkDao;
import ca.uhn.fhir.jpa.dao.index.IdHelperService;
import ca.uhn.fhir.jpa.entity.TermConcept;
import ca.uhn.fhir.jpa.entity.TermValueSet;

@@ -25,7 +27,6 @@ import ca.uhn.fhir.jpa.search.PersistedJpaBundleProvider;
import ca.uhn.fhir.jpa.search.cache.ISearchCacheSvc;
import ca.uhn.fhir.jpa.search.cache.ISearchResultCacheSvc;
import ca.uhn.fhir.jpa.search.reindex.IResourceReindexingSvc;
import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
import ca.uhn.fhir.jpa.subscription.match.registry.SubscriptionLoader;
import ca.uhn.fhir.jpa.subscription.match.registry.SubscriptionRegistry;
import ca.uhn.fhir.jpa.util.CircularQueueCaptureQueriesListener;

@@ -37,6 +38,7 @@ import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.exceptions.ResourceVersionConflictException;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
import ca.uhn.fhir.test.BaseTest;
import ca.uhn.fhir.test.utilities.LoggingExtension;
import ca.uhn.fhir.test.utilities.ProxyUtil;
@@ -123,16 +125,6 @@ public abstract class BaseJpaTest extends BaseTest {
TestUtil.setShouldRandomizeTimezones(false);
}

public static Map<?,?> buildHeapLuceneHibernateSearchProperties() {
Map<String, String> props = new HashMap<>();
props.put(BackendSettings.backendKey(BackendSettings.TYPE), "lucene");
props.put(BackendSettings.backendKey(LuceneBackendSettings.ANALYSIS_CONFIGURER), HapiLuceneAnalysisConfigurer.class.getName());
props.put(BackendSettings.backendKey(LuceneIndexSettings.DIRECTORY_TYPE), "local-heap");
props.put(BackendSettings.backendKey(LuceneBackendSettings.LUCENE_VERSION), "LUCENE_CURRENT");
props.put(HibernateOrmMapperSettings.ENABLED, "true");
return props;
}

@RegisterExtension
public LoggingExtension myLoggingExtension = new LoggingExtension();
@Mock(answer = Answers.RETURNS_DEEP_STUBS)

@@ -155,6 +147,10 @@ public abstract class BaseJpaTest extends BaseTest {
@Autowired
protected SubscriptionLoader mySubscriptionLoader;
@Autowired
protected IResourceLinkDao myResourceLinkDao;
@Autowired
protected IResourceIndexedSearchParamTokenDao myResourceIndexedSearchParamTokenDao;
@Autowired
private IdHelperService myIdHelperService;
@Autowired
private MemoryCacheService myMemoryCacheService;
@@ -237,6 +233,18 @@ public abstract class BaseJpaTest extends BaseTest {

protected abstract PlatformTransactionManager getTxManager();

protected void logAllResourceLinks() {
runInTransaction(() -> {
ourLog.info("Resource Links:\n * {}", myResourceLinkDao.findAll().stream().map(t -> t.toString()).collect(Collectors.joining("\n * ")));
});
}

protected void logAllTokenIndexes() {
runInTransaction(() -> {
ourLog.info("Token indexes:\n * {}", myResourceIndexedSearchParamTokenDao.findAll().stream().map(t -> t.toString()).collect(Collectors.joining("\n * ")));
});
}

public TransactionTemplate newTxTemplate() {
TransactionTemplate retVal = new TransactionTemplate(getTxManager());
retVal.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);
@@ -461,6 +469,74 @@ public abstract class BaseJpaTest extends BaseTest {
Thread.sleep(500);
}

protected TermValueSetConceptDesignation assertTermConceptContainsDesignation(TermValueSetConcept theConcept, String theLanguage, String theUseSystem, String theUseCode, String theUseDisplay, String theDesignationValue) {
Stream<TermValueSetConceptDesignation> stream = theConcept.getDesignations().stream();
if (theLanguage != null) {
stream = stream.filter(designation -> theLanguage.equalsIgnoreCase(designation.getLanguage()));
}
if (theUseSystem != null) {
stream = stream.filter(designation -> theUseSystem.equalsIgnoreCase(designation.getUseSystem()));
}
if (theUseCode != null) {
stream = stream.filter(designation -> theUseCode.equalsIgnoreCase(designation.getUseCode()));
}
if (theUseDisplay != null) {
stream = stream.filter(designation -> theUseDisplay.equalsIgnoreCase(designation.getUseDisplay()));
}
if (theDesignationValue != null) {
stream = stream.filter(designation -> theDesignationValue.equalsIgnoreCase(designation.getValue()));
}

Optional<TermValueSetConceptDesignation> first = stream.findFirst();
if (!first.isPresent()) {
String failureMessage = String.format("Concept %s did not contain designation [%s|%s|%s|%s|%s] ", theConcept.toString(), theLanguage, theUseSystem, theUseCode, theUseDisplay, theDesignationValue);
fail(failureMessage);
return null;
} else {
return first.get();
}

}

protected TermValueSetConcept assertTermValueSetContainsConceptAndIsInDeclaredOrder(TermValueSet theValueSet, String theSystem, String theCode, String theDisplay, Integer theDesignationCount) {
List<TermValueSetConcept> contains = theValueSet.getConcepts();

Stream<TermValueSetConcept> stream = contains.stream();
if (theSystem != null) {
stream = stream.filter(concept -> theSystem.equalsIgnoreCase(concept.getSystem()));
}
if (theCode != null) {
stream = stream.filter(concept -> theCode.equalsIgnoreCase(concept.getCode()));
}
if (theDisplay != null) {
stream = stream.filter(concept -> theDisplay.equalsIgnoreCase(concept.getDisplay()));
}
if (theDesignationCount != null) {
stream = stream.filter(concept -> concept.getDesignations().size() == theDesignationCount);
}

Optional<TermValueSetConcept> first = stream.findFirst();
if (!first.isPresent()) {
String failureMessage = String.format("Expanded ValueSet %s did not contain concept [%s|%s|%s] with [%d] designations", theValueSet.getId(), theSystem, theCode, theDisplay, theDesignationCount);
fail(failureMessage);
return null;
} else {
TermValueSetConcept termValueSetConcept = first.get();
assertEquals(termValueSetConcept.getOrder(), theValueSet.getConcepts().indexOf(termValueSetConcept));
return termValueSetConcept;
}
}

public static Map<?, ?> buildHeapLuceneHibernateSearchProperties() {
Map<String, String> props = new HashMap<>();
props.put(BackendSettings.backendKey(BackendSettings.TYPE), "lucene");
props.put(BackendSettings.backendKey(LuceneBackendSettings.ANALYSIS_CONFIGURER), HapiLuceneAnalysisConfigurer.class.getName());
props.put(BackendSettings.backendKey(LuceneIndexSettings.DIRECTORY_TYPE), "local-heap");
props.put(BackendSettings.backendKey(LuceneBackendSettings.LUCENE_VERSION), "LUCENE_CURRENT");
props.put(HibernateOrmMapperSettings.ENABLED, "true");
return props;
}

@BeforeAll
public static void beforeClassRandomizeLocale() {
randomizeLocale();
@@ -602,63 +678,5 @@ public abstract class BaseJpaTest extends BaseTest {
Thread.sleep(500);
}

protected TermValueSetConceptDesignation assertTermConceptContainsDesignation(TermValueSetConcept theConcept, String theLanguage, String theUseSystem, String theUseCode, String theUseDisplay, String theDesignationValue) {
Stream<TermValueSetConceptDesignation> stream = theConcept.getDesignations().stream();
if (theLanguage != null) {
stream = stream.filter(designation -> theLanguage.equalsIgnoreCase(designation.getLanguage()));
}
if (theUseSystem != null) {
stream = stream.filter(designation -> theUseSystem.equalsIgnoreCase(designation.getUseSystem()));
}
if (theUseCode != null) {
stream = stream.filter(designation -> theUseCode.equalsIgnoreCase(designation.getUseCode()));
}
if (theUseDisplay != null) {
stream = stream.filter(designation -> theUseDisplay.equalsIgnoreCase(designation.getUseDisplay()));
}
if (theDesignationValue != null) {
stream = stream.filter(designation -> theDesignationValue.equalsIgnoreCase(designation.getValue()));
}

Optional<TermValueSetConceptDesignation> first = stream.findFirst();
if (!first.isPresent()) {
String failureMessage = String.format("Concept %s did not contain designation [%s|%s|%s|%s|%s] ", theConcept.toString(), theLanguage, theUseSystem, theUseCode, theUseDisplay, theDesignationValue);
fail(failureMessage);
return null;
} else {
return first.get();
}

}

protected TermValueSetConcept assertTermValueSetContainsConceptAndIsInDeclaredOrder(TermValueSet theValueSet, String theSystem, String theCode, String theDisplay, Integer theDesignationCount) {
List<TermValueSetConcept> contains = theValueSet.getConcepts();

Stream<TermValueSetConcept> stream = contains.stream();
if (theSystem != null) {
stream = stream.filter(concept -> theSystem.equalsIgnoreCase(concept.getSystem()));
}
if (theCode != null ) {
stream = stream.filter(concept -> theCode.equalsIgnoreCase(concept.getCode()));
}
if (theDisplay != null){
stream = stream.filter(concept -> theDisplay.equalsIgnoreCase(concept.getDisplay()));
}
if (theDesignationCount != null) {
stream = stream.filter(concept -> concept.getDesignations().size() == theDesignationCount);
}

Optional<TermValueSetConcept> first = stream.findFirst();
if (!first.isPresent()) {
String failureMessage = String.format("Expanded ValueSet %s did not contain concept [%s|%s|%s] with [%d] designations", theValueSet.getId(), theSystem, theCode, theDisplay, theDesignationCount);
fail(failureMessage);
return null;
} else {
TermValueSetConcept termValueSetConcept = first.get();
assertEquals(termValueSetConcept.getOrder(), theValueSet.getConcepts().indexOf(termValueSetConcept));
return termValueSetConcept;
}
}

}
@@ -2511,7 +2511,7 @@ public class FhirSystemDaoDstu3Test extends BaseJpaDstu3SystemTest {
assertEquals("Patient/P1/_history/1", new IdType(resp.getEntry().get(0).getResponse().getLocation()).toUnqualified().getValue());

p = new Patient();
p.setActive(true);
p.setActive(false);
p.addName().setFamily("AAA");

b = new Bundle();
@@ -216,14 +216,10 @@ public abstract class BaseJpaR4Test extends BaseJpaTest implements ITestDataBuil
@Qualifier("myResourceCountsCache")
protected ResourceCountCache myResourceCountsCache;
@Autowired
protected IResourceLinkDao myResourceLinkDao;
@Autowired
protected ISearchParamPresentDao mySearchParamPresentDao;
@Autowired
protected IResourceIndexedSearchParamStringDao myResourceIndexedSearchParamStringDao;
@Autowired
protected IResourceIndexedSearchParamTokenDao myResourceIndexedSearchParamTokenDao;
@Autowired
protected IResourceIndexedSearchParamCoordsDao myResourceIndexedSearchParamCoordsDao;
@Autowired
protected IResourceIndexedSearchParamQuantityDao myResourceIndexedSearchParamQuantityDao;
@@ -1,19 +1,24 @@
package ca.uhn.fhir.jpa.dao.r4;

import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
import ca.uhn.fhir.jpa.model.entity.NormalizedQuantitySearchLevel;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamQuantity;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamQuantityNormalized;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamToken;
import ca.uhn.fhir.jpa.model.util.UcumServiceUtil;
import ca.uhn.fhir.jpa.partition.SystemRequestDetails;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.param.QuantityParam;
import ca.uhn.fhir.rest.param.ReferenceParam;
import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import ca.uhn.fhir.rest.server.exceptions.ResourceVersionConflictException;
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import ca.uhn.fhir.util.BundleBuilder;
import org.apache.commons.lang3.time.DateUtils;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;

@@ -53,6 +58,7 @@ import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.matchesPattern;
import static org.hamcrest.Matchers.is;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;

@@ -67,6 +73,62 @@ public class FhirResourceDaoR4CreateTest extends BaseJpaR4Test {
myModelConfig.setNormalizedQuantitySearchLevel(NormalizedQuantitySearchLevel.NORMALIZED_QUANTITY_SEARCH_NOT_SUPPORTED);
}

@Test
public void testConditionalCreateWithPlusInUrl() {
Observation obs = new Observation();
obs.addIdentifier().setValue("20210427133226.444+0800");
DaoMethodOutcome outcome = myObservationDao.create(obs, "identifier=20210427133226.444+0800", new SystemRequestDetails());
assertTrue(outcome.getCreated());

logAllTokenIndexes();
myCaptureQueriesListener.clear();
obs = new Observation();
obs.addIdentifier().setValue("20210427133226.444+0800");
outcome = myObservationDao.create(obs, "identifier=20210427133226.444+0800", new SystemRequestDetails());
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
assertFalse(outcome.getCreated());
}

/**
* Simulate a client error: Identifier has a "+" but URL has an escaped space character
*/
@Test
public void testConditionalCreateFailsIfMatchUrlDoesntMatch() {
Observation obs = new Observation();
obs.addIdentifier().setValue("A+B");
try {
myObservationDao.create(obs, "identifier=A%20B", new SystemRequestDetails());
fail();
} catch (InvalidRequestException e) {
assertEquals("Failed to process conditional create. The supplied resource did not satisfy the conditional URL.", e.getMessage());
}
}

/**
* Simulate a client error: Identifier has a "+" but URL has an escaped space character
*/
@Test
public void testConditionalCreateFailsIfMatchUrlDoesntMatch_InTransaction() {
BundleBuilder bb = new BundleBuilder(myFhirCtx);

Patient patient = new Patient();
patient.setId(IdType.newRandomUuid());
patient.setActive(true);
bb.addTransactionCreateEntry(patient);

Observation obs = new Observation();
obs.getSubject().setReference(patient.getId());
obs.addIdentifier().setValue("A+B");
bb.addTransactionCreateEntry(obs).conditional("identifier=A%20B");

try {
mySystemDao.transaction(new SystemRequestDetails(), (Bundle) bb.getBundle());
fail();
} catch (InvalidRequestException e) {
assertEquals("Failed to process conditional create. The supplied resource did not satisfy the conditional URL.", e.getMessage());
}
}

@Test
public void testCreateResourceWithKoreanText() throws IOException {
String input = loadClasspath("/r4/bug832-korean-text.xml");
@@ -564,10 +564,8 @@ public class FhirResourceDaoR4LegacySearchBuilderTest extends BaseJpaR4Test {
ma.setMedication(new Reference(medId));
IIdType moId = myMedicationAdministrationDao.create(ma).getId().toUnqualified();

runInTransaction(() -> {
ourLog.info("Resource Links:\n * {}", myResourceLinkDao.findAll().stream().map(t -> t.toString()).collect(Collectors.joining("\n * ")));
ourLog.info("Token indexes:\n * {}", myResourceIndexedSearchParamTokenDao.findAll().stream().map(t -> t.toString()).collect(Collectors.joining("\n * ")));
});
logAllResourceLinks();
logAllTokenIndexes();

SearchParameterMap map = SearchParameterMap.newSynchronous();
map.add(MedicationAdministration.SP_MEDICATION, new ReferenceAndListParam().addAnd(new ReferenceOrListParam().add(new ReferenceParam("code", "04823543"))));
@@ -797,6 +797,84 @@ public class FhirResourceDaoR4QueryCountTest extends BaseJpaR4Test {

}

@Test
public void testTransactionWithCreateClientAssignedIdAndReference() {
myDaoConfig.setDeleteEnabled(false);

Bundle input = new Bundle();

Patient patient = new Patient();
patient.setId("Patient/A");
patient.setActive(true);
input.addEntry()
.setFullUrl(patient.getId())
.setResource(patient)
.getRequest()
.setMethod(Bundle.HTTPVerb.PUT)
.setUrl("Patient/A");

Observation observation = new Observation();
observation.setId(IdType.newRandomUuid());
observation.addReferenceRange().setText("A");
input.addEntry()
.setFullUrl(observation.getId())
.setResource(observation)
.getRequest()
.setMethod(Bundle.HTTPVerb.POST)
.setUrl("Observation");

myCaptureQueriesListener.clear();
Bundle output = mySystemDao.transaction(mySrd, input);
ourLog.info(myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(output));

myCaptureQueriesListener.logSelectQueriesForCurrentThread();
assertEquals(1, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
myCaptureQueriesListener.logInsertQueriesForCurrentThread();
assertEquals(5, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
myCaptureQueriesListener.logUpdateQueriesForCurrentThread();
assertEquals(2, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());

// Pass 2

input = new Bundle();

patient = new Patient();
patient.setId("Patient/A");
patient.setActive(true);
input.addEntry()
.setFullUrl(patient.getId())
.setResource(patient)
.getRequest()
.setMethod(Bundle.HTTPVerb.PUT)
.setUrl("Patient/A");

observation = new Observation();
observation.setId(IdType.newRandomUuid());
observation.addReferenceRange().setText("A");
input.addEntry()
.setFullUrl(observation.getId())
.setResource(observation)
.getRequest()
.setMethod(Bundle.HTTPVerb.POST)
.setUrl("Observation");

myCaptureQueriesListener.clear();
output = mySystemDao.transaction(mySrd, input);
ourLog.info(myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(output));

myCaptureQueriesListener.logSelectQueriesForCurrentThread();
assertEquals(3, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
myCaptureQueriesListener.logInsertQueriesForCurrentThread();
assertEquals(2, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
myCaptureQueriesListener.logUpdateQueriesForCurrentThread();
assertEquals(1, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());

}

@Test
public void testTransactionWithMultipleReferences() {
Bundle input = new Bundle();
@@ -1,6 +1,7 @@
package ca.uhn.fhir.jpa.dao.r4;

import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao;
import ca.uhn.fhir.jpa.model.entity.NormalizedQuantitySearchLevel;
import ca.uhn.fhir.jpa.model.entity.ResourceEncodingEnum;

@@ -101,6 +102,7 @@ import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.startsWith;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;

@@ -3385,7 +3387,7 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest {
assertEquals("Patient/P1/_history/1", new IdType(resp.getEntry().get(0).getResponse().getLocation()).toUnqualified().getValue());

p = new Patient();
p.setActive(true);
p.setActive(false);
p.addName().setFamily("AAA");

b = new Bundle();
@@ -252,6 +252,9 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas
@OneToOne(optional = true, fetch = FetchType.EAGER, cascade = {}, orphanRemoval = false, mappedBy = "myResource")
private ForcedId myForcedId;

@Transient
private volatile String myCreatedByMatchUrl;

/**
* Constructor
*/

@@ -698,5 +701,11 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas
}
}

public void setCreatedByMatchUrl(String theCreatedByMatchUrl) {
myCreatedByMatchUrl = theCreatedByMatchUrl;
}

public String getCreatedByMatchUrl() {
return myCreatedByMatchUrl;
}
}