Oracle: Ensure migrated database still takes large resource text updates (#5629)
* First pass at fix to Oracle HFJ_RES_VER.RES_TEXT_VC migration.
* First stab at agreed upon solution.
* Fix error with 4001 by removing unnecessary annotation.
* Spotless and TODO.
* Remove annotation for good and set length to LONG32.
* Fix copyright year.
* Finalize changelog.
* Remove migration changes. Fix unit test.
* Fix compile error.
* Log output.
* Refactor resource history code into new ResourceHistoryCalculator.
* Spotless.
* Convert record to POJO.
* Restore pre-17 switch statement.
* Finalize new resource history calculator code and tests.
* Spotless.
* Remove logging.
* Update hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_0_0/5633-oracle-hfj-res-ver-clob-migration.yaml
  Apply code reviewer suggestion
  Co-authored-by: Michael Buckley <michaelabuckley@gmail.com>
* Code review feedback.

---------

Co-authored-by: Michael Buckley <michaelabuckley@gmail.com>
This commit is contained in:
parent
224e569317
commit
d3876c546f
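
For orientation before the diff: the change routes all resource-history encoding decisions through a new dialect-aware helper, so that Oracle keeps writing the legacy RES_TEXT LOB column while every other database keeps writing the RES_TEXT_VC inline text column. Below is a condensed sketch of that branching, distilled from the ResourceHistoryCalculator class added later in this commit; it is not a verbatim excerpt, and local simplifications are mine.

    // Condensed sketch (not verbatim) of the dialect-dependent branching this commit introduces,
    // distilled from ResourceHistoryCalculator#calculateResourceHistoryState shown later in the diff.
    ResourceHistoryState calculateResourceHistoryState(
            IBaseResource theResource, ResourceEncodingEnum theEncoding, List<String> theExcludeElements) {
        String encoded = encodeResource(theResource, theEncoding, theExcludeElements);
        if (myIsOracleDialect) {
            // Oracle: keep the content in the RES_TEXT LOB column and hash the bytes
            byte[] binary = getResourceBinary(theEncoding, encoded);
            return new ResourceHistoryState(null, binary, theEncoding, Hashing.sha256().hashBytes(binary));
        }
        // All other databases: store JSON inline in RES_TEXT_VC and hash the characters
        return new ResourceHistoryState(
                encoded, null, ResourceEncodingEnum.JSON, Hashing.sha256().hashUnencodedChars(encoded));
    }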
@@ -0,0 +1,5 @@
+---
+type: fix
+issue: 5633
+title: "Smile failed to save resources running on Oracle when installed from 2023-02 or earlier.
+  This has been fixed."
@@ -75,4 +75,8 @@ public class HibernatePropertiesProvider {
 	public DataSource getDataSource() {
 		return myEntityManagerFactory.getDataSource();
 	}
+
+	public boolean isOracleDialect() {
+		return getDialect() instanceof org.hibernate.dialect.OracleDialect;
+	}
 }
@@ -51,6 +51,7 @@ import ca.uhn.fhir.jpa.dao.IJpaStorageResourceParser;
 import ca.uhn.fhir.jpa.dao.ISearchBuilder;
 import ca.uhn.fhir.jpa.dao.JpaStorageResourceParser;
 import ca.uhn.fhir.jpa.dao.MatchResourceUrlService;
+import ca.uhn.fhir.jpa.dao.ResourceHistoryCalculator;
 import ca.uhn.fhir.jpa.dao.SearchBuilderFactory;
 import ca.uhn.fhir.jpa.dao.TransactionProcessor;
 import ca.uhn.fhir.jpa.dao.data.IResourceModifiedDao;
@@ -869,4 +870,10 @@ public class JpaConfig {
 	public IMetaTagSorter metaTagSorter() {
 		return new MetaTagSorterAlphabetical();
 	}
+
+	@Bean
+	public ResourceHistoryCalculator resourceHistoryCalculator(
+			FhirContext theFhirContext, HibernatePropertiesProvider theHibernatePropertiesProvider) {
+		return new ResourceHistoryCalculator(theFhirContext, theHibernatePropertiesProvider.isOracleDialect());
+	}
 }
@@ -85,7 +85,6 @@ import ca.uhn.fhir.model.api.TagList;
 import ca.uhn.fhir.model.base.composite.BaseCodingDt;
 import ca.uhn.fhir.model.primitive.IdDt;
 import ca.uhn.fhir.parser.DataFormatException;
-import ca.uhn.fhir.parser.IParser;
 import ca.uhn.fhir.rest.api.Constants;
 import ca.uhn.fhir.rest.api.InterceptorInvocationTimingEnum;
 import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
@@ -105,8 +104,6 @@ import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Charsets;
 import com.google.common.collect.Sets;
 import com.google.common.hash.HashCode;
-import com.google.common.hash.HashFunction;
-import com.google.common.hash.Hashing;
 import jakarta.annotation.Nonnull;
 import jakarta.annotation.Nullable;
 import jakarta.annotation.PostConstruct;
@@ -264,6 +261,9 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
 	@Autowired
 	private PlatformTransactionManager myTransactionManager;
 
+	@Autowired
+	protected ResourceHistoryCalculator myResourceHistoryCalculator;
+
 	protected final CodingSpy myCodingSpy = new CodingSpy();
 
 	@VisibleForTesting
@@ -277,6 +277,11 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
 		mySearchParamPresenceSvc = theSearchParamPresenceSvc;
 	}
 
+	@VisibleForTesting
+	public void setResourceHistoryCalculator(ResourceHistoryCalculator theResourceHistoryCalculator) {
+		myResourceHistoryCalculator = theResourceHistoryCalculator;
+	}
+
 	@Override
 	protected IInterceptorBroadcaster getInterceptorBroadcaster() {
 		return myInterceptorBroadcaster;
@@ -643,6 +648,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
 			theEntity.setResourceType(toResourceName(theResource));
 		}
 
+		byte[] resourceBinary;
 		String resourceText;
 		ResourceEncodingEnum encoding;
 		boolean changed = false;
@@ -659,6 +665,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
 			if (address != null) {
 
 				encoding = ResourceEncodingEnum.ESR;
+				resourceBinary = null;
 				resourceText = address.getProviderId() + ":" + address.getLocation();
 				changed = true;
 
@@ -675,10 +682,15 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
 
 				theEntity.setFhirVersion(myContext.getVersion().getVersion());
 
-				HashFunction sha256 = Hashing.sha256();
-				resourceText = encodeResource(theResource, encoding, excludeElements, myContext);
-				encoding = ResourceEncodingEnum.JSON;
-				HashCode hashCode = sha256.hashUnencodedChars(resourceText);
+				// TODO: LD: Once 2024-02 it out the door we should consider further refactoring here to move
+				// more of this logic within the calculator and eliminate more local variables
+				final ResourceHistoryState calculate = myResourceHistoryCalculator.calculateResourceHistoryState(
+						theResource, encoding, excludeElements);
+
+				resourceText = calculate.getResourceText();
+				resourceBinary = calculate.getResourceBinary();
+				encoding = calculate.getEncoding(); // This may be a no-op
+				final HashCode hashCode = calculate.getHashCode();
 
 				String hashSha256 = hashCode.toString();
 				if (!hashSha256.equals(theEntity.getHashSha256())) {
@@ -696,6 +708,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
 			} else {
 
 				encoding = null;
+				resourceBinary = null;
 				resourceText = null;
 			}
 
@@ -713,6 +726,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
 				changed = true;
 			}
 
+			resourceBinary = null;
 			resourceText = null;
 			encoding = ResourceEncodingEnum.DEL;
 		}
@@ -737,13 +751,17 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
 			if (currentHistoryVersion == null || !currentHistoryVersion.hasResource()) {
 				changed = true;
 			} else {
-				changed = !StringUtils.equals(currentHistoryVersion.getResourceTextVc(), resourceText);
+				// TODO: LD: Once 2024-02 it out the door we should consider further refactoring here to move
+				// more of this logic within the calculator and eliminate more local variables
+				changed = myResourceHistoryCalculator.isResourceHistoryChanged(
+						currentHistoryVersion, resourceBinary, resourceText);
 			}
 		}
 	}
 
 	EncodedResource retVal = new EncodedResource();
 	retVal.setEncoding(encoding);
+	retVal.setResourceBinary(resourceBinary);
 	retVal.setResourceText(resourceText);
 	retVal.setChanged(changed);
 
@@ -1393,8 +1411,11 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
 		ResourceEncodingEnum encoding = myStorageSettings.getResourceEncoding();
 		List<String> excludeElements = new ArrayList<>(8);
 		getExcludedElements(historyEntity.getResourceType(), excludeElements, theResource.getMeta());
-		String encodedResourceString = encodeResource(theResource, encoding, excludeElements, myContext);
-		boolean changed = !StringUtils.equals(historyEntity.getResourceTextVc(), encodedResourceString);
+		String encodedResourceString =
+				myResourceHistoryCalculator.encodeResource(theResource, encoding, excludeElements);
+		byte[] resourceBinary = ResourceHistoryCalculator.getResourceBinary(encoding, encodedResourceString);
+		final boolean changed = myResourceHistoryCalculator.isResourceHistoryChanged(
+				historyEntity, resourceBinary, encodedResourceString);
 
 		historyEntity.setUpdated(theTransactionDetails.getTransactionDate());
 
@@ -1406,14 +1427,15 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
 			return historyEntity;
 		}
 
-		populateEncodedResource(encodedResource, encodedResourceString, ResourceEncodingEnum.JSON);
+		myResourceHistoryCalculator.populateEncodedResource(
+				encodedResource, encodedResourceString, resourceBinary, encoding);
 	}
 
 	/*
 	 * Save the resource itself to the resourceHistoryTable
 	 */
 	historyEntity = myEntityManager.merge(historyEntity);
 	historyEntity.setEncoding(encodedResource.getEncoding());
+	historyEntity.setResource(encodedResource.getResourceBinary());
 	historyEntity.setResourceTextVc(encodedResource.getResourceText());
 	myResourceHistoryTableDao.save(historyEntity);
 
@@ -1423,8 +1445,12 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
 	}
 
 	private void populateEncodedResource(
-			EncodedResource encodedResource, String encodedResourceString, ResourceEncodingEnum theEncoding) {
+			EncodedResource encodedResource,
+			String encodedResourceString,
+			byte[] theResourceBinary,
+			ResourceEncodingEnum theEncoding) {
 		encodedResource.setResourceText(encodedResourceString);
+		encodedResource.setResourceBinary(theResourceBinary);
 		encodedResource.setEncoding(theEncoding);
 	}
 
@@ -1489,6 +1515,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
 		}
 
 		historyEntry.setEncoding(theChanged.getEncoding());
+		historyEntry.setResource(theChanged.getResourceBinary());
 		historyEntry.setResourceTextVc(theChanged.getResourceText());
 
 		ourLog.debug("Saving history entry ID[{}] for RES_ID[{}]", historyEntry.getId(), historyEntry.getResourceId());
@@ -1926,16 +1953,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
 		return resourceText;
 	}
 
-	public static String encodeResource(
-			IBaseResource theResource,
-			ResourceEncodingEnum theEncoding,
-			List<String> theExcludeElements,
-			FhirContext theContext) {
-		IParser parser = theEncoding.newParser(theContext);
-		parser.setDontEncodeElements(theExcludeElements);
-		return parser.encodeResourceToString(theResource);
-	}
-
 	private static String parseNarrativeTextIntoWords(IBaseResource theResource) {
 
 		StringBuilder b = new StringBuilder();
@@ -1709,17 +1709,11 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 		if (historyEntity.getEncoding() == ResourceEncodingEnum.JSONC
 				|| historyEntity.getEncoding() == ResourceEncodingEnum.JSON) {
 			byte[] resourceBytes = historyEntity.getResource();
-
-			// Always migrate data out of the bytes column
 			if (resourceBytes != null) {
 				String resourceText = decodeResource(resourceBytes, historyEntity.getEncoding());
-				ourLog.debug(
-						"Storing text of resource {} version {} as inline VARCHAR",
-						entity.getResourceId(),
-						historyEntity.getVersion());
-				historyEntity.setResourceTextVc(resourceText);
-				historyEntity.setEncoding(ResourceEncodingEnum.JSON);
-				changed = true;
+				if (myResourceHistoryCalculator.conditionallyAlterHistoryEntity(entity, historyEntity, resourceText)) {
+					changed = true;
+				}
 			}
 		}
 		if (isBlank(historyEntity.getSourceUri()) && isBlank(historyEntity.getRequestId())) {
@@ -24,6 +24,7 @@ import ca.uhn.fhir.jpa.model.entity.ResourceEncodingEnum;
 class EncodedResource {
 
 	private boolean myChanged;
+	private byte[] myResource;
 	private ResourceEncodingEnum myEncoding;
 	private String myResourceText;
 
@@ -35,6 +36,14 @@ class EncodedResource {
 		myEncoding = theEncoding;
 	}
 
+	public byte[] getResourceBinary() {
+		return myResource;
+	}
+
+	public void setResourceBinary(byte[] theResource) {
+		myResource = theResource;
+	}
+
 	public boolean isChanged() {
 		return myChanged;
 	}
@@ -0,0 +1,134 @@
+package ca.uhn.fhir.jpa.dao;
+
+import ca.uhn.fhir.context.FhirContext;
+import ca.uhn.fhir.jpa.model.entity.ResourceEncodingEnum;
+import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
+import ca.uhn.fhir.jpa.model.entity.ResourceTable;
+import ca.uhn.fhir.parser.IParser;
+import com.google.common.hash.HashCode;
+import com.google.common.hash.HashFunction;
+import com.google.common.hash.Hashing;
+import jakarta.annotation.Nonnull;
+import jakarta.annotation.Nullable;
+import org.apache.commons.lang3.StringUtils;
+import org.hl7.fhir.instance.model.api.IBaseResource;
+
+import java.nio.charset.StandardCharsets;
+import java.util.Arrays;
+import java.util.List;
+
+/**
+ * Responsible for various resource history-centric and {@link FhirContext} aware operations called by
+ * {@link BaseHapiFhirDao} or {@link BaseHapiFhirResourceDao} that require knowledge of whether an Oracle database is
+ * being used.
+ */
+public class ResourceHistoryCalculator {
+	private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(ResourceHistoryCalculator.class);
+	private static final HashFunction SHA_256 = Hashing.sha256();
+
+	private final FhirContext myFhirContext;
+	private final boolean myIsOracleDialect;
+
+	public ResourceHistoryCalculator(FhirContext theFhirContext, boolean theIsOracleDialect) {
+		myFhirContext = theFhirContext;
+		myIsOracleDialect = theIsOracleDialect;
+	}
+
+	ResourceHistoryState calculateResourceHistoryState(
+			IBaseResource theResource, ResourceEncodingEnum theEncoding, List<String> theExcludeElements) {
+		final String encodedResource = encodeResource(theResource, theEncoding, theExcludeElements);
+		final byte[] resourceBinary;
+		final String resourceText;
+		final ResourceEncodingEnum encoding;
+		final HashCode hashCode;
+
+		if (myIsOracleDialect) {
+			resourceText = null;
+			resourceBinary = getResourceBinary(theEncoding, encodedResource);
+			encoding = theEncoding;
+			hashCode = SHA_256.hashBytes(resourceBinary);
+		} else {
+			resourceText = encodedResource;
+			resourceBinary = null;
+			encoding = ResourceEncodingEnum.JSON;
+			hashCode = SHA_256.hashUnencodedChars(encodedResource);
+		}
+
+		return new ResourceHistoryState(resourceText, resourceBinary, encoding, hashCode);
+	}
+
+	boolean conditionallyAlterHistoryEntity(
+			ResourceTable theEntity, ResourceHistoryTable theHistoryEntity, String theResourceText) {
+		if (!myIsOracleDialect) {
+			ourLog.debug(
+					"Storing text of resource {} version {} as inline VARCHAR",
+					theEntity.getResourceId(),
+					theHistoryEntity.getVersion());
+			theHistoryEntity.setResourceTextVc(theResourceText);
+			theHistoryEntity.setResource(null);
+			theHistoryEntity.setEncoding(ResourceEncodingEnum.JSON);
+			return true;
+		}
+
+		return false;
+	}
+
+	boolean isResourceHistoryChanged(
+			ResourceHistoryTable theCurrentHistoryVersion,
+			@Nullable byte[] theResourceBinary,
+			@Nullable String resourceText) {
+		if (myIsOracleDialect) {
+			return !Arrays.equals(theCurrentHistoryVersion.getResource(), theResourceBinary);
+		}
+
+		return !StringUtils.equals(theCurrentHistoryVersion.getResourceTextVc(), resourceText);
+	}
+
+	String encodeResource(
+			IBaseResource theResource, ResourceEncodingEnum theEncoding, List<String> theExcludeElements) {
+		final IParser parser = theEncoding.newParser(myFhirContext);
+		parser.setDontEncodeElements(theExcludeElements);
+		return parser.encodeResourceToString(theResource);
+	}
+
+	/**
+	 * helper for returning the encoded byte array of the input resource string based on the theEncoding.
+	 *
+	 * @param theEncoding the theEncoding to used
+	 * @param theEncodedResource the resource to encode
+	 * @return byte array of the resource
+	 */
+	@Nonnull
+	static byte[] getResourceBinary(ResourceEncodingEnum theEncoding, String theEncodedResource) {
+		switch (theEncoding) {
+			case JSON:
+				return theEncodedResource.getBytes(StandardCharsets.UTF_8);
+			case JSONC:
+				return GZipUtil.compress(theEncodedResource);
+			default:
+				return new byte[0];
+		}
+	}
+
+	void populateEncodedResource(
+			EncodedResource theEncodedResource,
+			String theEncodedResourceString,
+			@Nullable byte[] theResourceBinary,
+			ResourceEncodingEnum theEncoding) {
+		if (myIsOracleDialect) {
+			populateEncodedResourceInner(theEncodedResource, null, theResourceBinary, theEncoding);
+		} else {
+			populateEncodedResourceInner(theEncodedResource, theEncodedResourceString, null, ResourceEncodingEnum.JSON);
+		}
+	}
+
+	private void populateEncodedResourceInner(
+			EncodedResource encodedResource,
+			String encodedResourceString,
+			byte[] theResourceBinary,
+			ResourceEncodingEnum theEncoding) {
+		encodedResource.setResourceText(encodedResourceString);
+		encodedResource.setResourceBinary(theResourceBinary);
+		encodedResource.setEncoding(theEncoding);
+	}
+}
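
For context, a minimal usage sketch of the new helper, condensed from the JpaConfig and BaseHapiFhirDao changes earlier in this diff; the local variable names below are illustrative and not part of the commit.

    // Wiring (see JpaConfig above): the Oracle check is made once, at configuration time.
    ResourceHistoryCalculator calculator =
            new ResourceHistoryCalculator(fhirContext, hibernatePropertiesProvider.isOracleDialect());

    // Persisting one resource version (see BaseHapiFhirDao above): the calculator decides whether
    // the content lands in the legacy RES_TEXT LOB column or the RES_TEXT_VC inline text column.
    ResourceHistoryState state = calculator.calculateResourceHistoryState(resource, encoding, excludeElements);
    historyEntity.setResource(state.getResourceBinary());     // populated only on Oracle
    historyEntity.setResourceTextVc(state.getResourceText()); // populated only off Oracle
    historyEntity.setEncoding(state.getEncoding());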
@@ -0,0 +1,86 @@
+package ca.uhn.fhir.jpa.dao;
+
+import ca.uhn.fhir.jpa.model.entity.ResourceEncodingEnum;
+import com.google.common.hash.HashCode;
+import jakarta.annotation.Nullable;
+import org.hl7.fhir.instance.model.api.IBaseResource;
+
+import java.util.Arrays;
+import java.util.List;
+import java.util.Objects;
+import java.util.StringJoiner;
+
+/**
+ * POJO to contain the results of {@link ResourceHistoryCalculator#calculateResourceHistoryState(IBaseResource, ResourceEncodingEnum, List)}
+ */
+public class ResourceHistoryState {
+	@Nullable
+	private final String myResourceText;
+
+	@Nullable
+	private final byte[] myResourceBinary;
+
+	private final ResourceEncodingEnum myEncoding;
+	private final HashCode myHashCode;
+
+	public ResourceHistoryState(
+			@Nullable String theResourceText,
+			@Nullable byte[] theResourceBinary,
+			ResourceEncodingEnum theEncoding,
+			HashCode theHashCode) {
+		myResourceText = theResourceText;
+		myResourceBinary = theResourceBinary;
+		myEncoding = theEncoding;
+		myHashCode = theHashCode;
+	}
+
+	@Nullable
+	public String getResourceText() {
+		return myResourceText;
+	}
+
+	@Nullable
+	public byte[] getResourceBinary() {
+		return myResourceBinary;
+	}
+
+	public ResourceEncodingEnum getEncoding() {
+		return myEncoding;
+	}
+
+	public HashCode getHashCode() {
+		return myHashCode;
+	}
+
+	@Override
+	public boolean equals(Object theO) {
+		if (this == theO) {
+			return true;
+		}
+		if (theO == null || getClass() != theO.getClass()) {
+			return false;
+		}
+		ResourceHistoryState that = (ResourceHistoryState) theO;
+		return Objects.equals(myResourceText, that.myResourceText)
+				&& Arrays.equals(myResourceBinary, that.myResourceBinary)
+				&& myEncoding == that.myEncoding
+				&& Objects.equals(myHashCode, that.myHashCode);
+	}
+
+	@Override
+	public int hashCode() {
+		int result = Objects.hash(myResourceText, myEncoding, myHashCode);
+		result = 31 * result + Arrays.hashCode(myResourceBinary);
+		return result;
+	}
+
+	@Override
+	public String toString() {
+		return new StringJoiner(", ", ResourceHistoryState.class.getSimpleName() + "[", "]")
+				.add("myResourceText='" + myResourceText + "'")
+				.add("myResourceBinary=" + Arrays.toString(myResourceBinary))
+				.add("myEncoding=" + myEncoding)
+				.add("myHashCode=" + myHashCode)
+				.toString();
+	}
+}
@@ -627,6 +627,9 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
 		version.executeRawSqls("20230402.1", Map.of(DriverTypeEnum.POSTGRES_9_4, postgresTuningStatements));
 
 		// Use an unlimited length text column for RES_TEXT_VC
+		// N.B. This will FAIL SILENTLY on Oracle due to the fact that Oracle does not support an ALTER TABLE from
+		// VARCHAR to
+		// CLOB. Because of failureAllowed() this won't halt the migration
 		version.onTable("HFJ_RES_VER")
 				.modifyColumn("20230421.1", "RES_TEXT_VC")
 				.nullable()
@@ -649,7 +649,13 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
 				.getMessage(TermReadSvcImpl.class, "valueSetExpandedUsingPreExpansion", expansionTimestamp);
 		theAccumulator.addMessage(msg);
 		expandConcepts(
-				theExpansionOptions, theAccumulator, termValueSet, theFilter, theAdd, theAddedCodes, isOracleDialect());
+				theExpansionOptions,
+				theAccumulator,
+				termValueSet,
+				theFilter,
+				theAdd,
+				theAddedCodes,
+				myHibernatePropertiesProvider.isOracleDialect());
 	}
 
 	@Nonnull
@@ -664,10 +670,6 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
 		return expansionTimestamp;
 	}
 
-	private boolean isOracleDialect() {
-		return myHibernatePropertiesProvider.getDialect() instanceof org.hibernate.dialect.OracleDialect;
-	}
-
 	private void expandConcepts(
 			ValueSetExpansionOptions theExpansionOptions,
 			IValueSetConceptAccumulator theAccumulator,
@@ -0,0 +1,326 @@
+package ca.uhn.fhir.jpa.dao;
+
+import ca.uhn.fhir.context.FhirContext;
+import ca.uhn.fhir.jpa.model.entity.ResourceEncodingEnum;
+import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
+import ca.uhn.fhir.jpa.model.entity.ResourceTable;
+import com.google.common.hash.HashCode;
+import com.google.common.hash.HashFunction;
+import com.google.common.hash.Hashing;
+import org.hl7.fhir.dstu3.hapi.ctx.FhirDstu3;
+import org.hl7.fhir.instance.model.api.IBaseResource;
+import org.hl7.fhir.r4.hapi.ctx.FhirR4;
+import org.hl7.fhir.r4.model.Patient;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.Arguments;
+import org.junit.jupiter.params.provider.MethodSource;
+
+import java.nio.charset.StandardCharsets;
+import java.time.LocalDate;
+import java.time.Month;
+import java.time.ZoneId;
+import java.util.Arrays;
+import java.util.Date;
+import java.util.List;
+import java.util.stream.Stream;
+
+import static org.junit.jupiter.api.Assertions.assertArrayEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+
+class ResourceHistoryCalculatorTest {
+	private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(ResourceHistoryCalculatorTest.class);
+
+	private static final FhirContext CONTEXT = FhirContext.forR4Cached();
+
+	private static final ResourceHistoryCalculator CALCULATOR_ORACLE = new ResourceHistoryCalculator(CONTEXT, true);
+	private static final ResourceHistoryCalculator CALCULATOR_NON_ORACLE = new ResourceHistoryCalculator(CONTEXT, false);
+
+	private static final LocalDate TODAY = LocalDate.of(2024, Month.JANUARY, 25);
+	private static final String ENCODED_RESOURCE_1 = "1234";
+	private static final String ENCODED_RESOURCE_2 = "abcd";
+	private static final String RESOURCE_TEXT_VC = "resourceTextVc";
+	private static final List<String> EXCLUDED_ELEMENTS_1 = List.of("id");
+	private static final List<String> EXCLUDED_ELEMENTS_2 = List.of("resourceType", "birthDate");
+	private static final HashFunction SHA_256 = Hashing.sha256();
+
+	private static Stream<Arguments> calculateResourceHistoryStateArguments() {
+		return Stream.of(
+			Arguments.of(FhirContext.forDstu3Cached(), true, ResourceEncodingEnum.JSONC, EXCLUDED_ELEMENTS_1),
+			Arguments.of(FhirContext.forDstu3Cached(), false, ResourceEncodingEnum.JSONC, EXCLUDED_ELEMENTS_2),
+			Arguments.of(FhirContext.forDstu3Cached(), true, ResourceEncodingEnum.DEL, EXCLUDED_ELEMENTS_2),
+			Arguments.of(FhirContext.forDstu3Cached(), false, ResourceEncodingEnum.DEL, EXCLUDED_ELEMENTS_1),
+			Arguments.of(FhirContext.forDstu3Cached(), true, ResourceEncodingEnum.ESR, EXCLUDED_ELEMENTS_1),
+			Arguments.of(FhirContext.forDstu3Cached(), false, ResourceEncodingEnum.ESR, EXCLUDED_ELEMENTS_2),
+			Arguments.of(FhirContext.forDstu3Cached(), true, ResourceEncodingEnum.JSON, EXCLUDED_ELEMENTS_2),
+			Arguments.of(FhirContext.forDstu3Cached(), false, ResourceEncodingEnum.JSON, EXCLUDED_ELEMENTS_1),
+			Arguments.of(FhirContext.forR4Cached(), true, ResourceEncodingEnum.JSONC, EXCLUDED_ELEMENTS_1),
+			Arguments.of(FhirContext.forR4Cached(), false, ResourceEncodingEnum.JSONC, EXCLUDED_ELEMENTS_2),
+			Arguments.of(FhirContext.forR4Cached(), true, ResourceEncodingEnum.DEL, EXCLUDED_ELEMENTS_2),
+			Arguments.of(FhirContext.forR4Cached(), false, ResourceEncodingEnum.DEL, EXCLUDED_ELEMENTS_1),
+			Arguments.of(FhirContext.forR4Cached(), true, ResourceEncodingEnum.ESR, EXCLUDED_ELEMENTS_1),
+			Arguments.of(FhirContext.forR4Cached(), false, ResourceEncodingEnum.ESR, EXCLUDED_ELEMENTS_2),
+			Arguments.of(FhirContext.forR4Cached(), true, ResourceEncodingEnum.JSON, EXCLUDED_ELEMENTS_2),
+			Arguments.of(FhirContext.forR4Cached(), false, ResourceEncodingEnum.JSON, EXCLUDED_ELEMENTS_1)
+		);
+	}
+
+	/**
+	 * The purpose of this test is to ensure that the conditional logic to pre-calculate resource history text or binaries
+	 * is respected.
+	 * If this is for Oracle, the resource text will be driven off a binary with a given encoding with the
+	 * resource text effectively ignored.
+	 * If this is not Oracle, it will be driven off a JSON encoded text field with
+	 * the binary effectively ignored.
+	 */
+	@ParameterizedTest
+	@MethodSource("calculateResourceHistoryStateArguments")
+	void calculateResourceHistoryState(FhirContext theFhirContext, boolean theIsOracle, ResourceEncodingEnum theResourceEncoding, List<String> theExcludedElements) {
+		final IBaseResource patient = getPatient(theFhirContext);
+
+		final ResourceHistoryCalculator calculator = getCalculator(theFhirContext, theIsOracle);
+		final ResourceHistoryState result = calculator.calculateResourceHistoryState(patient, theResourceEncoding, theExcludedElements);
+
+		if (theIsOracle) {
+			assertNotNull(result.getResourceBinary()); // On Oracle: We use the resource binary to serve up the resource content
+			assertNull(result.getResourceText()); // On Oracle: We do NOT use the resource text to serve up the resource content
+			assertEquals(theResourceEncoding, result.getEncoding()); // On Oracle, the resource encoding is what we used to encode the binary
+			assertEquals(SHA_256.hashBytes(result.getResourceBinary()), result.getHashCode()); // On Oracle, the SHA 256 hash is of the binary
+		} else {
+			assertNull(result.getResourceBinary()); // Non-Oracle: We do NOT use the resource binary to serve up the resource content
+			assertNotNull(result.getResourceText()); // Non-Oracle: We use the resource text to serve up the resource content
+			assertEquals(ResourceEncodingEnum.JSON, result.getEncoding()); // Non-Oracle, since we didn't encode a binary this is always JSON.
+			final HashCode expectedHashCode = SHA_256.hashUnencodedChars(calculator.encodeResource(patient, theResourceEncoding, theExcludedElements)); // Non-Oracle, the SHA 256 hash is of the parsed resource object
+			assertEquals(expectedHashCode, result.getHashCode());
+		}
+	}
+
+	private static Stream<Arguments> conditionallyAlterHistoryEntityArguments() {
+		return Stream.of(
+			Arguments.of(true, ResourceEncodingEnum.JSONC, ENCODED_RESOURCE_1),
+			Arguments.of(true, ResourceEncodingEnum.JSONC, ENCODED_RESOURCE_2),
+			Arguments.of(true, ResourceEncodingEnum.DEL, ENCODED_RESOURCE_1),
+			Arguments.of(true, ResourceEncodingEnum.DEL, ENCODED_RESOURCE_2),
+			Arguments.of(true, ResourceEncodingEnum.ESR, ENCODED_RESOURCE_1),
+			Arguments.of(true, ResourceEncodingEnum.ESR, ENCODED_RESOURCE_2),
+			Arguments.of(true, ResourceEncodingEnum.JSON, ENCODED_RESOURCE_1),
+			Arguments.of(true, ResourceEncodingEnum.JSON, ENCODED_RESOURCE_2),
+			Arguments.of(false, ResourceEncodingEnum.JSONC, ENCODED_RESOURCE_1),
+			Arguments.of(false, ResourceEncodingEnum.JSONC, ENCODED_RESOURCE_2),
+			Arguments.of(false, ResourceEncodingEnum.DEL, ENCODED_RESOURCE_1),
+			Arguments.of(false, ResourceEncodingEnum.DEL, ENCODED_RESOURCE_2),
+			Arguments.of(false, ResourceEncodingEnum.ESR, ENCODED_RESOURCE_1),
+			Arguments.of(false, ResourceEncodingEnum.ESR, ENCODED_RESOURCE_2),
+			Arguments.of(false, ResourceEncodingEnum.JSON, ENCODED_RESOURCE_1),
+			Arguments.of(false, ResourceEncodingEnum.JSON, ENCODED_RESOURCE_2)
+		);
+	}
+
+	@ParameterizedTest
+	@MethodSource("conditionallyAlterHistoryEntityArguments")
+	void conditionallyAlterHistoryEntity_usesVarcharForOracle(boolean theIsOracle, ResourceEncodingEnum theResourceEncoding, String theResourceText) {
+		final ResourceTable resourceTable = new ResourceTable();
+		resourceTable.setId(123L);
+
+		final ResourceHistoryTable resourceHistoryTable = new ResourceHistoryTable();
+		resourceHistoryTable.setVersion(1);
+		resourceHistoryTable.setResource("resource".getBytes(StandardCharsets.UTF_8));
+		resourceHistoryTable.setEncoding(theResourceEncoding);
+		resourceHistoryTable.setResourceTextVc(RESOURCE_TEXT_VC);
+
+		final boolean isChanged =
+			getCalculator(theIsOracle).conditionallyAlterHistoryEntity(resourceTable, resourceHistoryTable, theResourceText);
+
+		if (theIsOracle) {
+			assertFalse(isChanged);
+			assertNotNull(resourceHistoryTable.getResource());
+			assertEquals(RESOURCE_TEXT_VC, resourceHistoryTable.getResourceTextVc());
+			assertEquals(resourceHistoryTable.getEncoding(), resourceHistoryTable.getEncoding());
+		} else {
+			assertTrue(isChanged);
+			assertNull(resourceHistoryTable.getResource());
+			assertEquals(theResourceText, resourceHistoryTable.getResourceTextVc());
+			assertEquals(resourceHistoryTable.getEncoding(), ResourceEncodingEnum.JSON);
+		}
+	}
+
+	private static Stream<Arguments> encodeResourceArguments() {
+		return Stream.of(
+			Arguments.of(FhirContext.forDstu3Cached(), ResourceEncodingEnum.JSONC, EXCLUDED_ELEMENTS_1),
+			Arguments.of(FhirContext.forDstu3Cached(), ResourceEncodingEnum.JSONC, EXCLUDED_ELEMENTS_2),
+			Arguments.of(FhirContext.forDstu3Cached(), ResourceEncodingEnum.DEL, EXCLUDED_ELEMENTS_1),
+			Arguments.of(FhirContext.forDstu3Cached(), ResourceEncodingEnum.DEL, EXCLUDED_ELEMENTS_2),
+			Arguments.of(FhirContext.forDstu3Cached(), ResourceEncodingEnum.ESR, EXCLUDED_ELEMENTS_1),
+			Arguments.of(FhirContext.forDstu3Cached(), ResourceEncodingEnum.ESR, EXCLUDED_ELEMENTS_2),
+			Arguments.of(FhirContext.forDstu3Cached(), ResourceEncodingEnum.JSON, EXCLUDED_ELEMENTS_1),
+			Arguments.of(FhirContext.forDstu3Cached(), ResourceEncodingEnum.JSON, EXCLUDED_ELEMENTS_2),
+			Arguments.of(FhirContext.forR4Cached(), ResourceEncodingEnum.JSONC, EXCLUDED_ELEMENTS_1),
+			Arguments.of(FhirContext.forR4Cached(), ResourceEncodingEnum.JSONC, EXCLUDED_ELEMENTS_2),
+			Arguments.of(FhirContext.forR4Cached(), ResourceEncodingEnum.DEL, EXCLUDED_ELEMENTS_1),
+			Arguments.of(FhirContext.forR4Cached(), ResourceEncodingEnum.DEL, EXCLUDED_ELEMENTS_2),
+			Arguments.of(FhirContext.forR4Cached(), ResourceEncodingEnum.ESR, EXCLUDED_ELEMENTS_1),
+			Arguments.of(FhirContext.forR4Cached(), ResourceEncodingEnum.ESR, EXCLUDED_ELEMENTS_2),
+			Arguments.of(FhirContext.forR4Cached(), ResourceEncodingEnum.JSON, EXCLUDED_ELEMENTS_1),
+			Arguments.of(FhirContext.forR4Cached(), ResourceEncodingEnum.JSON, EXCLUDED_ELEMENTS_2)
+		);
+	}
+
+	@ParameterizedTest
+	@MethodSource("encodeResourceArguments")
+	void encodeResource_ensureFhirVersionSpecificAndIntendedElementsExcluded(FhirContext theFhirContext, ResourceEncodingEnum theResourceEncoding, List<String> theExcludedElements) {
+		final IBaseResource patient = getPatient(theFhirContext);
+		final String encodedResource = getCalculator(theFhirContext, true).encodeResource(patient, theResourceEncoding, theExcludedElements);
+
+		final String expectedEncoding =
+			theResourceEncoding.newParser(theFhirContext).setDontEncodeElements(theExcludedElements).encodeResourceToString(patient);
+
+		assertEquals(expectedEncoding, encodedResource);
+	}
+
+	private static Stream<Arguments> getResourceBinaryArguments() {
+		return Stream.of(
+			Arguments.of(ResourceEncodingEnum.JSONC, ENCODED_RESOURCE_1),
+			Arguments.of(ResourceEncodingEnum.JSONC, ENCODED_RESOURCE_2),
+			Arguments.of(ResourceEncodingEnum.DEL, ENCODED_RESOURCE_1),
+			Arguments.of(ResourceEncodingEnum.DEL, ENCODED_RESOURCE_2),
+			Arguments.of(ResourceEncodingEnum.ESR, ENCODED_RESOURCE_1),
+			Arguments.of(ResourceEncodingEnum.ESR, ENCODED_RESOURCE_2),
+			Arguments.of(ResourceEncodingEnum.JSON, ENCODED_RESOURCE_1),
+			Arguments.of(ResourceEncodingEnum.JSON, ENCODED_RESOURCE_2)
+		);
+	}
+
+	@ParameterizedTest
+	@MethodSource("getResourceBinaryArguments")
+	void getResourceBinary(ResourceEncodingEnum theResourceEncoding, String theEncodedResource) {
+		final byte[] resourceBinary = ResourceHistoryCalculator.getResourceBinary(theResourceEncoding, theEncodedResource);
+
+		switch (theResourceEncoding) {
+			case JSON:
+				assertArrayEquals(theEncodedResource.getBytes(StandardCharsets.UTF_8), resourceBinary);
+				break;
+			case JSONC:
+				assertArrayEquals(GZipUtil.compress(theEncodedResource), resourceBinary);
+				break;
+			case DEL :
+			case ESR :
+			default:
+				assertArrayEquals(new byte[0], resourceBinary);
+		}
+
+		ourLog.info("resourceBinary: {}", resourceBinary);
+	}
+
+	private static Stream<Arguments> isResourceHistoryChangedArguments() {
+		return Stream.of(
+			Arguments.of(true, ENCODED_RESOURCE_1.getBytes(StandardCharsets.UTF_8), ENCODED_RESOURCE_1),
+			Arguments.of(false, ENCODED_RESOURCE_1.getBytes(StandardCharsets.UTF_8), ENCODED_RESOURCE_1),
+			Arguments.of(true, ENCODED_RESOURCE_2.getBytes(StandardCharsets.UTF_8), ENCODED_RESOURCE_2),
+			Arguments.of(false, ENCODED_RESOURCE_2.getBytes(StandardCharsets.UTF_8), ENCODED_RESOURCE_2)
+		);
+	}
+
+	@ParameterizedTest
+	@MethodSource("isResourceHistoryChangedArguments")
+	void isResourceHistoryChanged(boolean theIsOracle, byte[] theNewBinary, String theNewResourceText) {
+		final String existngResourceText = ENCODED_RESOURCE_1;
+		final byte[] existingBytes = existngResourceText.getBytes(StandardCharsets.UTF_8);
+
+		final ResourceHistoryTable resourceHistoryTable = new ResourceHistoryTable();
+		resourceHistoryTable.setResource(existingBytes);
+		resourceHistoryTable.setResourceTextVc(existngResourceText);
+
+		final boolean isChanged = getCalculator(theIsOracle).isResourceHistoryChanged(resourceHistoryTable, theNewBinary, theNewResourceText);
+
+		if (theIsOracle) {
+			final boolean expectedResult = !Arrays.equals(existingBytes, theNewBinary);
+			assertEquals(expectedResult, isChanged);
+		} else {
+			final boolean expectedResult = ! existngResourceText.equals(theNewResourceText);
+			assertEquals(expectedResult, isChanged);
+		}
+	}
+
+	private static Stream<Arguments> populateEncodedResourceArguments() {
+		return Stream.of(
+			Arguments.of(true, ResourceEncodingEnum.JSONC, ENCODED_RESOURCE_1),
+			Arguments.of(false, ResourceEncodingEnum.JSONC, ENCODED_RESOURCE_2),
+			Arguments.of(true, ResourceEncodingEnum.DEL, ENCODED_RESOURCE_2),
+			Arguments.of(false, ResourceEncodingEnum.DEL, ENCODED_RESOURCE_1),
+			Arguments.of(true, ResourceEncodingEnum.ESR, ENCODED_RESOURCE_1),
+			Arguments.of(false, ResourceEncodingEnum.ESR, ENCODED_RESOURCE_2),
+			Arguments.of(true, ResourceEncodingEnum.JSON, ENCODED_RESOURCE_2),
+			Arguments.of(false, ResourceEncodingEnum.JSON, ENCODED_RESOURCE_1),
+			Arguments.of(true, ResourceEncodingEnum.JSONC, ENCODED_RESOURCE_1),
+			Arguments.of(false, ResourceEncodingEnum.JSONC, ENCODED_RESOURCE_2),
+			Arguments.of(true, ResourceEncodingEnum.DEL, ENCODED_RESOURCE_2),
+			Arguments.of(false, ResourceEncodingEnum.DEL, ENCODED_RESOURCE_1),
+			Arguments.of(true, ResourceEncodingEnum.ESR, ENCODED_RESOURCE_1),
+			Arguments.of(false, ResourceEncodingEnum.ESR, ENCODED_RESOURCE_2),
+			Arguments.of(true, ResourceEncodingEnum.JSON, ENCODED_RESOURCE_2),
+			Arguments.of(false, ResourceEncodingEnum.JSON, ENCODED_RESOURCE_1)
+		);
+	}
+
+	@ParameterizedTest
+	@MethodSource("populateEncodedResourceArguments")
+	void populateEncodedResource(boolean theIsOracle, ResourceEncodingEnum theResourceEncoding, String theEncodedResourceString) {
+		final EncodedResource encodedResource = new EncodedResource();
+		final byte[] resourceBinary = theEncodedResourceString.getBytes(StandardCharsets.UTF_8);
+
+		getCalculator(theIsOracle)
+			.populateEncodedResource(encodedResource, theEncodedResourceString, resourceBinary, theResourceEncoding);
+
+		if (theIsOracle) {
+			assertEquals(resourceBinary, encodedResource.getResourceBinary());
+			assertNull(encodedResource.getResourceText());
+			assertEquals(theResourceEncoding, encodedResource.getEncoding());
+		} else {
+			assertNull(encodedResource.getResourceBinary());
+			assertEquals(theEncodedResourceString, encodedResource.getResourceText());
+			assertEquals(ResourceEncodingEnum.JSON, encodedResource.getEncoding());
+		}
+	}
+
+	private ResourceHistoryCalculator getCalculator(boolean theIsOracle) {
+		return theIsOracle ? CALCULATOR_ORACLE : CALCULATOR_NON_ORACLE;
+	}
+
+	private ResourceHistoryCalculator getCalculator(FhirContext theFhirContext, boolean theIsOracle) {
+		return new ResourceHistoryCalculator(theFhirContext, theIsOracle);
+	}
+
+	private IBaseResource getPatient(FhirContext theFhirContext) {
+		if (theFhirContext.getVersion() instanceof FhirR4) {
+			return getPatientR4();
+		}
+
+		if (theFhirContext.getVersion() instanceof FhirDstu3) {
+			return getPatientDstu3();
+		}
+
+		return null;
+	}
+
+	private org.hl7.fhir.dstu3.model.Patient getPatientDstu3() {
+		final org.hl7.fhir.dstu3.model.Patient patient = new org.hl7.fhir.dstu3.model.Patient();
+
+		patient.setId("123");
+		patient.setBirthDate(Date.from(TODAY.atStartOfDay(ZoneId.of("America/Toronto")).toInstant()));
+
+		return patient;
+	}
+
+	private Patient getPatientR4() {
+		final Patient patient = new Patient();
+
+		patient.setId("123");
+		patient.setBirthDate(Date.from(TODAY.atStartOfDay(ZoneId.of("America/Toronto")).toInstant()));
+
+		return patient;
+	}
+}
@@ -32,8 +32,6 @@ import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.Collection;
 
-import static org.apache.commons.lang3.StringUtils.defaultString;
-
 @Entity
 @Table(
 		name = ResourceHistoryTable.HFJ_RES_VER,
@@ -86,15 +84,12 @@ public class ResourceHistoryTable extends BaseHasResource implements Serializabl
 	@OneToMany(mappedBy = "myResourceHistory", cascade = CascadeType.ALL, fetch = FetchType.LAZY, orphanRemoval = true)
 	private Collection<ResourceHistoryTag> myTags;
 
-	/**
-	 * Note: No setter for this field because it's only a legacy way of storing data now.
-	 */
 	@Column(name = "RES_TEXT", length = Integer.MAX_VALUE - 1, nullable = true)
 	@Lob()
 	@OptimisticLock(excluded = true)
 	private byte[] myResource;
 
-	@Column(name = "RES_TEXT_VC", nullable = true, length = Length.LONG32)
+	@Column(name = "RES_TEXT_VC", length = Length.LONG32, nullable = true)
 	@OptimisticLock(excluded = true)
 	private String myResourceTextVc;
 
@@ -155,8 +150,7 @@ public class ResourceHistoryTable extends BaseHasResource implements Serializabl
 	}
 
 	public void setResourceTextVc(String theResourceTextVc) {
-		myResource = null;
-		myResourceTextVc = defaultString(theResourceTextVc);
+		myResourceTextVc = theResourceTextVc;
 	}
 
 	public ResourceHistoryProvenanceEntity getProvenance() {
@@ -212,6 +206,10 @@ public class ResourceHistoryTable extends BaseHasResource implements Serializabl
 		return myResource;
 	}
 
+	public void setResource(byte[] theResource) {
+		myResource = theResource;
+	}
+
 	@Override
 	public Long getResourceId() {
 		return myResourceId;
@@ -17,8 +17,10 @@ import ca.uhn.fhir.jpa.cache.ResourceChangeListenerCacheRefresherImpl;
 import ca.uhn.fhir.jpa.cache.ResourceChangeListenerRegistryImpl;
 import ca.uhn.fhir.jpa.cache.ResourcePersistentIdMap;
 import ca.uhn.fhir.jpa.cache.ResourceVersionMap;
+import ca.uhn.fhir.jpa.config.HibernatePropertiesProvider;
 import ca.uhn.fhir.jpa.dao.IJpaStorageResourceParser;
 import ca.uhn.fhir.jpa.dao.JpaResourceDao;
+import ca.uhn.fhir.jpa.dao.ResourceHistoryCalculator;
 import ca.uhn.fhir.jpa.dao.TransactionProcessor;
 import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTableDao;
 import ca.uhn.fhir.jpa.dao.index.DaoSearchParamSynchronizer;
@@ -148,6 +150,7 @@ public class GiantTransactionPerfTest {
 	private IIdHelperService myIdHelperService;
 
 	@Mock
 	private IJpaStorageResourceParser myJpaStorageResourceParser;
+
+	private final ResourceHistoryCalculator myResourceHistoryCalculator = new ResourceHistoryCalculator(FhirContext.forR4Cached(), false);
+
 	private IMetaTagSorter myMetaTagSorter;
 
 	@AfterEach
@@ -271,6 +274,7 @@ public class GiantTransactionPerfTest {
 		myEobDao.setJpaStorageResourceParserForUnitTest(myJpaStorageResourceParser);
 		myEobDao.setExternallyStoredResourceServiceRegistryForUnitTest(new ExternallyStoredResourceServiceRegistry());
 		myEobDao.setMyMetaTagSorter(myMetaTagSorter);
+		myEobDao.setResourceHistoryCalculator(myResourceHistoryCalculator);
 		myEobDao.start();
 
 		myDaoRegistry.setResourceDaos(Lists.newArrayList(myEobDao));