Compare commits
2 Commits: 0e16a4afe1 ... b72a3873b3

| Author | SHA1 | Date |
|---|---|---|
| James Agnew | b72a3873b3 | |
| James Agnew | fe723491a6 | |
@@ -1,7 +1,6 @@
package ca.uhn.hapi.fhir.sql.hibernatesvc;

import ca.uhn.fhir.context.ConfigurationException;
import com.google.common.collect.Lists;
import jakarta.annotation.Nonnull;
import jakarta.annotation.Nullable;
import jakarta.persistence.JoinColumn;

@@ -34,7 +33,6 @@ import org.slf4j.LoggerFactory;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
@@ -57,22 +55,22 @@ public class ConditionalIdMappingContributor implements org.hibernate.boot.spi.A
@Override
public void contribute(
AdditionalMappingContributions theContributions,
InFlightMetadataCollector theMetadata,
ResourceStreamLocator theResourceStreamLocator,
MetadataBuildingContext theBuildingContext) {
AdditionalMappingContributions theContributions,
InFlightMetadataCollector theMetadata,
ResourceStreamLocator theResourceStreamLocator,
MetadataBuildingContext theBuildingContext) {
HapiHibernateDialectSettingsService hapiSettingsSvc = theMetadata.getBootstrapContext().getServiceRegistry().getService(HapiHibernateDialectSettingsService.class);
HapiHibernateDialectSettingsService hapiSettingsSvc = theMetadata
.getBootstrapContext()
.getServiceRegistry()
.getService(HapiHibernateDialectSettingsService.class);
assert hapiSettingsSvc != null;
if (!hapiSettingsSvc.isTrimConditionalIdsFromPrimaryKeys()) {
return;
}
myTableNameToEntityType = theMetadata
.getEntityBindingMap()
.values()
.stream()
.collect(Collectors.toMap(t -> t.getTable().getName(), t -> getType(t.getClassName())));
myTableNameToEntityType = theMetadata.getEntityBindingMap().values().stream()
.collect(Collectors.toMap(t -> t.getTable().getName(), t -> getType(t.getClassName())));
removeConditionalIdProperties(theMetadata);
@@ -109,15 +107,20 @@ public class ConditionalIdMappingContributor implements org.hibernate.boot.spi.A
field = getField(identifier.getComponentClass(), fieldName);
}
if (field == null) {
throw new ConfigurationException("Failed to find field " + fieldName + " on type: " + entityType.getName());
throw new ConfigurationException(
"Failed to find field " + fieldName + " on type: " + entityType.getName());
}
ConditionalIdProperty remove = field.getAnnotation(ConditionalIdProperty.class);
if (remove != null) {
Property removedProperty = properties.remove(i);
idRemovedColumns.addAll(removedProperty.getColumns());
idRemovedColumnNames.addAll(removedProperty.getColumns().stream().map(Column::getName).collect(Collectors.toSet()));
removedProperty.getColumns().stream().map(theColumn -> table.getName() + "#" + theColumn.getName()).forEach(myQualifiedIdRemovedColumnNames::add);
idRemovedColumnNames.addAll(removedProperty.getColumns().stream()
.map(Column::getName)
.collect(Collectors.toSet()));
removedProperty.getColumns().stream()
.map(theColumn -> table.getName() + "#" + theColumn.getName())
.forEach(myQualifiedIdRemovedColumnNames::add);
idRemovedProperties.add(removedProperty.getName());
i--;

@@ -133,8 +136,8 @@ public class ConditionalIdMappingContributor implements org.hibernate.boot.spi.A
entityPersistentClass.addProperty(removedProperty);
}
// addToCollectionUsingReflection(entityPersistentClass, "properties", removedProperty);
// addToCollectionUsingReflection(entityPersistentClass, "declaredProperties", removedProperty);
// addToCollectionUsingReflection(entityPersistentClass, "properties", removedProperty);
// addToCollectionUsingReflection(entityPersistentClass, "declaredProperties", removedProperty);
}
}
@@ -161,7 +164,7 @@ public class ConditionalIdMappingContributor implements org.hibernate.boot.spi.A
// Adjust composites - This handles @EmbeddedId PKs like JpaPid
List<Component> registeredComponents = new ArrayList<>();
theMetadata.visitRegisteredComponents(c->registeredComponents.add(c));
theMetadata.visitRegisteredComponents(c -> registeredComponents.add(c));
for (Component c : registeredComponents) {
Class<?> componentType = c.getComponentClass();

@@ -180,7 +183,9 @@ public class ConditionalIdMappingContributor implements org.hibernate.boot.spi.A
myQualifiedIdRemovedColumnNames.add(tableName + "#" + columnName);
PrimaryKey primaryKey = c.getTable().getPrimaryKey();
primaryKey.getColumns().removeIf(t -> myQualifiedIdRemovedColumnNames.contains(tableName + "#" + t.getName()));
primaryKey
.getColumns()
.removeIf(t -> myQualifiedIdRemovedColumnNames.contains(tableName + "#" + t.getName()));
for (Column nextColumn : c.getTable().getColumns()) {
if (myQualifiedIdRemovedColumnNames.contains(tableName + "#" + nextColumn.getName())) {

@@ -203,8 +208,6 @@ public class ConditionalIdMappingContributor implements org.hibernate.boot.spi.A
}
}
}
}
Type type = c.getType();
@@ -218,42 +221,41 @@ public class ConditionalIdMappingContributor implements org.hibernate.boot.spi.A
}
}
// if (!removedPropertyNames.isEmpty()) {
// if (tableName.equals("HFJ_RES_VER") && c.toString().contains("JpaPid")) {
// }
// if (!removedPropertyNames.isEmpty()) {
// if (tableName.equals("HFJ_RES_VER") && c.toString().contains("JpaPid")) {
// }
// ComponentType component = (ComponentType) type;
// for (int i = 0; i < component.getPropertyNames().length; i++) {
// String propertyName = component.getPropertyNames()[i];
// if (removedPropertyNames.contains(propertyName)) {
// removeArrayFieldValueAtIndex(component, "propertyNames", i);
// removeArrayFieldValueAtIndex(component, "propertyTypes", i);
// removeArrayFieldValueAtIndex(component, "propertyNullability", i);
// removeArrayFieldValueAtIndex(component, "cascade", i);
// removeArrayFieldValueAtIndex(component, "joinedFetch", i);
// removeArrayFieldValueAtIndex(component, "joinedFetch", i);
// }
// }
// for (int i = 0; i < component.getPropertyNames().length; i++) {
// String propertyName = component.getPropertyNames()[i];
// if (removedPropertyNames.contains(propertyName)) {
// removeArrayFieldValueAtIndex(component, "propertyNames", i);
// removeArrayFieldValueAtIndex(component, "propertyTypes", i);
// removeArrayFieldValueAtIndex(component, "propertyNullability", i);
// removeArrayFieldValueAtIndex(component, "cascade", i);
// removeArrayFieldValueAtIndex(component, "joinedFetch", i);
// removeArrayFieldValueAtIndex(component, "joinedFetch", i);
// }
// }
}
// Value propertyValue = property.getValue();
// if (propertyValue instanceof Component) {
// type = propertyValue.getType();
// type = propertyValue.getType();
// if (type instanceof ComponentType) {
// ComponentType ect = (ComponentType) type;
// for (int i = 0; i < ect.getPropertyNames().length; i++) {
// String propertyName = ect.getPropertyNames()[i];
// Field propertyField = getField(ect.getReturnedClass(), propertyName);
// ConditionalIdProperty conditionalId = propertyField.getAnnotation(ConditionalIdProperty.class);
// if (conditionalId != null) {
// ect.getPropertyNames()
// }
//
// }
// }
// }
// Value propertyValue = property.getValue();
// if (propertyValue instanceof Component) {
// type = propertyValue.getType();
// type = propertyValue.getType();
// if (type instanceof ComponentType) {
// ComponentType ect = (ComponentType) type;
// for (int i = 0; i < ect.getPropertyNames().length; i++) {
// String propertyName = ect.getPropertyNames()[i];
// Field propertyField = getField(ect.getReturnedClass(), propertyName);
// ConditionalIdProperty conditionalId = propertyField.getAnnotation(ConditionalIdProperty.class);
// if (conditionalId != null) {
// ect.getPropertyNames()
// }
//
// }
// }
// }
c.getColumns().removeIf(t -> {
String name = tableName + "#" + t.getName();
@@ -271,25 +273,32 @@ public class ConditionalIdMappingContributor implements org.hibernate.boot.spi.A
if (value instanceof ToOne) {
ToOne manyToOne = (ToOne) value;
String targetTableName = theMetadata.getEntityBindingMap().get(manyToOne.getReferencedEntityName()).getTable().getName();
String targetTableName = theMetadata
.getEntityBindingMap()
.get(manyToOne.getReferencedEntityName())
.getTable()
.getName();
Class<?> entityType = getType(nextEntry.getKey());
String propertyName = manyToOne.getPropertyName();
Set<String> columnNamesToRemoveFromFks = determineFilteredColumnNamesInForeignKey(entityType, propertyName, targetTableName);
Set<String> columnNamesToRemoveFromFks =
determineFilteredColumnNamesInForeignKey(entityType, propertyName, targetTableName);
removeColumns(manyToOne.getColumns(), t1 -> columnNamesToRemoveFromFks.contains(t1.getName()));
removeColumns(foreignKey.getColumns(), t1 -> columnNamesToRemoveFromFks.contains(t1.getName()));
columnNamesToRemoveFromFks.forEach(t -> myQualifiedIdRemovedColumnNames.add(table.getName() + "#" + t));
columnNamesToRemoveFromFks.forEach(
t -> myQualifiedIdRemovedColumnNames.add(table.getName() + "#" + t));
} else {
foreignKey.getColumns().removeIf(t -> myQualifiedIdRemovedColumnNames.contains(foreignKey.getReferencedTable().getName() + "#" + t.getName()));
foreignKey
.getColumns()
.removeIf(t -> myQualifiedIdRemovedColumnNames.contains(
foreignKey.getReferencedTable().getName() + "#" + t.getName()));
}
}
}
// Adjust relations with remote filtered columns (e.g. OneToMany)
for (var nextEntry : theMetadata.getEntityBindingMap().entrySet()) {
PersistentClass entityPersistentClass = nextEntry.getValue();
@@ -306,20 +315,23 @@ public class ConditionalIdMappingContributor implements org.hibernate.boot.spi.A
if (propertyKey instanceof DependantValue) {
DependantValue dependantValue = (DependantValue) propertyKey;
dependantValue.getColumns().removeIf(t -> myQualifiedIdRemovedColumnNames.contains(propertyValueBag.getCollectionTable().getName() + "#" + t.getName()));
dependantValue
.getColumns()
.removeIf(t -> myQualifiedIdRemovedColumnNames.contains(
propertyValueBag.getCollectionTable().getName() + "#" + t.getName()));
// KeyValue wrappedValue = dependantValue.getWrappedValue();
// if (wrappedValue instanceof Component) {}
//
// dependantValue.copy(); // FIXME: remove
// KeyValue wrappedValue = dependantValue.getWrappedValue();
// if (wrappedValue instanceof Component) {}
//
// dependantValue.copy(); // FIXME: remove
}
}
}
}
}
private static void updateComponentWithNewPropertyList(Component identifierMapper, List<Property> finalPropertyList) {
private static void updateComponentWithNewPropertyList(
Component identifierMapper, List<Property> finalPropertyList) {
CompositeType type = identifierMapper.getType();
if (type instanceof ComponentType) {
ComponentType ect = (ComponentType) type;
@@ -339,7 +351,6 @@ public class ConditionalIdMappingContributor implements org.hibernate.boot.spi.A
} catch (NoSuchFieldException | IllegalAccessException e) {
throw new IllegalStateException(e);
}
}
}

@@ -360,7 +371,8 @@ public class ConditionalIdMappingContributor implements org.hibernate.boot.spi.A
}
@Nonnull
private Set<String> determineFilteredColumnNamesInForeignKey(Class<?> theEntityType, String thePropertyName, String theTargetTableName) {
private Set<String> determineFilteredColumnNamesInForeignKey(
Class<?> theEntityType, String thePropertyName, String theTargetTableName) {
Field field = getField(theEntityType, thePropertyName);
Validate.notNull(field, "Unable to find field %s on entity %s", thePropertyName, theEntityType.getName());
JoinColumns joinColumns = field.getAnnotation(JoinColumns.class);
@@ -4,5 +4,4 @@ import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
@Retention(RetentionPolicy.RUNTIME)
public @interface ConditionalIdProperty {
}
public @interface ConditionalIdProperty {}
@@ -10,7 +10,8 @@ import java.util.concurrent.atomic.AtomicReference;
*/
public class HapiHibernateDialectSettingsService implements Service {
private static final AtomicReference<Boolean> myLastTrimConditionalIdsFromPrimaryKeysForUnitTest = new AtomicReference<>();
private static final AtomicReference<Boolean> myLastTrimConditionalIdsFromPrimaryKeysForUnitTest =
new AtomicReference<>();
private boolean myTrimConditionalIdsFromPrimaryKeys;
public boolean isTrimConditionalIdsFromPrimaryKeys() {

@@ -26,5 +27,4 @@ public class HapiHibernateDialectSettingsService implements Service {
public static Boolean getLastTrimConditionalIdsFromPrimaryKeysForUnitTest() {
return myLastTrimConditionalIdsFromPrimaryKeysForUnitTest.get();
}
}
@@ -37,7 +37,8 @@ import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Optional;
import java.util.Map;
import java.util.stream.Collectors;
import static org.slf4j.LoggerFactory.getLogger;

@@ -118,47 +119,33 @@ public class ResourceVersionSvcDaoImpl implements IResourceVersionSvc {
private ResourcePersistentIdMap getIdsOfExistingResources(
RequestPartitionId thePartitionId, Collection<IIdType> theIds) {
// these are the found Ids that were in the db
ResourcePersistentIdMap retval = new ResourcePersistentIdMap();
ResourcePersistentIdMap retVal = new ResourcePersistentIdMap();
if (theIds == null || theIds.isEmpty()) {
return retval;
return retVal;
}
Map<String, IIdType> idValueToId = theIds.stream()
.collect(Collectors.toMap(t -> t.toUnqualifiedVersionless().getValue(), t -> t));
List<JpaPid> jpaPids =
myIdHelperService.resolveResourcePersistentIdsWithCache(thePartitionId, new ArrayList<>(theIds));
// we'll use this map to fetch pids that require versions
HashMap<Long, JpaPid> pidsToVersionToResourcePid = new HashMap<>();
Collection<Object[]> resourceEntries = myResourceTableDao.getResourceVersionsForPid(jpaPids);
// fill in our map
for (JpaPid pid : jpaPids) {
if (pid.getVersion() == null) {
pidsToVersionToResourcePid.put(pid.getId(), pid);
}
Optional<IIdType> idOp = theIds.stream()
.filter(i ->
i.getIdPart().equals(pid.getAssociatedResourceId().getIdPart()))
.findFirst();
// this should always be present
// since it was passed in.
// but land of optionals...
idOp.ifPresent(id -> retval.put(id, pid));
for (Object[] nextRecord : resourceEntries) {
// order matters!
JpaPid retPid = (JpaPid) nextRecord[0];
String resType = (String) nextRecord[1];
String fhirId = (String) nextRecord[2];
Long version = (Long) nextRecord[3];
retPid.setVersion(version);
IIdType originalId = idValueToId.get(resType + "/" + fhirId);
retVal.put(originalId, retPid);
}
// set any versions we don't already have
if (!pidsToVersionToResourcePid.isEmpty()) {
Collection<Object[]> resourceEntries =
myResourceTableDao.getResourceVersionsForPid(new ArrayList<>(pidsToVersionToResourcePid.keySet()));
for (Object[] nextRecord : resourceEntries) {
// order matters!
Long retPid = (Long) nextRecord[0];
String resType = (String) nextRecord[1];
Long version = (Long) nextRecord[2];
pidsToVersionToResourcePid.get(retPid).setVersion(version);
}
}
return retval;
return retVal;
}
}
@@ -740,8 +740,8 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
} else {
ResourceHistoryTable currentHistoryVersion = theEntity.getCurrentVersionEntity();
if (currentHistoryVersion == null) {
currentHistoryVersion = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(
theEntity.getId(), theEntity.getVersion());
currentHistoryVersion =
myResourceHistoryTableDao.findForIdAndVersion(theEntity.getId(), theEntity.getVersion());
}
if (currentHistoryVersion == null || !currentHistoryVersion.hasResource()) {
changed = true;

@@ -1264,7 +1264,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
newParams, entity, existingParams);
// FIXME: restore?
// newParams.populateResourceTableParamCollections(entity);
// newParams.populateResourceTableParamCollections(entity);
entity.setParamsForStorage(newParams);
// Interceptor broadcast: JPA_PERFTRACE_INFO

@@ -1481,8 +1481,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
* this could return null if the current resourceVersion has been expunged
* in which case we'll still create a new one
*/
historyEntry = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(
theEntity.getId(), resourceVersion - 1);
historyEntry = myResourceHistoryTableDao.findForIdAndVersion(theEntity.getId(), resourceVersion - 1);
if (historyEntry != null) {
reusingHistoryEntity = true;
theEntity.populateHistoryEntityVersionAndDates(historyEntry);
@@ -1383,8 +1383,8 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
doMetaAdd(theMetaAdd, latestVersion, theRequest, transactionDetails);
// Also update history entry
ResourceHistoryTable history = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(
entity.getResourceId(), entity.getVersion());
ResourceHistoryTable history =
myResourceHistoryTableDao.findForIdAndVersion(entity.getResourceId(), entity.getVersion());
doMetaAdd(theMetaAdd, history, theRequest, transactionDetails);
}

@@ -1432,8 +1432,8 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
} else {
doMetaDelete(theMetaDel, latestVersion, theRequest, transactionDetails);
// Also update history entry
ResourceHistoryTable history = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(
entity.getResourceId(), entity.getVersion());
ResourceHistoryTable history =
myResourceHistoryTableDao.findForIdAndVersion(entity.getResourceId(), entity.getVersion());
doMetaDelete(theMetaDel, history, theRequest, transactionDetails);
}

@@ -1704,7 +1704,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
int pageSize = 100;
for (int page = 0; ((long) page * pageSize) < entity.getVersion(); page++) {
Slice<ResourceHistoryTable> historyEntities =
myResourceHistoryTableDao.findForResourceIdAndReturnEntitiesAndFetchProvenance(
myResourceHistoryTableDao.findAllVersionsExceptSpecificForResourcePid(
PageRequest.of(page, pageSize), entity.getId(), historyEntity.getVersion());
for (ResourceHistoryTable next : historyEntities) {
reindexOptimizeStorageHistoryEntity(entity, next);

@@ -1729,7 +1729,8 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
if (myStorageSettings.isAccessMetaSourceInformationFromProvenanceTable()) {
if (isBlank(historyEntity.getSourceUri()) && isBlank(historyEntity.getRequestId())) {
IdAndPartitionId id = historyEntity.getId().asIdAndPartitionId();
Optional<ResourceHistoryProvenanceEntity> provenanceEntityOpt = myResourceHistoryProvenanceDao.findById(id);
Optional<ResourceHistoryProvenanceEntity> provenanceEntityOpt =
myResourceHistoryProvenanceDao.findById(id);
if (provenanceEntityOpt.isPresent()) {
ResourceHistoryProvenanceEntity provenanceEntity = provenanceEntityOpt.get();
historyEntity.setSourceUri(provenanceEntity.getSourceUri());
@@ -168,7 +168,8 @@ public class JpaStorageResourceParser implements IJpaStorageResourceParser {
if (isBlank(provenanceSourceUri) && isBlank(provenanceRequestId)) {
// FIXME: write test if none is found
if (myStorageSettings.isAccessMetaSourceInformationFromProvenanceTable()) {
Optional<ResourceHistoryProvenanceEntity> provenanceOpt = myResourceHistoryProvenanceDao.findById(history.getId().asIdAndPartitionId());
Optional<ResourceHistoryProvenanceEntity> provenanceOpt = myResourceHistoryProvenanceDao.findById(
history.getId().asIdAndPartitionId());
if (provenanceOpt.isPresent()) {
ResourceHistoryProvenanceEntity provenance = provenanceOpt.get();
provenanceRequestId = provenance.getRequestId();

@@ -183,15 +184,13 @@ public class JpaStorageResourceParser implements IJpaStorageResourceParser {
history = resource.getCurrentVersionEntity();
} else {
version = theEntity.getVersion();
history = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(
theEntity.getResourceId(), version);
history = myResourceHistoryTableDao.findForIdAndVersion(theEntity.getResourceId(), version);
((ResourceTable) theEntity).setCurrentVersionEntity(history);
while (history == null) {
if (version > 1L) {
version--;
history = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(
theEntity.getResourceId(), version);
history = myResourceHistoryTableDao.findForIdAndVersion(theEntity.getResourceId(), version);
} else {
return null;
}

@@ -218,7 +217,8 @@ public class JpaStorageResourceParser implements IJpaStorageResourceParser {
if (isBlank(provenanceSourceUri) && isBlank(provenanceRequestId)) {
// FIXME: write test for this if none is found
if (myStorageSettings.isAccessMetaSourceInformationFromProvenanceTable()) {
Optional<ResourceHistoryProvenanceEntity> provenanceOpt = myResourceHistoryProvenanceDao.findById(history.getId().asIdAndPartitionId());
Optional<ResourceHistoryProvenanceEntity> provenanceOpt = myResourceHistoryProvenanceDao.findById(
history.getId().asIdAndPartitionId());
if (provenanceOpt.isPresent()) {
ResourceHistoryProvenanceEntity provenance = provenanceOpt.get();
provenanceRequestId = provenance.getRequestId();
@@ -32,5 +32,4 @@ public interface IResourceHistoryProvenanceDao
@Modifying
@Query("DELETE FROM ResourceHistoryProvenanceEntity t WHERE t.myId = :pid")
void deleteByPid(@Param("pid") Long theId);
}
@@ -43,30 +43,27 @@ public interface IResourceHistoryTableDao
@Query("SELECT t FROM ResourceHistoryTable t WHERE t.myResourcePid = :resId ORDER BY t.myResourceVersion ASC")
List<ResourceHistoryTable> findAllVersionsForResourceIdInOrder(@Param("resId") JpaPid theId);
// FIXME: this fetched provenance
@Query("SELECT t FROM ResourceHistoryTable t WHERE t.myResourcePid = :id AND t.myResourceVersion = :version")
ResourceHistoryTable findForIdAndVersionAndFetchProvenance(
@Param("id") JpaPid theId, @Param("version") long theVersion);
ResourceHistoryTable findForIdAndVersion(@Param("id") JpaPid theId, @Param("version") long theVersion);
@Query(
"SELECT t.myId FROM ResourceHistoryTable t WHERE t.myResourcePid = :resId AND t.myResourceVersion <> :dontWantVersion")
Slice<ResourceHistoryTablePk> findForResourceId(
Pageable thePage, @Param("resId") JpaPid theId, @Param("dontWantVersion") Long theDontWantVersion);
// FIXME: this fetched provenance
@Query(
"SELECT t FROM ResourceHistoryTable t WHERE t.myResourcePid = :resId AND t.myResourceVersion <> :dontWantVersion")
Slice<ResourceHistoryTable> findForResourceIdAndReturnEntitiesAndFetchProvenance(
Slice<ResourceHistoryTable> findAllVersionsExceptSpecificForResourcePid(
Pageable thePage, @Param("resId") JpaPid theId, @Param("dontWantVersion") Long theDontWantVersion);
@Query("" + "SELECT v.myId FROM ResourceHistoryTable v "
@Query("SELECT v.myId FROM ResourceHistoryTable v "
+ "LEFT OUTER JOIN ResourceTable t ON (v.myResourceTable = t) "
+ "WHERE v.myResourceVersion <> t.myVersion AND "
+ "t.myPid = :resId")
Slice<ResourceHistoryTablePk> findIdsOfPreviousVersionsOfResourceId(
Pageable thePage, @Param("resId") JpaPid theResourceId);
@Query("" + "SELECT v.myId FROM ResourceHistoryTable v "
@Query("SELECT v.myId FROM ResourceHistoryTable v "
+ "LEFT OUTER JOIN ResourceTable t ON (v.myResourceTable = t) "
+ "WHERE v.myResourceVersion <> t.myVersion AND "
+ "t.myResourceType = :restype")
@@ -173,13 +173,12 @@ public interface IResourceTableDao
/**
* This query will return rows with the following values:
* Id (resource pid - long), ResourceType (Patient, etc), version (long)
* Id (JpaPid), FhirId, ResourceType (Patient, etc), version (long)
* Order matters!
* @param pid - list of pids to get versions for
* @return
*/
@Query("SELECT t.myPid.myId, t.myResourceType, t.myVersion FROM ResourceTable t WHERE t.myPid.myId IN ( :pid )")
Collection<Object[]> getResourceVersionsForPid(@Param("pid") List<Long> pid);
@Query("SELECT t.myPid, t.myResourceType, t.myFhirId, t.myVersion FROM ResourceTable t WHERE t.myPid IN ( :pid )")
Collection<Object[]> getResourceVersionsForPid(@Param("pid") Collection<JpaPid> pid);
@Query("SELECT t FROM ResourceTable t WHERE t.myPartitionIdValue IS NULL AND t.myPid.myId = :pid")
Optional<ResourceTable> readByPartitionIdNull(@Param("pid") Long theResourceId);
@@ -154,8 +154,7 @@ public class JpaResourceExpungeService implements IResourceExpungeService<JpaPid
Slice<ResourceHistoryTablePk> ids;
if (theJpaPid != null && theJpaPid.getId() != null) {
if (theJpaPid.getVersion() != null) {
ids = toSlice(myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(
theJpaPid, theJpaPid.getVersion()));
ids = toSlice(myResourceHistoryTableDao.findForIdAndVersion(theJpaPid, theJpaPid.getVersion()));
} else {
ids = myResourceHistoryTableDao.findIdsOfPreviousVersionsOfResourceId(page, theJpaPid);
}

@@ -237,7 +236,8 @@ public class JpaResourceExpungeService implements IResourceExpungeService<JpaPid
callHooks(theRequestDetails, theRemainingCount, version, id);
if (myStorageSettings.isAccessMetaSourceInformationFromProvenanceTable()) {
Optional<ResourceHistoryProvenanceEntity> provenanceOpt = myResourceHistoryProvenanceTableDao.findById(theNextVersionId.asIdAndPartitionId());
Optional<ResourceHistoryProvenanceEntity> provenanceOpt =
myResourceHistoryProvenanceTableDao.findById(theNextVersionId.asIdAndPartitionId());
provenanceOpt.ifPresent(entity -> myResourceHistoryProvenanceTableDao.deleteByPid(entity.getId()));
}

@@ -303,8 +303,8 @@ public class JpaResourceExpungeService implements IResourceExpungeService<JpaPid
ResourceTable resource = myResourceTableDao.findById(id).orElseThrow(IllegalStateException::new);
ResourceHistoryTable currentVersion = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(
resource.getId(), resource.getVersion());
ResourceHistoryTable currentVersion =
myResourceHistoryTableDao.findForIdAndVersion(resource.getId(), resource.getVersion());
if (currentVersion != null) {
expungeHistoricalVersion(theRequestDetails, currentVersion.getId(), theRemainingCount);
}
@@ -847,14 +847,14 @@ public class IdHelperService implements IIdHelperService<JpaPid> {
@Override
@Nonnull
public JpaPid getPidOrThrowException(@Nonnull IAnyResource theResource) {
Long theResourcePID = (Long) theResource.getUserData(RESOURCE_PID);
JpaPid theResourcePID = (JpaPid) theResource.getUserData(RESOURCE_PID);
if (theResourcePID == null) {
throw new IllegalStateException(Msg.code(2108)
+ String.format(
"Unable to find %s in the user data for %s with ID %s",
RESOURCE_PID, theResource, theResource.getId()));
}
return JpaPid.fromId(theResourcePID);
return theResourcePID;
}
@Override
@@ -1186,8 +1186,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
Long version = resourcePidToVersion.get(next.getResourceId());
resourceId.setVersion(version);
if (version != null && !version.equals(next.getVersion())) {
next = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(
next.getResourceId(), version);
next = myResourceHistoryTableDao.findForIdAndVersion(next.getResourceId(), version);
}
}
@@ -31,7 +31,6 @@ import jakarta.persistence.JoinColumn;
import jakarta.persistence.JoinColumns;
import jakarta.persistence.ManyToOne;
import jakarta.persistence.Table;
import jakarta.persistence.Transient;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
@@ -687,6 +687,9 @@ public class FhirResourceDaoCreatePlaceholdersR4Test extends BaseJpaR4Test {
patient.setActive(true);
myPatientDao.update(patient);
logAllResources();
logAllResourceVersions();
// observation (with version 2)
Observation obs = new Observation();
obs.setId("Observation/DEF");
@@ -425,7 +425,7 @@ public class FhirResourceDaoR4CreateTest extends BaseJpaR4Test {
assertTrue(outcome.getCreated());
ResourceSearchUrlEntity searchUrlEntity = myResourceSearchUrlDao.findAll().get(0);
assertNotNull(searchUrlEntity);
assertEquals(expectedResId, searchUrlEntity.getResourcePid());
assertEquals(expectedResId, searchUrlEntity.getResourcePid().getId());
Instant now = Instant.now();
assertThat(searchUrlEntity.getCreatedTime())
.as("Check that the creation time of the URL is within the last second")

@@ -1400,7 +1400,7 @@ public class FhirResourceDaoR4CreateTest extends BaseJpaR4Test {
assertEquals(theExpectedTasks.length, searchUrlsPreDelete.size());
assertEquals(Arrays.stream(theExpectedTasks).map(Resource::getIdElement).map(IdType::getIdPartAsLong).toList(),
searchUrlsPreDelete.stream().map(ResourceSearchUrlEntity::getResourcePid).toList());
searchUrlsPreDelete.stream().map(t->t.getResourcePid().getId()).toList());
}
private void deleteExpunge(Task theTask) {
@@ -54,12 +54,12 @@ public class FhirResourceDaoR4DeleteTest extends BaseJpaR4Test {
// Current version should be marked as deleted
runInTransaction(() -> {
ResourceHistoryTable resourceTable = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(JpaPid.fromId(id.getIdPartAsLong()), 1);
ResourceHistoryTable resourceTable = myResourceHistoryTableDao.findForIdAndVersion(JpaPid.fromId(id.getIdPartAsLong()), 1);
assertNull(resourceTable.getDeleted());
assertNotNull(resourceTable.getPersistentId());
});
runInTransaction(() -> {
ResourceHistoryTable resourceTable = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(JpaPid.fromId(id.getIdPartAsLong()), 2);
ResourceHistoryTable resourceTable = myResourceHistoryTableDao.findForIdAndVersion(JpaPid.fromId(id.getIdPartAsLong()), 2);
assertNotNull(resourceTable.getDeleted());
});

@@ -196,7 +196,7 @@ public class FhirResourceDaoR4DeleteTest extends BaseJpaR4Test {
// Mark the current history version as not-deleted even though the actual resource
// table entry is marked deleted
runInTransaction(() -> {
ResourceHistoryTable resourceTable = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(JpaPid.fromId(id.getIdPartAsLong()), 2);
ResourceHistoryTable resourceTable = myResourceHistoryTableDao.findForIdAndVersion(JpaPid.fromId(id.getIdPartAsLong()), 2);
resourceTable.setDeleted(null);
myResourceHistoryTableDao.save(resourceTable);
});
@@ -29,7 +29,7 @@ public class FhirResourceDaoR4InlineResourceModeTest extends BaseJpaR4Test {
relocateResourceTextToCompressedColumn(pid, 1L);
runInTransaction(()->{
ResourceHistoryTable historyEntity = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(pid, 1);
ResourceHistoryTable historyEntity = myResourceHistoryTableDao.findForIdAndVersion(pid, 1);
assertNotNull(historyEntity.getResource());
assertNull(historyEntity.getResourceTextVc());
assertEquals(ResourceEncodingEnum.JSONC, historyEntity.getEncoding());
@@ -295,7 +295,7 @@ public class FhirResourceDaoR4Test extends BaseJpaR4Test {
table.setDeleted(new Date());
table = myResourceTableDao.saveAndFlush(table);
ResourceHistoryTable newHistory = table.toHistory(true);
ResourceHistoryTable currentHistory = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(table.getId(), 1L);
ResourceHistoryTable currentHistory = myResourceHistoryTableDao.findForIdAndVersion(table.getId(), 1L);
newHistory.setEncoding(currentHistory.getEncoding());
newHistory.setResourceTextVc(currentHistory.getResourceTextVc());
myResourceHistoryTableDao.save(newHistory);

@@ -2948,7 +2948,7 @@ public class FhirResourceDaoR4Test extends BaseJpaR4Test {
tx.execute(new TransactionCallbackWithoutResult() {
@Override
protected void doInTransactionWithoutResult(TransactionStatus theStatus) {
ResourceHistoryTable table = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(JpaPid.fromId(id.getIdPartAsLong()), 1L);
ResourceHistoryTable table = myResourceHistoryTableDao.findForIdAndVersion(JpaPid.fromId(id.getIdPartAsLong()), 1L);
String newContent = myFhirContext.newJsonParser().encodeResourceToString(p);
newContent = newContent.replace("male", "foo");
table.setResourceTextVc(newContent);
@@ -604,7 +604,7 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest {
TransactionTemplate template = new TransactionTemplate(myTxManager);
template.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);
template.execute((TransactionCallback<ResourceTable>) t -> {
ResourceHistoryTable resourceHistoryTable = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(JpaPid.fromId(id.getIdPartAsLong()), id.getVersionIdPartAsLong());
ResourceHistoryTable resourceHistoryTable = myResourceHistoryTableDao.findForIdAndVersion(JpaPid.fromId(id.getIdPartAsLong()), id.getVersionIdPartAsLong());
resourceHistoryTable.setEncoding(ResourceEncodingEnum.JSON);
resourceHistoryTable.setResourceTextVc("{\"resourceType\":\"FOO\"}");
myResourceHistoryTableDao.save(resourceHistoryTable);

@@ -649,7 +649,7 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest {
assertEquals(1, myPatientDao.search(searchParamMap).size().intValue());
runInTransaction(() -> {
ResourceHistoryTable historyEntry = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(JpaPid.fromId(id.getIdPartAsLong()), 3);
ResourceHistoryTable historyEntry = myResourceHistoryTableDao.findForIdAndVersion(JpaPid.fromId(id.getIdPartAsLong()), 3);
assertNotNull(historyEntry);
myResourceHistoryTableDao.delete(historyEntry);
});
|
|||
import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao;
|
||||
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
|
||||
import ca.uhn.fhir.jpa.model.dao.JpaPid;
|
||||
import ca.uhn.fhir.jpa.model.entity.IdAndPartitionId;
|
||||
import ca.uhn.fhir.jpa.model.entity.PartitionablePartitionId;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryProvenanceEntity;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTag;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedComboStringUnique;
|
||||
|
@ -81,7 +79,6 @@ import org.mockito.ArgumentCaptor;
|
|||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.jdbc.core.JdbcTemplate;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.time.LocalDate;
|
||||
|
@ -89,7 +86,6 @@ import java.util.Arrays;
|
|||
import java.util.Date;
|
||||
import java.util.List;
|
||||
import java.util.Locale;
|
||||
import java.util.Optional;
|
||||
import java.util.TimeZone;
|
||||
import java.util.concurrent.atomic.AtomicInteger;
|
||||
import java.util.function.Supplier;
|
||||
|
@ -439,7 +435,7 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
|
|||
assertLocalDateFromDbMatches(myPartitionDate, tags.get(0).getPartitionId().getPartitionDate());
|
||||
|
||||
// HFJ_RES_VER
|
||||
ResourceHistoryTable version = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(patientId, 1L);
|
||||
ResourceHistoryTable version = myResourceHistoryTableDao.findForIdAndVersion(patientId, 1L);
|
||||
assertEquals(myPartitionId, version.getPartitionId().getPartitionId().intValue());
|
||||
assertLocalDateFromDbMatches(myPartitionDate, version.getPartitionId().getPartitionDate());
|
||||
|
||||
|
@ -524,7 +520,7 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
|
|||
assertLocalDateFromDbMatches(myPartitionDate, tags.get(0).getPartitionId().getPartitionDate());
|
||||
|
||||
// HFJ_RES_VER
|
||||
ResourceHistoryTable version = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(patientId, 1L);
|
||||
ResourceHistoryTable version = myResourceHistoryTableDao.findForIdAndVersion(patientId, 1L);
|
||||
assertNull(version.getPartitionId().getPartitionId());
|
||||
assertLocalDateFromDbMatches(myPartitionDate, version.getPartitionId().getPartitionDate());
|
||||
|
||||
|
@ -782,7 +778,7 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
|
|||
|
||||
// HFJ_RES_VER
|
||||
int version = 2;
|
||||
ResourceHistoryTable resVer = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(patientId, version);
|
||||
ResourceHistoryTable resVer = myResourceHistoryTableDao.findForIdAndVersion(patientId, version);
|
||||
assertEquals(myPartitionId, resVer.getPartitionId().getPartitionId().intValue());
|
||||
assertLocalDateFromDbMatches(myPartitionDate, resVer.getPartitionId().getPartitionDate());
|
||||
|
||||
|
@ -856,7 +852,7 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
|
|||
|
||||
// HFJ_RES_VER
|
||||
int version = 2;
|
||||
ResourceHistoryTable resVer = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(patientId, version);
|
||||
ResourceHistoryTable resVer = myResourceHistoryTableDao.findForIdAndVersion(patientId, version);
|
||||
assertEquals(myPartitionId, resVer.getPartitionId().getPartitionId().intValue());
|
||||
assertLocalDateFromDbMatches(myPartitionDate, resVer.getPartitionId().getPartitionDate());
|
||||
|
||||
|
|
|
@@ -171,7 +171,7 @@ public class PartitioningInterceptorR4Test extends BaseJpaR4SystemTest {
runInTransaction(() -> {
List<ResourceTable> resources = myResourceTableDao.findAll();
assertEquals(1, resources.size());
assertEquals(null, resources.get(0).getPartitionId());
assertEquals(null, resources.get(0).getPartitionId().getPartitionId());
});
}

@@ -223,7 +223,7 @@ public class PartitioningInterceptorR4Test extends BaseJpaR4SystemTest {
String searchSql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, true);
ourLog.info("Search SQL:\n{}", searchSql);
assertEquals(0, StringUtils.countMatches(searchSql, "PARTITION_ID"));
assertEquals(1, StringUtils.countMatches(searchSql, "PARTITION_ID"));
} finally {
myInterceptorRegistry.unregisterInterceptor(interceptor);

@@ -258,7 +258,7 @@ public class PartitioningInterceptorR4Test extends BaseJpaR4SystemTest {
String searchSql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, true);
ourLog.info("Search SQL:\n{}", searchSql);
assertEquals(1, StringUtils.countMatches(searchSql, "PARTITION_ID"));
assertEquals(2, StringUtils.countMatches(searchSql, "PARTITION_ID"));
} finally {
myInterceptorRegistry.unregisterInterceptor(interceptor);
@@ -223,7 +223,7 @@ public class PatientIdPartitionInterceptorTest extends BaseResourceProviderR4Tes
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
List<SqlQuery> selectQueriesForCurrentThread = myCaptureQueriesListener.getSelectQueriesForCurrentThread();
assertEquals(4, selectQueriesForCurrentThread.size());
assertEquals(3, selectQueriesForCurrentThread.size());
assertThat(selectQueriesForCurrentThread.get(0).getSql(false, false)).contains("PARTITION_ID in (?)");
assertThat(selectQueriesForCurrentThread.get(1).getSql(false, false)).contains("PARTITION_ID=");
}
@@ -105,7 +105,7 @@ public class DiffProviderR4Test extends BaseResourceProviderR4Test {
createPatient(withId(id), withActiveTrue(), withFamily("SMITH"));
runInTransaction(() -> {
ResourceHistoryTable version2 = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(JpaPid.fromId(id.getIdPartAsLong()), 2);
ResourceHistoryTable version2 = myResourceHistoryTableDao.findForIdAndVersion(JpaPid.fromId(id.getIdPartAsLong()), 2);
myResourceHistoryTableDao.deleteByPid(version2.getId());
});
@@ -36,7 +36,7 @@ public class ResourceProviderInvalidDataR4Test extends BaseResourceProviderR4Tes
// Manually set the value to be an invalid decimal number
runInTransaction(() -> {
ResourceHistoryTable resVer = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(JpaPid.fromId(id), 1);
ResourceHistoryTable resVer = myResourceHistoryTableDao.findForIdAndVersion(JpaPid.fromId(id), 1);
String resourceText = resVer.getResourceTextVc();
resourceText = resourceText.replace("100", "-.100");
resVer.setResourceTextVc(resourceText);
@@ -3342,7 +3342,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
new TransactionTemplate(myTxManager).execute(new TransactionCallbackWithoutResult() {
@Override
protected void doInTransactionWithoutResult(TransactionStatus status) {
ResourceHistoryTable version = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(JpaPid.fromId(id1.getIdPartAsLong()), 1);
ResourceHistoryTable version = myResourceHistoryTableDao.findForIdAndVersion(JpaPid.fromId(id1.getIdPartAsLong()), 1);
myResourceHistoryTableDao.delete(version);
}
});

@@ -3367,7 +3367,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
new TransactionTemplate(myTxManager).execute(new TransactionCallbackWithoutResult() {
@Override
protected void doInTransactionWithoutResult(TransactionStatus status) {
ResourceHistoryTable version = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(JpaPid.fromId(id1.getIdPartAsLong()), 1);
ResourceHistoryTable version = myResourceHistoryTableDao.findForIdAndVersion(JpaPid.fromId(id1.getIdPartAsLong()), 1);
myResourceHistoryTableDao.delete(version);
}
});
@@ -362,7 +362,7 @@ public class GiantTransactionPerfTest {
}
@Override
public ResourceHistoryTable findForIdAndVersionAndFetchProvenance(JpaPid theId, long theVersion) {
public ResourceHistoryTable findForIdAndVersion(JpaPid theId, long theVersion) {
throw new UnsupportedOperationException();
}

@@ -373,7 +373,7 @@ public class GiantTransactionPerfTest {
}
@Override
public Slice<ResourceHistoryTable> findForResourceIdAndReturnEntitiesAndFetchProvenance(Pageable thePage, JpaPid theId, Long theDontWantVersion) {
public Slice<ResourceHistoryTable> findAllVersionsExceptSpecificForResourcePid(Pageable thePage, JpaPid theId, Long theDontWantVersion) {
throw new UnsupportedOperationException();
}
@@ -6,7 +6,6 @@ import ca.uhn.fhir.jpa.esr.ExternallyStoredResourceAddressMetadataKey;
import ca.uhn.fhir.jpa.esr.ExternallyStoredResourceServiceRegistry;
import ca.uhn.fhir.jpa.esr.IExternallyStoredResourceService;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.model.entity.IdAndPartitionId;
import ca.uhn.fhir.jpa.model.entity.ResourceEncodingEnum;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;

@@ -54,7 +53,7 @@ public class ExternallyStoredResourceR5Test extends BaseJpaR5Test {
runInTransaction(()->{
ResourceTable resource = myResourceTableDao.getReferenceById(JpaPid.fromId(id.getIdPartAsLong()));
assertNotNull(resource);
ResourceHistoryTable history = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(JpaPid.fromId(id.getIdPartAsLong()), 1L);
ResourceHistoryTable history = myResourceHistoryTableDao.findForIdAndVersion(JpaPid.fromId(id.getIdPartAsLong()), 1L);
assertNotNull(history);
assertEquals(ResourceEncodingEnum.ESR, history.getEncoding());
assertEquals(MY_PROVIDER_ID + ":" + ADDRESS_123, history.getResourceTextVc());
@@ -84,7 +84,6 @@ import ca.uhn.fhir.jpa.interceptor.PerformanceTracingLoggingInterceptor;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTablePk;
import ca.uhn.fhir.jpa.packages.IPackageInstallerSvc;
import ca.uhn.fhir.jpa.partition.IPartitionLookupSvc;
import ca.uhn.fhir.jpa.provider.JpaSystemProvider;

@@ -711,7 +710,7 @@ public abstract class BaseJpaR4Test extends BaseJpaTest implements ITestDataBuil
protected void relocateResourceTextToCompressedColumn(JpaPid theResourcePid, Long theVersion) {
runInTransaction(()->{
ResourceHistoryTable historyEntity = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(theResourcePid, theVersion);
ResourceHistoryTable historyEntity = myResourceHistoryTableDao.findForIdAndVersion(theResourcePid, theVersion);
byte[] contents = GZipUtil.compress(historyEntity.getResourceTextVc());
myResourceHistoryTableDao.updateNonInlinedContents(contents, historyEntity.getId());
});
@@ -1741,7 +1741,8 @@ public class JpaStorageSettings extends StorageSettings {
}
// FIXME: document
public void setAccessMetaSourceInformationFromProvenanceTable(boolean theAccessMetaSourceInformationFromProvenanceTable) {
public void setAccessMetaSourceInformationFromProvenanceTable(
boolean theAccessMetaSourceInformationFromProvenanceTable) {
myAccessMetaSourceInformationFromProvenanceTable = theAccessMetaSourceInformationFromProvenanceTable;
}
@@ -57,7 +57,8 @@ public class DdlGeneratorHibernate61 {
private final List<GenerateDdlMojo.Dialect> myDialects = new ArrayList<>();
private File myOutputDirectory;
private MavenProject myProject;
private HapiHibernateDialectSettingsService myHapiHibernateDialectSettingsService = new HapiHibernateDialectSettingsService();
private HapiHibernateDialectSettingsService myHapiHibernateDialectSettingsService =
new HapiHibernateDialectSettingsService();
public void addPackage(String thePackage) {
Validate.notNull(thePackage);

@@ -94,14 +95,14 @@ public class DdlGeneratorHibernate61 {
String dialectClassName = nextDialect.getClassName();
StandardServiceRegistryBuilder registryBuilder =
new StandardServiceRegistryBuilder(bootstrapServiceRegistry);
new StandardServiceRegistryBuilder(bootstrapServiceRegistry);
registryBuilder.applySetting(SchemaToolingSettings.HBM2DDL_AUTO, "create");
registryBuilder.applySetting(JdbcSettings.DIALECT, dialectClassName);
registryBuilder.addService(ConnectionProvider.class, connectionProvider);
registryBuilder.addService(
ISequenceValueMassager.class, new ISequenceValueMassager.NoopSequenceValueMassager());
ISequenceValueMassager.class, new ISequenceValueMassager.NoopSequenceValueMassager());
registryBuilder.addService(
HapiHibernateDialectSettingsService.class, myHapiHibernateDialectSettingsService);
HapiHibernateDialectSettingsService.class, myHapiHibernateDialectSettingsService);
StandardServiceRegistry standardRegistry = registryBuilder.build();
MetadataSources metadataSources = new MetadataSources(standardRegistry);

@@ -166,7 +167,7 @@ public class DdlGeneratorHibernate61 {
@Nonnull
private Set<Class<?>> scanClasspathForEntityClasses(Set<String> thePackages, ClassLoader theClassLoader)
throws MojoFailureException {
throws MojoFailureException {
ClassPathScanningCandidateComponentProvider provider = new ClassPathScanningCandidateComponentProvider(false);
provider.setResourceLoader(new PathMatchingResourcePatternResolver(theClassLoader));

@@ -199,7 +200,7 @@ public class DdlGeneratorHibernate61 {
}
private static void writeContentsToFile(String prependFile, ClassLoader classLoader, File outputFile)
throws MojoFailureException {
throws MojoFailureException {
if (isNotBlank(prependFile)) {
ResourceLoader loader = new DefaultResourceLoader(classLoader);
Resource resource = loader.getResource(prependFile);

@@ -218,7 +219,7 @@ public class DdlGeneratorHibernate61 {
* matter that it doesn't correlate to the specified dialect.
*/
private static class FakeConnectionConnectionProvider extends UserSuppliedConnectionProviderImpl
implements Closeable {
implements Closeable {
private static final long serialVersionUID = 4147495169899817244L;
private Connection connection;

@@ -237,8 +238,8 @@ public class DdlGeneratorHibernate61 {
try {
connection.setAutoCommit(true);
connection
.prepareStatement("create table all_sequences (PID bigint not null, primary key (PID))")
.execute();
.prepareStatement("create table all_sequences (PID bigint not null, primary key (PID))")
.execute();
} catch (SQLException e) {
ourLog.error("Failed to create sequences table", e);
}
@@ -52,7 +52,9 @@ public class GenerateDdlMojo extends AbstractMojo {
}
DdlGeneratorHibernate61 generator = new DdlGeneratorHibernate61();
generator.getHapiHibernateDialectSettingsService().setTrimConditionalIdsFromPrimaryKeys(trimConditionalIdsFromPrimaryKeys);
generator
.getHapiHibernateDialectSettingsService()
.setTrimConditionalIdsFromPrimaryKeys(trimConditionalIdsFromPrimaryKeys);
for (String packageName : packageNames) {
String t = trim(packageName);