Postgres migrate from lob datatypes (#5837)
* wip - before moving JpaEmbeddedDatabase to test-utilities
* failing test
* BinaryStorageEntity migration with associated test.
* BinaryStorageEntity migration with associated test - fix finger fumble.
* TermConceptProperty Entity migration with associated test.
* TermValueSetConcept Entity migration with associated test.
* TermConcept Entity migration
* fixing merge issue
* spotless
* adding error code 2513 for exception
* adding tests.
* spotless
* adding unique message code for exceptions.
* fixing test failure - 1
* fixing test failure - 2
* fixing test failure - 3
* fixing test failure - 4
* fixing test failure - 5
* troubleshooting test failure
* migration for audit/transaction logs.
* spotless
* preparation for initial code review.
* adding changelog.
* files headers
* addressing comments from first code review.
* modifying RenameTableTask to drop table with newTableName if configured to do so.
* modifications to pass tests.
* passing all tests
---------
Co-authored-by: peartree <etienne.poirier@smilecdr.com>
This commit is contained in:
parent 29cddaecc7
commit aeb4299864
@@ -2951,7 +2951,9 @@ public enum Pointcut implements IPointcut {
 "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails",
 "ca.uhn.fhir.jpa.util.SqlQueryList"),
 
+@Deprecated(since = "7.2.0 - Use STORAGE_BINARY_ASSIGN_BINARY_CONTENT_ID_PREFIX instead.")
 /**
+ * <b> Deprecated but still supported. Will eventually be removed. <code>Please use Pointcut.STORAGE_BINARY_ASSIGN_BINARY_CONTENT_ID_PREFIX</code> </b>
 * <b> Binary Blob Prefix Assigning Hook:</b>
 * <p>
 * Immediately before a binary blob is stored to its eventual data sink, this hook is called.
@@ -2977,6 +2979,32 @@ public enum Pointcut implements IPointcut {
 "ca.uhn.fhir.rest.api.server.RequestDetails",
 "org.hl7.fhir.instance.model.api.IBaseResource"),
 
+/**
+ * <b> Binary Content Prefix Assigning Hook:</b>
+ * <p>
+ * Immediately before binary content is stored to its eventual data sink, this hook is called.
+ * This hook allows implementers to provide a prefix to the binary content's ID.
+ * This is helpful in cases where you want to identify this blob for later retrieval outside of HAPI-FHIR. Note that allowable characters will depend on the specific storage sink being used.
+ * <ul>
+ * <li>
+ * ca.uhn.fhir.rest.api.server.RequestDetails - A bean containing details about the request that is about to be processed, including details such as the
+ * resource type and logical ID (if any) and other FHIR-specific aspects of the request which have been
+ * pulled out of the servlet request. Note that the bean
+ * properties are not all guaranteed to be populated.
+ * </li>
+ * <li>
+ * org.hl7.fhir.instance.model.api.IBaseBinary - The binary resource that is about to be stored.
+ * </li>
+ * </ul>
+ * <p>
+ * Hooks should return <code>String</code>, which represents the full prefix to be applied to the blob.
+ * </p>
+ */
+STORAGE_BINARY_ASSIGN_BINARY_CONTENT_ID_PREFIX(
+String.class,
+"ca.uhn.fhir.rest.api.server.RequestDetails",
+"org.hl7.fhir.instance.model.api.IBaseResource"),
+
 /**
 * <b>Storage Hook:</b>
 * Invoked before a batch job is persisted to the database.
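For reference, the javadoc above describes the hook contract: it receives the request details and the binary resource, and returns a String prefix. A minimal interceptor sketch along those lines is shown below; the class name and returned prefix are illustrative, and the registration style assumes the standard @Interceptor/@Hook annotations rather than anything defined in this commit.

    import ca.uhn.fhir.interceptor.api.Hook;
    import ca.uhn.fhir.interceptor.api.Interceptor;
    import ca.uhn.fhir.interceptor.api.Pointcut;
    import ca.uhn.fhir.rest.api.server.RequestDetails;
    import org.hl7.fhir.instance.model.api.IBaseResource;

    @Interceptor
    public class BinaryContentIdPrefixInterceptor {

        // Called immediately before binary content is stored; the returned String
        // becomes the prefix of the externalized content ID.
        @Hook(Pointcut.STORAGE_BINARY_ASSIGN_BINARY_CONTENT_ID_PREFIX)
        public String assignBinaryContentIdPrefix(RequestDetails theRequestDetails, IBaseResource theResource) {
            // Hypothetical prefix; allowable characters depend on the storage sink in use
            return "example-prefix-";
        }
    }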
@@ -159,4 +159,8 @@ public enum VersionEnum {
 VersionEnum[] values = VersionEnum.values();
 return values[values.length - 1];
 }
+
+public boolean isNewerThan(VersionEnum theVersionEnum) {
+return ordinal() > theVersionEnum.ordinal();
+}
 }
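A brief usage sketch of the new ordinal-based comparison helper; the specific enum constants shown are assumed to exist and are for illustration only.

    // Compares release ordering by enum ordinal
    boolean isNewer = VersionEnum.V7_2_0.isNewerThan(VersionEnum.V7_0_0); // expected: true
    boolean isOlder = VersionEnum.V7_0_0.isNewerThan(VersionEnum.V7_2_0); // expected: false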
@@ -218,6 +218,8 @@ public class HapiFlywayMigrateDatabaseCommandTest {
 "-p", "SA"
 };
 
+assertFalse(JdbcUtils.getTableNames(connectionProperties).contains("HFJ_BINARY_STORAGE_BLOB"));
+assertFalse(JdbcUtils.getTableNames(connectionProperties).contains("HFJ_BINARY_STORAGE"));
 assertFalse(JdbcUtils.getTableNames(connectionProperties).contains("HFJ_RESOURCE"));
 assertFalse(JdbcUtils.getTableNames(connectionProperties).contains("HFJ_BLK_EXPORT_JOB"));
 App.main(args);
@@ -60,6 +60,7 @@ create table TRM_CONCEPT_MAP_GRP_ELEMENT (PID bigint not null, SOURCE_CODE varch
 create table TRM_CONCEPT_MAP_GRP_ELM_TGT (PID bigint not null, TARGET_CODE varchar(50) not null, myConceptMapUrl varchar(255), TARGET_DISPLAY varchar(400), TARGET_EQUIVALENCE varchar(50), mySystem varchar(255), mySystemVersion varchar(255), myValueSet varchar(255), CONCEPT_MAP_GRP_ELM_PID bigint not null, primary key (PID));
 create table TRM_CONCEPT_PC_LINK (PID bigint not null, CHILD_PID bigint, PARENT_PID bigint, REL_TYPE integer, CODESYSTEM_PID bigint not null, primary key (PID));
 create table TRM_CONCEPT_PROPERTY (PID bigint not null, PROP_CODESYSTEM varchar(500), PROP_DISPLAY varchar(500), PROP_KEY varchar(500) not null, PROP_TYPE integer not null, PROP_VAL varchar(500), CONCEPT_PID bigint, primary key (PID));
+create table HFJ_BINARY_STORAGE_BLOB ( BLOB_ID varchar(200) not null, BLOB_DATA blob not null, CONTENT_TYPE varchar(100) not null, BLOB_HASH varchar(128), PUBLISHED_DATE timestamp(6) not null, RESOURCE_ID varchar(100) not null, BLOB_SIZE bigint, primary key (BLOB_ID) );
 create index IDX_FORCEDID_TYPE_FORCEDID on HFJ_FORCED_ID (RESOURCE_TYPE, FORCED_ID);
 create unique index IDX_FORCEDID_RESID on HFJ_FORCED_ID (RESOURCE_PID);
 create unique index IDX_FORCEDID_TYPE_RESID on HFJ_FORCED_ID (RESOURCE_TYPE, RESOURCE_PID);
@@ -0,0 +1,5 @@
+---
+type: perf
+issue: 5838
+title: "Migration of remaining database columns still using the LOB datatypes. This change effectively cuts all ties
+  with the inefficient `pg_largeobject` table."
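The recurring pattern behind this changelog entry is visible in the entity hunks below: each legacy @Lob column is kept for backward compatibility but deprecated, and a companion column mapped with Hibernate's Length.LONG32 holds the same data inline (on PostgreSQL this keeps the bytes in the table's regular/TOAST storage rather than in pg_largeobject). A minimal sketch of that pairing, using hypothetical entity and column names:

    import jakarta.persistence.Column;
    import jakarta.persistence.Entity;
    import jakarta.persistence.Id;
    import jakarta.persistence.Lob;
    import org.hibernate.Length;

    @Entity
    public class ExampleLargeValueEntity {

        @Id
        private Long myId;

        // Legacy mapping: a @Lob byte[] is stored through the large-object facility on PostgreSQL
        @Deprecated
        @Lob
        @Column(name = "VAL_LOB", nullable = true)
        private byte[] myValueLob;

        // Replacement mapping: an inline column sized with Length.LONG32,
        // kept in the row (TOASTed when oversized) instead of pg_largeobject
        @Column(name = "VAL_BIN", nullable = true, length = Length.LONG32)
        private byte[] myValueBin;
    }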
@@ -41,6 +41,7 @@ import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.transaction.annotation.Propagation;
 import org.springframework.transaction.annotation.Transactional;
 
+import java.io.ByteArrayInputStream;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
@@ -50,7 +51,7 @@ import java.util.Date;
 import java.util.Optional;
 
 @Transactional
-public class DatabaseBlobBinaryStorageSvcImpl extends BaseBinaryStorageSvcImpl {
+public class DatabaseBinaryContentStorageSvcImpl extends BaseBinaryStorageSvcImpl {
 
 @PersistenceContext(type = PersistenceContextType.TRANSACTION)
 private EntityManager myEntityManager;
@@ -61,9 +62,9 @@ public class DatabaseBlobBinaryStorageSvcImpl extends BaseBinaryStorageSvcImpl {
 @Nonnull
 @Override
 @Transactional(propagation = Propagation.REQUIRED)
-public StoredDetails storeBlob(
+public StoredDetails storeBinaryContent(
 IIdType theResourceId,
-String theBlobIdOrNull,
+String theBinaryContentIdOrNull,
 String theContentType,
 InputStream theInputStream,
 RequestDetails theRequestDetails)
@@ -82,14 +83,20 @@ public class DatabaseBlobBinaryStorageSvcImpl extends BaseBinaryStorageSvcImpl {
 
 BinaryStorageEntity entity = new BinaryStorageEntity();
 entity.setResourceId(theResourceId.toUnqualifiedVersionless().getValue());
-entity.setBlobContentType(theContentType);
+entity.setContentType(theContentType);
 entity.setPublished(publishedDate);
 
 Session session = (Session) myEntityManager.getDelegate();
 LobHelper lobHelper = session.getLobHelper();
 
 byte[] loadedStream = IOUtils.toByteArray(countingInputStream);
-String id = super.provideIdForNewBlob(theBlobIdOrNull, loadedStream, theRequestDetails, theContentType);
-entity.setBlobId(id);
+String id = super.provideIdForNewBinaryContent(
+theBinaryContentIdOrNull, loadedStream, theRequestDetails, theContentType);
+
+entity.setContentId(id);
+entity.setStorageContentBin(loadedStream);
+
+// TODO: remove writing Blob in a future release
 Blob dataBlob = lobHelper.createBlob(loadedStream);
 entity.setBlob(dataBlob);
 
@@ -103,7 +110,7 @@ public class DatabaseBlobBinaryStorageSvcImpl extends BaseBinaryStorageSvcImpl {
 myEntityManager.persist(entity);
 
 return new StoredDetails()
-.setBlobId(id)
+.setBinaryContentId(id)
 .setBytes(bytes)
 .setPublished(publishedDate)
 .setHash(hash)
@@ -111,68 +118,98 @@ public class DatabaseBlobBinaryStorageSvcImpl extends BaseBinaryStorageSvcImpl {
 }
 
 @Override
-public StoredDetails fetchBlobDetails(IIdType theResourceId, String theBlobId) {
+public StoredDetails fetchBinaryContentDetails(IIdType theResourceId, String theBinaryContentId) {
 
 Optional<BinaryStorageEntity> entityOpt = myBinaryStorageEntityDao.findByIdAndResourceId(
-theBlobId, theResourceId.toUnqualifiedVersionless().getValue());
+theBinaryContentId, theResourceId.toUnqualifiedVersionless().getValue());
 if (entityOpt.isEmpty()) {
 return null;
 }
 
 BinaryStorageEntity entity = entityOpt.get();
 return new StoredDetails()
-.setBlobId(theBlobId)
-.setContentType(entity.getBlobContentType())
+.setBinaryContentId(theBinaryContentId)
+.setContentType(entity.getContentType())
 .setHash(entity.getHash())
 .setPublished(entity.getPublished())
 .setBytes(entity.getSize());
 }
 
 @Override
-public boolean writeBlob(IIdType theResourceId, String theBlobId, OutputStream theOutputStream) throws IOException {
+public boolean writeBinaryContent(IIdType theResourceId, String theBinaryContentId, OutputStream theOutputStream)
+throws IOException {
 Optional<BinaryStorageEntity> entityOpt = myBinaryStorageEntityDao.findByIdAndResourceId(
-theBlobId, theResourceId.toUnqualifiedVersionless().getValue());
+theBinaryContentId, theResourceId.toUnqualifiedVersionless().getValue());
 if (entityOpt.isEmpty()) {
 return false;
 }
 
-copyBlobToOutputStream(theOutputStream, entityOpt.get());
+copyBinaryContentToOutputStream(theOutputStream, entityOpt.get());
 
 return true;
 }
 
 @Override
-public void expungeBlob(IIdType theResourceId, String theBlobId) {
+public void expungeBinaryContent(IIdType theResourceId, String theBinaryContentId) {
 Optional<BinaryStorageEntity> entityOpt = myBinaryStorageEntityDao.findByIdAndResourceId(
-theBlobId, theResourceId.toUnqualifiedVersionless().getValue());
+theBinaryContentId, theResourceId.toUnqualifiedVersionless().getValue());
 entityOpt.ifPresent(
-theBinaryStorageEntity -> myBinaryStorageEntityDao.deleteByPid(theBinaryStorageEntity.getBlobId()));
+theBinaryStorageEntity -> myBinaryStorageEntityDao.deleteByPid(theBinaryStorageEntity.getContentId()));
 }
 
 @Override
-public byte[] fetchBlob(IIdType theResourceId, String theBlobId) throws IOException {
+public byte[] fetchBinaryContent(IIdType theResourceId, String theBinaryContentId) throws IOException {
 BinaryStorageEntity entityOpt = myBinaryStorageEntityDao
 .findByIdAndResourceId(
-theBlobId, theResourceId.toUnqualifiedVersionless().getValue())
+theBinaryContentId,
+theResourceId.toUnqualifiedVersionless().getValue())
 .orElseThrow(() -> new ResourceNotFoundException(
-"Unknown blob ID: " + theBlobId + " for resource ID " + theResourceId));
+"Unknown BinaryContent ID: " + theBinaryContentId + " for resource ID " + theResourceId));
 
-return copyBlobToByteArray(entityOpt);
+return copyBinaryContentToByteArray(entityOpt);
 }
 
-void copyBlobToOutputStream(OutputStream theOutputStream, BinaryStorageEntity theEntity) throws IOException {
-try (InputStream inputStream = theEntity.getBlob().getBinaryStream()) {
+void copyBinaryContentToOutputStream(OutputStream theOutputStream, BinaryStorageEntity theEntity)
+throws IOException {
+
+try (InputStream inputStream = getBinaryContent(theEntity)) {
 IOUtils.copy(inputStream, theOutputStream);
 } catch (SQLException e) {
 throw new IOException(Msg.code(1341) + e);
 }
 }
 
-byte[] copyBlobToByteArray(BinaryStorageEntity theEntity) throws IOException {
-try {
-return ByteStreams.toByteArray(theEntity.getBlob().getBinaryStream());
+byte[] copyBinaryContentToByteArray(BinaryStorageEntity theEntity) throws IOException {
+byte[] retVal;
+
+try (InputStream inputStream = getBinaryContent(theEntity)) {
+retVal = ByteStreams.toByteArray(inputStream);
 } catch (SQLException e) {
 throw new IOException(Msg.code(1342) + e);
 }
+
+return retVal;
+}
+
+/**
+ *
+ * The caller is responsible for closing the returned stream.
+ *
+ * @param theEntity
+ * @return
+ * @throws SQLException
+ */
+private InputStream getBinaryContent(BinaryStorageEntity theEntity) throws SQLException {
+InputStream retVal;
+
+if (theEntity.hasStorageContent()) {
+retVal = new ByteArrayInputStream(theEntity.getStorageContentBin());
+} else if (theEntity.hasBlob()) {
+retVal = theEntity.getBlob().getBinaryStream();
+} else {
+retVal = new ByteArrayInputStream(new byte[0]);
+}
+
+return retVal;
+}
 }
 }
@@ -29,11 +29,11 @@ import java.util.Optional;
 
 public interface IBinaryStorageEntityDao extends JpaRepository<BinaryStorageEntity, String>, IHapiFhirJpaRepository {
 
-@Query("SELECT e FROM BinaryStorageEntity e WHERE e.myBlobId = :blob_id AND e.myResourceId = :resource_id")
+@Query("SELECT e FROM BinaryStorageEntity e WHERE e.myContentId = :content_id AND e.myResourceId = :resource_id")
 Optional<BinaryStorageEntity> findByIdAndResourceId(
-@Param("blob_id") String theBlobId, @Param("resource_id") String theResourceId);
+@Param("content_id") String theContentId, @Param("resource_id") String theResourceId);
 
 @Modifying
-@Query("DELETE FROM BinaryStorageEntity t WHERE t.myBlobId = :pid")
+@Query("DELETE FROM BinaryStorageEntity t WHERE t.myContentId = :pid")
 void deleteByPid(@Param("pid") String theId);
 }
@@ -43,12 +43,14 @@ import jakarta.persistence.SequenceGenerator;
 import jakarta.persistence.Table;
 import jakarta.persistence.Temporal;
 import jakarta.persistence.TemporalType;
+import jakarta.persistence.Transient;
 import jakarta.persistence.UniqueConstraint;
 import org.apache.commons.lang3.Validate;
 import org.apache.commons.lang3.builder.EqualsBuilder;
 import org.apache.commons.lang3.builder.HashCodeBuilder;
 import org.apache.commons.lang3.builder.ToStringBuilder;
 import org.apache.commons.lang3.builder.ToStringStyle;
+import org.hibernate.Length;
 import org.hibernate.search.engine.backend.types.Projectable;
 import org.hibernate.search.engine.backend.types.Searchable;
 import org.hibernate.search.mapper.pojo.bridge.mapping.annotation.PropertyBinderRef;
@@ -56,7 +58,10 @@ import org.hibernate.search.mapper.pojo.bridge.mapping.annotation.RoutingBinderR
 import org.hibernate.search.mapper.pojo.mapping.definition.annotation.FullTextField;
 import org.hibernate.search.mapper.pojo.mapping.definition.annotation.GenericField;
 import org.hibernate.search.mapper.pojo.mapping.definition.annotation.Indexed;
+import org.hibernate.search.mapper.pojo.mapping.definition.annotation.IndexingDependency;
+import org.hibernate.search.mapper.pojo.mapping.definition.annotation.ObjectPath;
 import org.hibernate.search.mapper.pojo.mapping.definition.annotation.PropertyBinding;
+import org.hibernate.search.mapper.pojo.mapping.definition.annotation.PropertyValue;
 import org.hl7.fhir.r4.model.Coding;
 
 import java.io.Serializable;
@@ -68,6 +73,8 @@ import java.util.List;
 import java.util.Set;
 import java.util.stream.Collectors;
 
+import static java.util.Objects.isNull;
+import static java.util.Objects.nonNull;
 import static org.apache.commons.lang3.StringUtils.left;
 import static org.apache.commons.lang3.StringUtils.length;
 
@@ -165,15 +172,14 @@ public class TermConcept implements Serializable {
 @Column(name = "INDEX_STATUS", nullable = true)
 private Long myIndexStatus;
 
+@Deprecated(since = "7.2.0")
 @Lob
 @Column(name = "PARENT_PIDS", nullable = true)
-@FullTextField(
-name = "myParentPids",
-searchable = Searchable.YES,
-projectable = Projectable.YES,
-analyzer = "conceptParentPidsAnalyzer")
 private String myParentPids;
 
+@Column(name = "PARENT_PIDS_VC", nullable = true, length = Length.LONG32)
+private String myParentPidsVc;
+
 @OneToMany(
 cascade = {},
 fetch = FetchType.LAZY,
@@ -356,8 +362,15 @@ public class TermConcept implements Serializable {
 return this;
 }
 
+@Transient
+@FullTextField(
+name = "myParentPids",
+searchable = Searchable.YES,
+projectable = Projectable.YES,
+analyzer = "conceptParentPidsAnalyzer")
+@IndexingDependency(derivedFrom = @ObjectPath({@PropertyValue(propertyName = "myParentPidsVc")}))
 public String getParentPidsAsString() {
-return myParentPids;
+return nonNull(myParentPidsVc) ? myParentPidsVc : myParentPids;
 }
 
 public List<TermConceptParentChildLink> getParents() {
@@ -437,7 +450,7 @@ public class TermConcept implements Serializable {
 @PreUpdate
 @PrePersist
 public void prePersist() {
-if (myParentPids == null) {
+if (isNull(myParentPids) && isNull(myParentPidsVc)) {
 Set<Long> parentPids = new HashSet<>();
 TermConcept entity = this;
 parentPids(entity, parentPids);
@@ -464,6 +477,7 @@ public class TermConcept implements Serializable {
 }
 
 public TermConcept setParentPids(String theParentPids) {
+myParentPidsVc = theParentPids;
 myParentPids = theParentPids;
 return this;
 }
@@ -20,6 +20,7 @@
 package ca.uhn.fhir.jpa.entity;
 
 import ca.uhn.fhir.util.ValidateUtil;
+import com.google.common.annotations.VisibleForTesting;
 import jakarta.annotation.Nonnull;
 import jakarta.persistence.Column;
 import jakarta.persistence.Entity;
@@ -41,6 +42,7 @@ import org.apache.commons.lang3.builder.EqualsBuilder;
 import org.apache.commons.lang3.builder.HashCodeBuilder;
 import org.apache.commons.lang3.builder.ToStringBuilder;
 import org.apache.commons.lang3.builder.ToStringStyle;
+import org.hibernate.Length;
 import org.hibernate.annotations.JdbcTypeCode;
 import org.hibernate.search.engine.backend.types.Projectable;
 import org.hibernate.search.engine.backend.types.Searchable;
@@ -68,7 +70,7 @@ import static org.apache.commons.lang3.StringUtils.length;
 public class TermConceptProperty implements Serializable {
 public static final int MAX_PROPTYPE_ENUM_LENGTH = 6;
 private static final long serialVersionUID = 1L;
-private static final int MAX_LENGTH = 500;
+public static final int MAX_LENGTH = 500;
 
 @ManyToOne(fetch = FetchType.LAZY)
 @JoinColumn(
@@ -106,10 +108,14 @@ public class TermConceptProperty implements Serializable {
 @GenericField(name = "myValueString", searchable = Searchable.YES)
 private String myValue;
 
+@Deprecated(since = "7.2.0")
 @Column(name = "PROP_VAL_LOB")
 @Lob()
 private byte[] myValueLob;
 
+@Column(name = "PROP_VAL_BIN", nullable = true, length = Length.LONG32)
+private byte[] myValueBin;
+
 @Enumerated(EnumType.ORDINAL)
 @Column(name = "PROP_TYPE", nullable = false, length = MAX_PROPTYPE_ENUM_LENGTH)
 @JdbcTypeCode(SqlTypes.INTEGER)
@@ -196,8 +202,8 @@ public class TermConceptProperty implements Serializable {
 * property, and the code for a {@link TermConceptPropertyTypeEnum#CODING coding} property.
 */
 public String getValue() {
-if (hasValueLob()) {
-return getValueLobAsString();
+if (hasValueBin()) {
+return getValueBinAsString();
 }
 return myValue;
 }
@@ -208,36 +214,41 @@ public class TermConceptProperty implements Serializable {
 */
 public TermConceptProperty setValue(String theValue) {
 if (theValue.length() > MAX_LENGTH) {
-setValueLob(theValue);
+setValueBin(theValue);
 } else {
 myValueLob = null;
+myValueBin = null;
 }
 myValue = left(theValue, MAX_LENGTH);
 return this;
 }
 
-public boolean hasValueLob() {
+public boolean hasValueBin() {
+if (myValueBin != null && myValueBin.length > 0) {
+return true;
+}
+
 if (myValueLob != null && myValueLob.length > 0) {
 return true;
 }
 return false;
 }
 
-public byte[] getValueLob() {
-return myValueLob;
-}
-
-public TermConceptProperty setValueLob(byte[] theValueLob) {
-myValueLob = theValueLob;
+public TermConceptProperty setValueBin(byte[] theValueBin) {
+myValueBin = theValueBin;
+myValueLob = theValueBin;
+
 return this;
 }
 
-public TermConceptProperty setValueLob(String theValueLob) {
-myValueLob = theValueLob.getBytes(StandardCharsets.UTF_8);
-return this;
+public TermConceptProperty setValueBin(String theValueBin) {
+return setValueBin(theValueBin.getBytes(StandardCharsets.UTF_8));
 }
 
-public String getValueLobAsString() {
+public String getValueBinAsString() {
+if (myValueBin != null && myValueBin.length > 0) {
+return new String(myValueBin, StandardCharsets.UTF_8);
+}
+
 return new String(myValueLob, StandardCharsets.UTF_8);
 }
 
@@ -295,4 +306,24 @@ public class TermConceptProperty implements Serializable {
 public Long getPid() {
 return myId;
 }
+
+@VisibleForTesting
+public byte[] getValueBlobForTesting() {
+return myValueLob;
+}
+
+@VisibleForTesting
+public void setValueBlobForTesting(byte[] theValueLob) {
+myValueLob = theValueLob;
+}
+
+@VisibleForTesting
+public byte[] getValueBinForTesting() {
+return myValueBin;
+}
+
+@VisibleForTesting
+public void setValueBinForTesting(byte[] theValuebin) {
+myValueBin = theValuebin;
+}
 }
@@ -40,11 +40,13 @@ import org.apache.commons.lang3.builder.EqualsBuilder;
 import org.apache.commons.lang3.builder.HashCodeBuilder;
 import org.apache.commons.lang3.builder.ToStringBuilder;
 import org.apache.commons.lang3.builder.ToStringStyle;
+import org.hibernate.Length;
 
 import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.List;
 
+import static org.apache.commons.lang3.StringUtils.isNotEmpty;
 import static org.apache.commons.lang3.StringUtils.left;
 import static org.apache.commons.lang3.StringUtils.length;
 
@@ -98,10 +100,14 @@ public class TermValueSetConcept implements Serializable {
 @Column(name = "SOURCE_PID", nullable = true)
 private Long mySourceConceptPid;
 
+@Deprecated(since = "7.2.0")
 @Lob
 @Column(name = "SOURCE_DIRECT_PARENT_PIDS", nullable = true)
 private String mySourceConceptDirectParentPids;
 
+@Column(name = "SOURCE_DIRECT_PARENT_PIDS_VC", nullable = true, length = Length.LONG32)
+private String mySourceConceptDirectParentPidsVc;
+
 @Column(name = "SYSTEM_URL", nullable = false, length = TermCodeSystem.MAX_URL_LENGTH)
 private String mySystem;
 
@@ -264,7 +270,7 @@ public class TermValueSetConcept implements Serializable {
 .append("valueSetName", this.getValueSetName())
 .append("display", myDisplay)
 .append("designationCount", myDesignations != null ? myDesignations.size() : "(null)")
-.append("parentPids", mySourceConceptDirectParentPids)
+.append("parentPids", getSourceConceptDirectParentPids())
 .toString();
 }
 
@@ -282,5 +288,12 @@ public class TermValueSetConcept implements Serializable {
 
 public void setSourceConceptDirectParentPids(String theSourceConceptDirectParentPids) {
 mySourceConceptDirectParentPids = theSourceConceptDirectParentPids;
+mySourceConceptDirectParentPidsVc = theSourceConceptDirectParentPids;
+}
+
+public String getSourceConceptDirectParentPids() {
+return isNotEmpty(mySourceConceptDirectParentPidsVc)
+? mySourceConceptDirectParentPidsVc
+: mySourceConceptDirectParentPids;
 }
 }
@@ -139,6 +139,62 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
 forcedId.dropIndex("20240402.2", "IDX_FORCEDID_RESID");
 forcedId.dropIndex("20240402.3", "IDX_FORCEDID_TYPE_FID");
 forcedId.dropIndex("20240402.4", "IDX_FORCEID_FID");
+
+// Migration from LOB
+{
+Builder.BuilderWithTableName binaryStorageBlobTable = version.onTable("HFJ_BINARY_STORAGE_BLOB");
+
+binaryStorageBlobTable
+.renameColumn("20240404.1", "BLOB_ID", "CONTENT_ID")
+.renameColumn("20240404.2", "BLOB_SIZE", "CONTENT_SIZE")
+.renameColumn("20240404.3", "BLOB_HASH", "CONTENT_HASH");
+
+binaryStorageBlobTable
+.modifyColumn("20240404.4", "BLOB_DATA")
+.nullable()
+.withType(ColumnTypeEnum.BLOB);
+
+binaryStorageBlobTable
+.addColumn("20240404.5", "STORAGE_CONTENT_BIN")
+.nullable()
+.type(ColumnTypeEnum.BINARY);
+
+binaryStorageBlobTable.migrateBlobToBinary("20240404.6", "BLOB_DATA", "STORAGE_CONTENT_BIN");
+
+binaryStorageBlobTable.renameTable("20240404.7", "HFJ_BINARY_STORAGE");
+}
+
+{
+Builder.BuilderWithTableName termConceptPropertyTable = version.onTable("TRM_CONCEPT_PROPERTY");
+
+termConceptPropertyTable
+.addColumn("20240409.1", "PROP_VAL_BIN")
+.nullable()
+.type(ColumnTypeEnum.BINARY);
+
+termConceptPropertyTable.migrateBlobToBinary("20240409.2", "PROP_VAL_LOB", "PROP_VAL_BIN");
+}
+
+{
+Builder.BuilderWithTableName termValueSetConceptTable = version.onTable("TRM_VALUESET_CONCEPT");
+termValueSetConceptTable
+.addColumn("20240409.3", "SOURCE_DIRECT_PARENT_PIDS_VC")
+.nullable()
+.type(ColumnTypeEnum.TEXT);
+
+termValueSetConceptTable.migrateClobToText(
+"20240409.4", "SOURCE_DIRECT_PARENT_PIDS", "SOURCE_DIRECT_PARENT_PIDS_VC");
+}
+
+{
+Builder.BuilderWithTableName termConceptTable = version.onTable("TRM_CONCEPT");
+termConceptTable
+.addColumn("20240410.1", "PARENT_PIDS_VC")
+.nullable()
+.type(ColumnTypeEnum.TEXT);
+
+termConceptTable.migrateClobToText("20240410.2", "PARENT_PIDS", "PARENT_PIDS_VC");
+}
 }
 
 protected void init700() {
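Conceptually, a step such as migrateBlobToBinary copies each row's LOB into the new inline column so the table no longer depends on large-object storage. The JDBC sketch below illustrates that idea for the HFJ_BINARY_STORAGE_BLOB table named above; it is an illustration only and not the actual migration task, which also handles batching, database dialects and error handling.

    import java.sql.Blob;
    import java.sql.Connection;
    import java.sql.PreparedStatement;
    import java.sql.ResultSet;
    import java.sql.SQLException;

    public class BlobToBinaryCopySketch {

        // Copies BLOB_DATA into STORAGE_CONTENT_BIN for every row of HFJ_BINARY_STORAGE_BLOB.
        // Conceptual illustration only, using the column names from the migration above.
        static void copy(Connection theConnection) throws SQLException {
            String select = "SELECT CONTENT_ID, BLOB_DATA FROM HFJ_BINARY_STORAGE_BLOB";
            String update = "UPDATE HFJ_BINARY_STORAGE_BLOB SET STORAGE_CONTENT_BIN = ? WHERE CONTENT_ID = ?";
            try (PreparedStatement selectStmt = theConnection.prepareStatement(select);
                    ResultSet rs = selectStmt.executeQuery();
                    PreparedStatement updateStmt = theConnection.prepareStatement(update)) {
                while (rs.next()) {
                    String id = rs.getString("CONTENT_ID");
                    Blob blob = rs.getBlob("BLOB_DATA");
                    byte[] bytes = blob == null ? null : blob.getBytes(1, (int) blob.length());
                    updateStmt.setBytes(1, bytes);
                    updateStmt.setString(2, id);
                    updateStmt.executeUpdate();
                }
            }
        }
    }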
@@ -243,7 +243,7 @@ public class JpaPackageCache extends BasePackageCacheManager implements IHapiPac
 */
 private byte[] fetchBlobFromBinary(IBaseBinary theBinary) throws IOException {
 if (myBinaryStorageSvc != null && !(myBinaryStorageSvc instanceof NullBinaryStorageSvcImpl)) {
-return myBinaryStorageSvc.fetchDataBlobFromBinary(theBinary);
+return myBinaryStorageSvc.fetchDataByteArrayFromBinary(theBinary);
 } else {
 byte[] value = BinaryUtil.getOrCreateData(myCtx, theBinary).getValue();
 if (value == null) {
@@ -0,0 +1,81 @@
+package ca.uhn.fhir.jpa.entity;
+
+import com.google.common.base.Strings;
+import org.junit.jupiter.api.Test;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.notNullValue;
+import static org.hamcrest.Matchers.startsWith;
+
+public class TermConceptPropertyTest {
+
+private static final String ourVeryLongString = Strings.repeat("a", TermConceptProperty.MAX_LENGTH+1);
+
+@Test
+public void testSetValue_whenValueExceedsMAX_LENGTH_willWriteToBlobAndBin(){
+// given
+TermConceptProperty termConceptProperty = new TermConceptProperty();
+
+// when
+termConceptProperty.setValue(ourVeryLongString);
+
+// then
+assertThat(termConceptProperty.getValueBlobForTesting(), notNullValue());
+assertThat(termConceptProperty.getValueBinForTesting(), notNullValue());
+}
+
+@Test
+public void testHasValueBin_willDefaultToAssertingValueBin(){
+// given
+TermConceptProperty termConceptProperty = new TermConceptProperty();
+termConceptProperty.setValueBinForTesting(ourVeryLongString.getBytes());
+termConceptProperty.setValueBlobForTesting(null);
+
+// when/then
+assertThat(termConceptProperty.hasValueBin(), is(true));
+
+}
+
+@Test
+public void testHasValueBin_willAssertValueBlob_whenValueBinNotPresent(){
+// given
+TermConceptProperty termConceptProperty = new TermConceptProperty();
+termConceptProperty.setValueBinForTesting(null);
+termConceptProperty.setValueBlobForTesting(ourVeryLongString.getBytes());
+
+// when/then
+assertThat(termConceptProperty.hasValueBin(), is(true));
+
+}
+
+@Test
+public void testGetValue_whenValueExceedsMAX_LENGTH_willGetValueBinByDefault(){
+// given
+TermConceptProperty termConceptProperty = new TermConceptProperty();
+termConceptProperty.setValueBinForTesting(ourVeryLongString.getBytes());
+termConceptProperty.setValueBlobForTesting(null);
+
+// when
+String value = termConceptProperty.getValue();
+
+// then
+assertThat(value, startsWith("a"));
+
+}
+
+@Test
+public void testGetValue_whenOnlyValueBlobIsSet_willGetValueValueBlob(){
+// given
+TermConceptProperty termConceptProperty = new TermConceptProperty();
+termConceptProperty.setValueBinForTesting(null);
+termConceptProperty.setValueBlobForTesting(ourVeryLongString.getBytes());
+
+// when
+String value = termConceptProperty.getValue();
+
+// then
+assertThat(value, startsWith("a"));
+}
+
+}
@@ -26,37 +26,44 @@ import jakarta.persistence.Lob;
 import jakarta.persistence.Table;
 import jakarta.persistence.Temporal;
 import jakarta.persistence.TemporalType;
+import org.hibernate.Length;
 
 import java.sql.Blob;
 import java.util.Date;
 
+import static java.util.Objects.nonNull;
+
 @Entity
-@Table(name = "HFJ_BINARY_STORAGE_BLOB")
+@Table(name = "HFJ_BINARY_STORAGE")
 public class BinaryStorageEntity {
 
 @Id
-@Column(name = "BLOB_ID", length = 200, nullable = false)
-// N.B GGG: Note that the `blob id` is the same as the `externalized binary id`.
-private String myBlobId;
+@Column(name = "CONTENT_ID", length = 200, nullable = false)
+// N.B GGG: Note that the `content id` is the same as the `externalized binary id`.
+private String myContentId;
 
 @Column(name = "RESOURCE_ID", length = 100, nullable = false)
 private String myResourceId;
 
-@Column(name = "BLOB_SIZE", nullable = true)
+@Column(name = "CONTENT_SIZE", nullable = true)
 private long mySize;
 
 @Column(name = "CONTENT_TYPE", nullable = false, length = 100)
-private String myBlobContentType;
+private String myContentType;
 
-@Lob
-@Column(name = "BLOB_DATA", nullable = false, insertable = true, updatable = false)
+@Deprecated(since = "7.2.0")
+@Lob // TODO: VC column added in 7.2.0 - Remove non-VC column later
+@Column(name = "BLOB_DATA", nullable = true, insertable = true, updatable = false)
 private Blob myBlob;
 
+@Column(name = "STORAGE_CONTENT_BIN", nullable = true, length = Length.LONG32)
+private byte[] myStorageContentBin;
+
 @Temporal(TemporalType.TIMESTAMP)
 @Column(name = "PUBLISHED_DATE", nullable = false)
 private Date myPublished;
 
-@Column(name = "BLOB_HASH", length = 128, nullable = true)
+@Column(name = "CONTENT_HASH", length = 128, nullable = true)
 private String myHash;
 
 public Date getPublished() {
@@ -71,8 +78,8 @@ public class BinaryStorageEntity {
 return myHash;
 }
 
-public void setBlobId(String theBlobId) {
-myBlobId = theBlobId;
+public void setContentId(String theContentId) {
+myContentId = theContentId;
 }
 
 public void setResourceId(String theResourceId) {
@@ -83,12 +90,12 @@ public class BinaryStorageEntity {
 return mySize;
 }
 
-public String getBlobContentType() {
-return myBlobContentType;
+public String getContentType() {
+return myContentType;
 }
 
-public void setBlobContentType(String theBlobContentType) {
-myBlobContentType = theBlobContentType;
+public void setContentType(String theContentType) {
+myContentType = theContentType;
 }
 
 public Blob getBlob() {
@@ -99,8 +106,8 @@ public class BinaryStorageEntity {
 myBlob = theBlob;
 }
 
-public String getBlobId() {
-return myBlobId;
+public String getContentId() {
+return myContentId;
 }
 
 public void setSize(long theSize) {
@@ -110,4 +117,21 @@ public class BinaryStorageEntity {
 public void setHash(String theHash) {
 myHash = theHash;
 }
+
+public byte[] getStorageContentBin() {
+return myStorageContentBin;
+}
+
+public BinaryStorageEntity setStorageContentBin(byte[] theStorageContentBin) {
+myStorageContentBin = theStorageContentBin;
+return this;
+}
+
+public boolean hasStorageContent() {
+return nonNull(myStorageContentBin);
+}
+
+public boolean hasBlob() {
+return nonNull(myBlob);
+}
 }
@@ -103,7 +103,7 @@ public class BinaryAccessProviderTest {
 ServletOutputStream sos = spy(ServletOutputStream.class);
 when(myDaoRegistry.getResourceDao(eq("DocumentReference"))).thenReturn(myResourceDao);
 when(myResourceDao.read(any(), any(), anyBoolean())).thenReturn(docRef);
-when(myBinaryStorageSvc.fetchBlobDetails(any(), any())).thenReturn(blobDetails);
+when(myBinaryStorageSvc.fetchBinaryContentDetails(any(), any())).thenReturn(blobDetails);
 when(theServletResponse.getOutputStream()).thenReturn(sos);
 myBinaryAccessProvider.setTargetAttachmentIdForUnitTest(true);
 
@@ -111,8 +111,8 @@ public class BinaryAccessProviderTest {
 myBinaryAccessProvider.binaryAccessRead(docRef.getIdElement(), new StringType("DocumentReference.content.attachment"), myRequestDetails, theServletRequest, theServletResponse);
 } catch (IOException e) {
 }
-verify(myBinaryStorageSvc, times(1)).fetchBlobDetails(any(), any());
-verify(myBinaryStorageSvc, times(1)).writeBlob(any(), any(), any());
+verify(myBinaryStorageSvc, times(1)).fetchBinaryContentDetails(any(), any());
+verify(myBinaryStorageSvc, times(1)).writeBinaryContent(any(), any(), any());
 verify(theServletResponse, times(1)).setStatus(200);
 verify(theServletResponse, times(1)).setContentType(any());
 verify(theServletResponse, times(1)).setContentLength(0);
@@ -132,7 +132,7 @@ public class BinaryAccessProviderTest {
 } catch (InvalidRequestException | IOException e) {
 assertEquals(Msg.code(1331) + "Can not find the requested binary content. It may have been deleted.", e.getMessage());
 }
-verify(myBinaryStorageSvc, times(1)).fetchBlobDetails(any(), any());
+verify(myBinaryStorageSvc, times(1)).fetchBinaryContentDetails(any(), any());
 }
 
 @Test
@@ -247,16 +247,16 @@ public class BinaryAccessProviderTest {
 DaoMethodOutcome daoOutcome = new DaoMethodOutcome();
 daoOutcome.setResource(docRef);
 StoredDetails sd = spy(StoredDetails.class);
-sd.setBlobId("123");
+sd.setBinaryContentId("123");
 sd.setBytes(15);
 when(myDaoRegistry.getResourceDao(eq("DocumentReference"))).thenReturn(myResourceDao);
 when(myResourceDao.read(any(), any(), anyBoolean())).thenReturn(docRef);
 when(myResourceDao.update(docRef, myRequestDetails)).thenReturn(daoOutcome);
 when(theServletRequest.getContentType()).thenReturn("Integer");
 when(theServletRequest.getContentLength()).thenReturn(15);
-when(myBinaryStorageSvc.shouldStoreBlob(15, docRef.getIdElement(), "Integer")).thenReturn(true);
+when(myBinaryStorageSvc.shouldStoreBinaryContent(15, docRef.getIdElement(), "Integer")).thenReturn(true);
 myRequestDetails.setServletRequest(theServletRequest);
-doReturn(sd).when(myBinaryStorageSvc).storeBlob(eq(docRef.getIdElement()), isNull(), eq("Integer"), any(InputStream.class), any(RequestDetails.class));
+doReturn(sd).when(myBinaryStorageSvc).storeBinaryContent(eq(docRef.getIdElement()), isNull(), eq("Integer"), any(InputStream.class), any(RequestDetails.class));
 myRequestDetails.setRequestContents(SOME_BYTES);
 
 try {
@@ -265,7 +265,7 @@ public class BinaryAccessProviderTest {
 assertEquals(docRef.getId(), outcome.getIdElement().getValue());
 } catch (IOException e) {
 }
-verify(myBinaryStorageSvc, times(1)).storeBlob(any(), any(), any(), any(), any(ServletRequestDetails.class));
+verify(myBinaryStorageSvc, times(1)).storeBinaryContent(any(), any(), any(), any(), any(ServletRequestDetails.class));
 }
 
 @Test
@@ -2,6 +2,7 @@ package ca.uhn.fhir.jpa.binstore;
 
 import ca.uhn.fhir.jpa.binary.api.IBinaryStorageSvc;
 import ca.uhn.fhir.jpa.binary.api.StoredDetails;
+import ca.uhn.fhir.jpa.dao.data.IBinaryStorageEntityDao;
 import ca.uhn.fhir.jpa.model.entity.BinaryStorageEntity;
 import ca.uhn.fhir.jpa.test.BaseJpaR4Test;
 import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
@@ -20,9 +21,11 @@ import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.sql.Blob;
 import java.sql.SQLException;
+import java.util.Optional;
 
 import static org.hamcrest.CoreMatchers.containsString;
 import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.is;
 import static org.hamcrest.Matchers.matchesPattern;
 import static org.junit.jupiter.api.Assertions.assertArrayEquals;
 import static org.junit.jupiter.api.Assertions.assertEquals;
@@ -32,16 +35,21 @@ import static org.junit.jupiter.api.Assertions.assertNull;
 import static org.junit.jupiter.api.Assertions.assertTrue;
 import static org.junit.jupiter.api.Assertions.fail;
 import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
 import static org.mockito.Mockito.when;
 
-@ContextConfiguration(classes = DatabaseBlobBinaryStorageSvcImplTest.MyConfig.class)
-public class DatabaseBlobBinaryStorageSvcImplTest extends BaseJpaR4Test {
+@ContextConfiguration(classes = DatabaseBinaryContentStorageSvcImplTest.MyConfig.class)
+public class DatabaseBinaryContentStorageSvcImplTest extends BaseJpaR4Test {
 private static final byte[] SOME_BYTES = {2, 3, 4, 5, 6, 7, 8, 9, 8, 7, 6, 5, 4, 3, 2, 1};
 
 @Autowired
-@Qualifier("databaseBlobBinaryStorageSvc")
+@Qualifier("databaseBinaryContentStorageSvc")
 private IBinaryStorageSvc mySvc;
 
+@Autowired
+private IBinaryStorageEntityDao myBinaryStorageEntityDao;
+
 @Test
 public void testStoreAndRetrieve() throws IOException {
 
@@ -53,7 +61,7 @@ public class DatabaseBlobBinaryStorageSvcImplTest extends BaseJpaR4Test {
 ByteArrayInputStream inputStream = new ByteArrayInputStream(SOME_BYTES);
 String contentType = "image/png";
 IdType resourceId = new IdType("Binary/123");
-StoredDetails outcome = mySvc.storeBlob(resourceId, null, contentType, inputStream, new ServletRequestDetails());
+StoredDetails outcome = mySvc.storeBinaryContent(resourceId, null, contentType, inputStream, new ServletRequestDetails());
 
 myCaptureQueriesListener.logAllQueriesForCurrentThread();
 
@ -63,16 +71,16 @@ public class DatabaseBlobBinaryStorageSvcImplTest extends BaseJpaR4Test {
|
||||||
|
|
||||||
myCaptureQueriesListener.clear();
|
myCaptureQueriesListener.clear();
|
||||||
|
|
||||||
assertThat(outcome.getBlobId(), matchesPattern("^[a-zA-Z0-9]{100}$"));
|
assertThat(outcome.getBinaryContentId(), matchesPattern("^[a-zA-Z0-9]{100}$"));
|
||||||
assertEquals(16, outcome.getBytes());
|
assertEquals(16, outcome.getBytes());
|
||||||
|
|
||||||
/*
|
/*
|
||||||
* Read back the details
|
* Read back the details
|
||||||
*/
|
*/
|
||||||
|
|
||||||
StoredDetails details = mySvc.fetchBlobDetails(resourceId, outcome.getBlobId());
|
StoredDetails details = mySvc.fetchBinaryContentDetails(resourceId, outcome.getBinaryContentId());
|
||||||
assertEquals(16L, details.getBytes());
|
assertEquals(16L, details.getBytes());
|
||||||
assertEquals(outcome.getBlobId(), details.getBlobId());
|
assertEquals(outcome.getBinaryContentId(), details.getBinaryContentId());
|
||||||
assertEquals("image/png", details.getContentType());
|
assertEquals("image/png", details.getContentType());
|
||||||
assertEquals("dc7197cfab936698bef7818975c185a9b88b71a0a0a2493deea487706ddf20cb", details.getHash());
|
assertEquals("dc7197cfab936698bef7818975c185a9b88b71a0a0a2493deea487706ddf20cb", details.getHash());
|
||||||
assertNotNull(details.getPublished());
|
assertNotNull(details.getPublished());
|
||||||
|
@ -82,10 +90,10 @@ public class DatabaseBlobBinaryStorageSvcImplTest extends BaseJpaR4Test {
|
||||||
*/
|
*/
|
||||||
|
|
||||||
ByteArrayOutputStream capture = new ByteArrayOutputStream();
|
ByteArrayOutputStream capture = new ByteArrayOutputStream();
|
||||||
mySvc.writeBlob(resourceId, outcome.getBlobId(), capture);
|
mySvc.writeBinaryContent(resourceId, outcome.getBinaryContentId(), capture);
|
||||||
|
|
||||||
assertArrayEquals(SOME_BYTES, capture.toByteArray());
|
assertArrayEquals(SOME_BYTES, capture.toByteArray());
|
||||||
assertArrayEquals(SOME_BYTES, mySvc.fetchBlob(resourceId, outcome.getBlobId()));
|
assertArrayEquals(SOME_BYTES, mySvc.fetchBinaryContent(resourceId, outcome.getBinaryContentId()));
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
|
@ -106,8 +114,8 @@ public class DatabaseBlobBinaryStorageSvcImplTest extends BaseJpaR4Test {
|
||||||
ByteArrayInputStream inputStream = new ByteArrayInputStream(SOME_BYTES);
|
ByteArrayInputStream inputStream = new ByteArrayInputStream(SOME_BYTES);
|
||||||
String contentType = "image/png";
|
String contentType = "image/png";
|
||||||
IdType resourceId = new IdType("Binary/123");
|
IdType resourceId = new IdType("Binary/123");
|
||||||
StoredDetails outcome = mySvc.storeBlob(resourceId, "ABCDEFG", contentType, inputStream, new ServletRequestDetails());
|
StoredDetails outcome = mySvc.storeBinaryContent(resourceId, "ABCDEFG", contentType, inputStream, new ServletRequestDetails());
|
||||||
assertEquals("ABCDEFG", outcome.getBlobId());
|
assertEquals("ABCDEFG", outcome.getBinaryContentId());
|
||||||
|
|
||||||
myCaptureQueriesListener.logAllQueriesForCurrentThread();
|
myCaptureQueriesListener.logAllQueriesForCurrentThread();
|
||||||
|
|
||||||
|
@ -123,9 +131,9 @@ public class DatabaseBlobBinaryStorageSvcImplTest extends BaseJpaR4Test {
|
||||||
* Read back the details
|
* Read back the details
|
||||||
*/
|
*/
|
||||||
|
|
||||||
StoredDetails details = mySvc.fetchBlobDetails(resourceId, outcome.getBlobId());
|
StoredDetails details = mySvc.fetchBinaryContentDetails(resourceId, outcome.getBinaryContentId());
|
||||||
assertEquals(16L, details.getBytes());
|
assertEquals(16L, details.getBytes());
|
||||||
assertEquals(outcome.getBlobId(), details.getBlobId());
|
assertEquals(outcome.getBinaryContentId(), details.getBinaryContentId());
|
||||||
assertEquals("image/png", details.getContentType());
|
assertEquals("image/png", details.getContentType());
|
||||||
assertEquals("dc7197cfab936698bef7818975c185a9b88b71a0a0a2493deea487706ddf20cb", details.getHash());
|
assertEquals("dc7197cfab936698bef7818975c185a9b88b71a0a0a2493deea487706ddf20cb", details.getHash());
|
||||||
assertNotNull(details.getPublished());
|
assertNotNull(details.getPublished());
|
||||||
|
@ -135,43 +143,42 @@ public class DatabaseBlobBinaryStorageSvcImplTest extends BaseJpaR4Test {
|
||||||
*/
|
*/
|
||||||
|
|
||||||
ByteArrayOutputStream capture = new ByteArrayOutputStream();
|
ByteArrayOutputStream capture = new ByteArrayOutputStream();
|
||||||
mySvc.writeBlob(resourceId, outcome.getBlobId(), capture);
|
mySvc.writeBinaryContent(resourceId, outcome.getBinaryContentId(), capture);
|
||||||
|
|
||||||
assertArrayEquals(SOME_BYTES, capture.toByteArray());
|
assertArrayEquals(SOME_BYTES, capture.toByteArray());
|
||||||
assertArrayEquals(SOME_BYTES, mySvc.fetchBlob(resourceId, outcome.getBlobId()));
|
assertArrayEquals(SOME_BYTES, mySvc.fetchBinaryContent(resourceId, outcome.getBinaryContentId()));
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
public void testFetchBlobUnknown() throws IOException {
|
public void testFetchBinaryContentUnknown() throws IOException {
|
||||||
try {
|
try {
|
||||||
mySvc.fetchBlob(new IdType("Patient/123"), "1111111");
|
mySvc.fetchBinaryContent(new IdType("Patient/123"), "1111111");
|
||||||
fail();
|
fail();
|
||||||
} catch (ResourceNotFoundException e) {
|
} catch (ResourceNotFoundException e) {
|
||||||
assertEquals("Unknown blob ID: 1111111 for resource ID Patient/123", e.getMessage());
|
assertEquals("Unknown BinaryContent ID: 1111111 for resource ID Patient/123", e.getMessage());
|
||||||
}
|
}
|
||||||
|
|
||||||
StoredDetails details = mySvc.fetchBlobDetails(new IdType("Patient/123"), "1111111");
|
StoredDetails details = mySvc.fetchBinaryContentDetails(new IdType("Patient/123"), "1111111");
|
||||||
assertNull(details);
|
assertNull(details);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
public void testExpunge() throws IOException {
|
public void testExpunge() throws IOException {
|
||||||
|
|
||||||
/*
|
/*
|
||||||
* Store the binary
|
* Store the binary
|
||||||
*/
|
*/
|
||||||
ByteArrayInputStream inputStream = new ByteArrayInputStream(SOME_BYTES);
|
ByteArrayInputStream inputStream = new ByteArrayInputStream(SOME_BYTES);
|
||||||
String contentType = "image/png";
|
String contentType = "image/png";
|
||||||
IdType resourceId = new IdType("Binary/123");
|
IdType resourceId = new IdType("Binary/123");
|
||||||
StoredDetails outcome = mySvc.storeBlob(resourceId, null, contentType, inputStream, new ServletRequestDetails());
|
StoredDetails outcome = mySvc.storeBinaryContent(resourceId, null, contentType, inputStream, new ServletRequestDetails());
|
||||||
String blobId = outcome.getBlobId();
|
String blobId = outcome.getBinaryContentId();
|
||||||
|
|
||||||
// Expunge
|
// Expunge
|
||||||
mySvc.expungeBlob(resourceId, blobId);
|
mySvc.expungeBinaryContent(resourceId, blobId);
|
||||||
|
|
||||||
ByteArrayOutputStream capture = new ByteArrayOutputStream();
|
ByteArrayOutputStream capture = new ByteArrayOutputStream();
|
||||||
assertFalse(mySvc.writeBlob(resourceId, outcome.getBlobId(), capture));
|
assertFalse(mySvc.writeBinaryContent(resourceId, outcome.getBinaryContentId(), capture));
|
||||||
assertEquals(0, capture.size());
|
assertEquals(0, capture.size());
|
||||||
|
|
||||||
}
|
}
|
||||||
|
@ -179,30 +186,29 @@ public class DatabaseBlobBinaryStorageSvcImplTest extends BaseJpaR4Test {
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
public void testWrongResourceId() throws IOException {
|
public void testWrongResourceId() throws IOException {
|
||||||
|
|
||||||
/*
|
/*
|
||||||
* Store the binary
|
* Store the binary
|
||||||
*/
|
*/
|
||||||
ByteArrayInputStream inputStream = new ByteArrayInputStream(SOME_BYTES);
|
ByteArrayInputStream inputStream = new ByteArrayInputStream(SOME_BYTES);
|
||||||
String contentType = "image/png";
|
String contentType = "image/png";
|
||||||
IdType resourceId = new IdType("Binary/123");
|
IdType resourceId = new IdType("Binary/123");
|
||||||
StoredDetails outcome = mySvc.storeBlob(resourceId, null, contentType, inputStream, new ServletRequestDetails());
|
StoredDetails outcome = mySvc.storeBinaryContent(resourceId, null, contentType, inputStream, new ServletRequestDetails());
|
||||||
|
|
||||||
// Right ID
|
// Right ID
|
||||||
ByteArrayOutputStream capture = new ByteArrayOutputStream();
|
ByteArrayOutputStream capture = new ByteArrayOutputStream();
|
||||||
assertTrue(mySvc.writeBlob(resourceId, outcome.getBlobId(), capture));
|
assertTrue(mySvc.writeBinaryContent(resourceId, outcome.getBinaryContentId(), capture));
|
||||||
assertEquals(16, capture.size());
|
assertEquals(16, capture.size());
|
||||||
|
|
||||||
// Wrong ID
|
// Wrong ID
|
||||||
capture = new ByteArrayOutputStream();
|
capture = new ByteArrayOutputStream();
|
||||||
assertFalse(mySvc.writeBlob(new IdType("Patient/9999"), outcome.getBlobId(), capture));
|
assertFalse(mySvc.writeBinaryContent(new IdType("Patient/9999"), outcome.getBinaryContentId(), capture));
|
||||||
assertEquals(0, capture.size());
|
assertEquals(0, capture.size());
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
public void testCopyBlobToOutputStream_Exception() throws SQLException {
|
public void testCopyBinaryContentToOutputStream_Exception() throws SQLException {
|
||||||
DatabaseBlobBinaryStorageSvcImpl svc = new DatabaseBlobBinaryStorageSvcImpl();
|
DatabaseBinaryContentStorageSvcImpl svc = new DatabaseBinaryContentStorageSvcImpl();
|
||||||
|
|
||||||
BinaryStorageEntity mockInput = new BinaryStorageEntity();
|
BinaryStorageEntity mockInput = new BinaryStorageEntity();
|
||||||
Blob blob = mock(Blob.class);
|
Blob blob = mock(Blob.class);
|
||||||
|
@ -210,7 +216,7 @@ public class DatabaseBlobBinaryStorageSvcImplTest extends BaseJpaR4Test {
|
||||||
mockInput.setBlob(blob);
|
mockInput.setBlob(blob);
|
||||||
|
|
||||||
try {
|
try {
|
||||||
svc.copyBlobToOutputStream(new ByteArrayOutputStream(), (mockInput));
|
svc.copyBinaryContentToOutputStream(new ByteArrayOutputStream(), (mockInput));
|
||||||
fail();
|
fail();
|
||||||
} catch (IOException e) {
|
} catch (IOException e) {
|
||||||
assertThat(e.getMessage(), containsString("FOO"));
|
assertThat(e.getMessage(), containsString("FOO"));
|
||||||
|
@ -218,8 +224,8 @@ public class DatabaseBlobBinaryStorageSvcImplTest extends BaseJpaR4Test {
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
public void testCopyBlobToByteArray_Exception() throws SQLException {
|
public void testCopyBinaryContentToByteArray_Exception() throws SQLException {
|
||||||
DatabaseBlobBinaryStorageSvcImpl svc = new DatabaseBlobBinaryStorageSvcImpl();
|
DatabaseBinaryContentStorageSvcImpl svc = new DatabaseBinaryContentStorageSvcImpl();
|
||||||
|
|
||||||
BinaryStorageEntity mockInput = new BinaryStorageEntity();
|
BinaryStorageEntity mockInput = new BinaryStorageEntity();
|
||||||
Blob blob = mock(Blob.class);
|
Blob blob = mock(Blob.class);
|
||||||
|
@ -227,20 +233,81 @@ public class DatabaseBlobBinaryStorageSvcImplTest extends BaseJpaR4Test {
|
||||||
mockInput.setBlob(blob);
|
mockInput.setBlob(blob);
|
||||||
|
|
||||||
try {
|
try {
|
||||||
svc.copyBlobToByteArray(mockInput);
|
svc.copyBinaryContentToByteArray(mockInput);
|
||||||
fail();
|
fail();
|
||||||
} catch (IOException e) {
|
} catch (IOException e) {
|
||||||
assertThat(e.getMessage(), containsString("FOO"));
|
assertThat(e.getMessage(), containsString("FOO"));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
public void testReadBinaryStorageEntity_whenHasBinaryContent_defaultsToBinaryContent() throws IOException {
|
||||||
|
// given
|
||||||
|
DatabaseBinaryContentStorageSvcImpl svc = new DatabaseBinaryContentStorageSvcImpl();
|
||||||
|
|
||||||
|
BinaryStorageEntity mockInput = mock(BinaryStorageEntity.class);
|
||||||
|
when(mockInput.hasStorageContent()).thenReturn(true);
|
||||||
|
when(mockInput.getStorageContentBin()).thenReturn(SOME_BYTES);
|
||||||
|
|
||||||
|
// when
|
||||||
|
svc.copyBinaryContentToByteArray(mockInput);
|
||||||
|
|
||||||
|
// then
|
||||||
|
verify(mockInput, times(0)).hasBlob();
|
||||||
|
verify(mockInput, times(0)).getBlob();
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
public void testReadBinaryStorageEntity_whenHasBlobOnly_willReadBlobContent() throws IOException {
|
||||||
|
// given
|
||||||
|
DatabaseBinaryContentStorageSvcImpl svc = new DatabaseBinaryContentStorageSvcImpl();
|
||||||
|
|
||||||
|
BinaryStorageEntity mockInput = mock(BinaryStorageEntity.class);
|
||||||
|
when(mockInput.hasStorageContent()).thenReturn(false);
|
||||||
|
when(mockInput.hasBlob()).thenReturn(true);
|
||||||
|
when(mockInput.getBlob()).thenAnswer(t ->{
|
||||||
|
Blob blob = mock(Blob.class);
|
||||||
|
when(blob.getBinaryStream()).thenReturn(new ByteArrayInputStream(SOME_BYTES));
|
||||||
|
return blob;
|
||||||
|
} );
|
||||||
|
|
||||||
|
// when
|
||||||
|
svc.copyBinaryContentToByteArray(mockInput);
|
||||||
|
|
||||||
|
// then
|
||||||
|
verify(mockInput, times(1)).hasBlob();
|
||||||
|
verify(mockInput, times(1)).getBlob();
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
public void testStoreBinaryContent_writesBlobAndByteArray() throws IOException {
|
||||||
|
// given
|
||||||
|
ByteArrayInputStream inputStream = new ByteArrayInputStream(SOME_BYTES);
|
||||||
|
String contentType = "image/png";
|
||||||
|
IdType resourceId = new IdType("Binary/123");
|
||||||
|
|
||||||
|
// when
|
||||||
|
StoredDetails outcome = mySvc.storeBinaryContent(resourceId, null, contentType, inputStream, new ServletRequestDetails());
|
||||||
|
|
||||||
|
runInTransaction(() -> {
|
||||||
|
Optional<BinaryStorageEntity> binaryStorageEntityOptional = myBinaryStorageEntityDao.findByIdAndResourceId(outcome.getBinaryContentId(), resourceId.toUnqualifiedVersionless().getValue());
|
||||||
|
BinaryStorageEntity binaryStorageEntity = binaryStorageEntityOptional.get();
|
||||||
|
|
||||||
|
// then
|
||||||
|
assertThat(binaryStorageEntity.hasStorageContent(), is(true));
|
||||||
|
assertThat(binaryStorageEntity.hasBlob(), is(true));
|
||||||
|
});
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
@Configuration
|
@Configuration
|
||||||
public static class MyConfig {
|
public static class MyConfig {
|
||||||
|
|
||||||
@Primary
|
@Primary
|
||||||
@Bean
|
@Bean
|
||||||
public IBinaryStorageSvc databaseBlobBinaryStorageSvc() {
|
public IBinaryStorageSvc databaseBinaryContentStorageSvc() {
|
||||||
return new DatabaseBlobBinaryStorageSvcImpl();
|
return new DatabaseBinaryContentStorageSvcImpl();
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
|
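For reference, a minimal caller-side sketch of the renamed binary-content API exercised above. The method names are the ones used in the tests; the package locations of IBinaryStorageSvc and StoredDetails are assumed from HAPI FHIR conventions, and the service instance is assumed to come from the surrounding application wiring.

    // Sketch only: package locations marked "assumed" are not confirmed by this diff.
    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.IOException;

    import org.hl7.fhir.r4.model.IdType;

    import ca.uhn.fhir.jpa.binary.api.IBinaryStorageSvc; // assumed location of the service interface
    import ca.uhn.fhir.jpa.binary.api.StoredDetails;     // assumed location of the details bean
    import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;

    class BinaryContentRoundTripSketch {

        static byte[] storeAndReadBack(IBinaryStorageSvc theSvc, byte[] theBytes) throws IOException {
            IdType resourceId = new IdType("Binary/123");

            // Store the content; passing null as the second argument lets the service generate an id.
            StoredDetails stored = theSvc.storeBinaryContent(
                resourceId, null, "image/png", new ByteArrayInputStream(theBytes), new ServletRequestDetails());

            // Metadata is now keyed by the "binary content id" (formerly the "blob id").
            StoredDetails details = theSvc.fetchBinaryContentDetails(resourceId, stored.getBinaryContentId());
            assert details.getBytes() == theBytes.length;

            // Stream the payload back out, or fetch it directly as a byte array.
            ByteArrayOutputStream target = new ByteArrayOutputStream();
            theSvc.writeBinaryContent(resourceId, stored.getBinaryContentId(), target);
            return theSvc.fetchBinaryContent(resourceId, stored.getBinaryContentId());
        }
    }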
@ -50,22 +50,22 @@ public class FilesystemBinaryStorageSvcImplTest {
|
||||||
public void testStoreAndRetrieve() throws IOException {
|
public void testStoreAndRetrieve() throws IOException {
|
||||||
IIdType id = new IdType("Patient/123");
|
IIdType id = new IdType("Patient/123");
|
||||||
String contentType = "image/png";
|
String contentType = "image/png";
|
||||||
StoredDetails outcome = mySvc.storeBlob(id, null, contentType, new ByteArrayInputStream(SOME_BYTES), new ServletRequestDetails());
|
StoredDetails outcome = mySvc.storeBinaryContent(id, null, contentType, new ByteArrayInputStream(SOME_BYTES), new ServletRequestDetails());
|
||||||
|
|
||||||
ourLog.info("Got id: {}", outcome);
|
ourLog.info("Got id: {}", outcome);
|
||||||
|
|
||||||
StoredDetails details = mySvc.fetchBlobDetails(id, outcome.getBlobId());
|
StoredDetails details = mySvc.fetchBinaryContentDetails(id, outcome.getBinaryContentId());
|
||||||
assertEquals(16L, details.getBytes());
|
assertEquals(16L, details.getBytes());
|
||||||
assertEquals(outcome.getBlobId(), details.getBlobId());
|
assertEquals(outcome.getBinaryContentId(), details.getBinaryContentId());
|
||||||
assertEquals("image/png", details.getContentType());
|
assertEquals("image/png", details.getContentType());
|
||||||
assertEquals("dc7197cfab936698bef7818975c185a9b88b71a0a0a2493deea487706ddf20cb", details.getHash());
|
assertEquals("dc7197cfab936698bef7818975c185a9b88b71a0a0a2493deea487706ddf20cb", details.getHash());
|
||||||
assertNotNull(details.getPublished());
|
assertNotNull(details.getPublished());
|
||||||
|
|
||||||
ByteArrayOutputStream capture = new ByteArrayOutputStream();
|
ByteArrayOutputStream capture = new ByteArrayOutputStream();
|
||||||
mySvc.writeBlob(id, outcome.getBlobId(), capture);
|
mySvc.writeBinaryContent(id, outcome.getBinaryContentId(), capture);
|
||||||
|
|
||||||
assertArrayEquals(SOME_BYTES, capture.toByteArray());
|
assertArrayEquals(SOME_BYTES, capture.toByteArray());
|
||||||
assertArrayEquals(SOME_BYTES, mySvc.fetchBlob(id, outcome.getBlobId()));
|
assertArrayEquals(SOME_BYTES, mySvc.fetchBinaryContent(id, outcome.getBinaryContentId()));
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
|
@ -73,30 +73,30 @@ public class FilesystemBinaryStorageSvcImplTest {
|
||||||
IIdType id = new IdType("Patient/123");
|
IIdType id = new IdType("Patient/123");
|
||||||
String contentType = "image/png";
|
String contentType = "image/png";
|
||||||
String blobId = "ABCDEFGHIJKLMNOPQRSTUV";
|
String blobId = "ABCDEFGHIJKLMNOPQRSTUV";
|
||||||
StoredDetails outcome = mySvc.storeBlob(id, blobId, contentType, new ByteArrayInputStream(SOME_BYTES), new ServletRequestDetails());
|
StoredDetails outcome = mySvc.storeBinaryContent(id, blobId, contentType, new ByteArrayInputStream(SOME_BYTES), new ServletRequestDetails());
|
||||||
assertEquals(blobId, outcome.getBlobId());
|
assertEquals(blobId, outcome.getBinaryContentId());
|
||||||
|
|
||||||
ourLog.info("Got id: {}", outcome);
|
ourLog.info("Got id: {}", outcome);
|
||||||
|
|
||||||
StoredDetails details = mySvc.fetchBlobDetails(id, outcome.getBlobId());
|
StoredDetails details = mySvc.fetchBinaryContentDetails(id, outcome.getBinaryContentId());
|
||||||
assertEquals(16L, details.getBytes());
|
assertEquals(16L, details.getBytes());
|
||||||
assertEquals(outcome.getBlobId(), details.getBlobId());
|
assertEquals(outcome.getBinaryContentId(), details.getBinaryContentId());
|
||||||
assertEquals("image/png", details.getContentType());
|
assertEquals("image/png", details.getContentType());
|
||||||
assertEquals("dc7197cfab936698bef7818975c185a9b88b71a0a0a2493deea487706ddf20cb", details.getHash());
|
assertEquals("dc7197cfab936698bef7818975c185a9b88b71a0a0a2493deea487706ddf20cb", details.getHash());
|
||||||
assertNotNull(details.getPublished());
|
assertNotNull(details.getPublished());
|
||||||
|
|
||||||
ByteArrayOutputStream capture = new ByteArrayOutputStream();
|
ByteArrayOutputStream capture = new ByteArrayOutputStream();
|
||||||
mySvc.writeBlob(id, outcome.getBlobId(), capture);
|
mySvc.writeBinaryContent(id, outcome.getBinaryContentId(), capture);
|
||||||
|
|
||||||
assertArrayEquals(SOME_BYTES, capture.toByteArray());
|
assertArrayEquals(SOME_BYTES, capture.toByteArray());
|
||||||
assertArrayEquals(SOME_BYTES, mySvc.fetchBlob(id, outcome.getBlobId()));
|
assertArrayEquals(SOME_BYTES, mySvc.fetchBinaryContent(id, outcome.getBinaryContentId()));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
public void testFetchBlobUnknown() throws IOException {
|
public void testFetchBinaryContentUnknown() throws IOException {
|
||||||
try {
|
try {
|
||||||
mySvc.fetchBlob(new IdType("Patient/123"), "1111111");
|
mySvc.fetchBinaryContent(new IdType("Patient/123"), "1111111");
|
||||||
fail();
|
fail();
|
||||||
} catch (ResourceNotFoundException e) {
|
} catch (ResourceNotFoundException e) {
|
||||||
assertEquals(Msg.code(1327) + "Unknown blob ID: 1111111 for resource ID Patient/123", e.getMessage());
|
assertEquals(Msg.code(1327) + "Unknown blob ID: 1111111 for resource ID Patient/123", e.getMessage());
|
||||||
|
@ -108,21 +108,21 @@ public class FilesystemBinaryStorageSvcImplTest {
|
||||||
public void testExpunge() throws IOException {
|
public void testExpunge() throws IOException {
|
||||||
IIdType id = new IdType("Patient/123");
|
IIdType id = new IdType("Patient/123");
|
||||||
String contentType = "image/png";
|
String contentType = "image/png";
|
||||||
StoredDetails outcome = mySvc.storeBlob(id, null, contentType, new ByteArrayInputStream(SOME_BYTES), new ServletRequestDetails());
|
StoredDetails outcome = mySvc.storeBinaryContent(id, null, contentType, new ByteArrayInputStream(SOME_BYTES), new ServletRequestDetails());
|
||||||
|
|
||||||
ourLog.info("Got id: {}", outcome);
|
ourLog.info("Got id: {}", outcome);
|
||||||
|
|
||||||
StoredDetails details = mySvc.fetchBlobDetails(id, outcome.getBlobId());
|
StoredDetails details = mySvc.fetchBinaryContentDetails(id, outcome.getBinaryContentId());
|
||||||
assertEquals(16L, details.getBytes());
|
assertEquals(16L, details.getBytes());
|
||||||
assertEquals(outcome.getBlobId(), details.getBlobId());
|
assertEquals(outcome.getBinaryContentId(), details.getBinaryContentId());
|
||||||
assertEquals("image/png", details.getContentType());
|
assertEquals("image/png", details.getContentType());
|
||||||
assertEquals("dc7197cfab936698bef7818975c185a9b88b71a0a0a2493deea487706ddf20cb", details.getHash());
|
assertEquals("dc7197cfab936698bef7818975c185a9b88b71a0a0a2493deea487706ddf20cb", details.getHash());
|
||||||
assertNotNull(details.getPublished());
|
assertNotNull(details.getPublished());
|
||||||
|
|
||||||
mySvc.expungeBlob(id, outcome.getBlobId());
|
mySvc.expungeBinaryContent(id, outcome.getBinaryContentId());
|
||||||
|
|
||||||
ByteArrayOutputStream capture = new ByteArrayOutputStream();
|
ByteArrayOutputStream capture = new ByteArrayOutputStream();
|
||||||
mySvc.writeBlob(id, outcome.getBlobId(), capture);
|
mySvc.writeBinaryContent(id, outcome.getBinaryContentId(), capture);
|
||||||
assertEquals(0, capture.size());
|
assertEquals(0, capture.size());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -134,7 +134,7 @@ public class FilesystemBinaryStorageSvcImplTest {
|
||||||
IIdType id = new IdType("Patient/123");
|
IIdType id = new IdType("Patient/123");
|
||||||
String contentType = "image/png";
|
String contentType = "image/png";
|
||||||
try {
|
try {
|
||||||
mySvc.storeBlob(id, null, contentType, new ByteArrayInputStream(SOME_BYTES), new ServletRequestDetails());
|
mySvc.storeBinaryContent(id, null, contentType, new ByteArrayInputStream(SOME_BYTES), new ServletRequestDetails());
|
||||||
fail();
|
fail();
|
||||||
} catch (PayloadTooLargeException e) {
|
} catch (PayloadTooLargeException e) {
|
||||||
assertEquals(Msg.code(1343) + "Binary size exceeds maximum: 5", e.getMessage());
|
assertEquals(Msg.code(1343) + "Binary size exceeds maximum: 5", e.getMessage());
|
||||||
|
|
|
@ -12,37 +12,37 @@ public class NullBinaryStorageSvcImplTest {
|
||||||
private final NullBinaryStorageSvcImpl mySvc = new NullBinaryStorageSvcImpl();
|
private final NullBinaryStorageSvcImpl mySvc = new NullBinaryStorageSvcImpl();
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
public void shouldStoreBlob() {
|
public void shouldStoreBinaryContent() {
|
||||||
assertFalse(mySvc.shouldStoreBlob(1, new IdType("Patient/2"), "application/json"));
|
assertFalse(mySvc.shouldStoreBinaryContent(1, new IdType("Patient/2"), "application/json"));
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
public void storeBlob() {
|
public void storeBinaryContent() {
|
||||||
assertThrows(UnsupportedOperationException.class, () -> mySvc.storeBlob(null, null, null, null, null));
|
assertThrows(UnsupportedOperationException.class, () -> mySvc.storeBinaryContent(null, null, null, null, null));
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
public void fetchBlobDetails() {
|
public void fetchBinaryContentDetails() {
|
||||||
assertThrows(UnsupportedOperationException.class, () -> mySvc.fetchBlobDetails(null, null));
|
assertThrows(UnsupportedOperationException.class, () -> mySvc.fetchBinaryContentDetails(null, null));
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
public void writeBlob() {
|
public void writeBinaryContent() {
|
||||||
assertThrows(UnsupportedOperationException.class, () -> mySvc.writeBlob(null, null, null));
|
assertThrows(UnsupportedOperationException.class, () -> mySvc.writeBinaryContent(null, null, null));
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
public void expungeBlob() {
|
public void expungeBinaryContent() {
|
||||||
assertThrows(UnsupportedOperationException.class, () -> mySvc.expungeBlob(null, null));
|
assertThrows(UnsupportedOperationException.class, () -> mySvc.expungeBinaryContent(null, null));
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
public void fetchBlob() {
|
public void fetchBinaryContent() {
|
||||||
assertThrows(UnsupportedOperationException.class, () -> mySvc.fetchBlob(null, null));
|
assertThrows(UnsupportedOperationException.class, () -> mySvc.fetchBinaryContent(null, null));
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
public void newBlobId() {
|
public void newBinaryContentId() {
|
||||||
assertThrows(UnsupportedOperationException.class, () -> mySvc.newBlobId());
|
assertThrows(UnsupportedOperationException.class, () -> mySvc.newBinaryContentId());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -264,7 +264,7 @@ public class BinaryAccessProviderR4Test extends BaseResourceProviderR4Test {
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
myBinaryStorageSvc.expungeBlob(id, attachmentId);
|
myBinaryStorageSvc.expungeBinaryContent(id, attachmentId);
|
||||||
|
|
||||||
path = myServerBase +
|
path = myServerBase +
|
||||||
"/DocumentReference/" + id.getIdPart() + "/" +
|
"/DocumentReference/" + id.getIdPart() + "/" +
|
||||||
|
@ -721,7 +721,7 @@ public class BinaryAccessProviderR4Test extends BaseResourceProviderR4Test {
|
||||||
}
|
}
|
||||||
|
|
||||||
ByteArrayOutputStream capture = new ByteArrayOutputStream();
|
ByteArrayOutputStream capture = new ByteArrayOutputStream();
|
||||||
myStorageSvc.writeBlob(id, attachmentId, capture);
|
myStorageSvc.writeBinaryContent(id, attachmentId, capture);
|
||||||
assertEquals(15, capture.size());
|
assertEquals(15, capture.size());
|
||||||
|
|
||||||
// Now delete (logical delete- should not expunge the binary)
|
// Now delete (logical delete- should not expunge the binary)
|
||||||
|
@ -734,7 +734,7 @@ public class BinaryAccessProviderR4Test extends BaseResourceProviderR4Test {
|
||||||
}
|
}
|
||||||
|
|
||||||
capture = new ByteArrayOutputStream();
|
capture = new ByteArrayOutputStream();
|
||||||
myStorageSvc.writeBlob(id, attachmentId, capture);
|
myStorageSvc.writeBinaryContent(id, attachmentId, capture);
|
||||||
assertEquals(15, capture.size());
|
assertEquals(15, capture.size());
|
||||||
|
|
||||||
// Now expunge
|
// Now expunge
|
||||||
|
@ -748,7 +748,7 @@ public class BinaryAccessProviderR4Test extends BaseResourceProviderR4Test {
|
||||||
.execute();
|
.execute();
|
||||||
|
|
||||||
capture = new ByteArrayOutputStream();
|
capture = new ByteArrayOutputStream();
|
||||||
assertFalse(myStorageSvc.writeBlob(id, attachmentId, capture));
|
assertFalse(myStorageSvc.writeBinaryContent(id, attachmentId, capture));
|
||||||
assertEquals(0, capture.size());
|
assertEquals(0, capture.size());
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
|
@ -0,0 +1,94 @@
|
||||||
|
package ca.uhn.fhir.jpa.migrate.taskdef;
|
||||||
|
|
||||||
|
import ca.uhn.fhir.jpa.embedded.PostgresEmbeddedDatabase;
|
||||||
|
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
|
||||||
|
import ca.uhn.fhir.jpa.migrate.HapiMigrator;
|
||||||
|
import ca.uhn.fhir.jpa.migrate.SchemaMigrator;
|
||||||
|
import ca.uhn.fhir.util.VersionEnum;
|
||||||
|
import org.junit.jupiter.api.AfterAll;
|
||||||
|
import org.junit.jupiter.api.AfterEach;
|
||||||
|
import org.junit.jupiter.api.BeforeAll;
|
||||||
|
import org.junit.jupiter.api.BeforeEach;
|
||||||
|
import org.junit.jupiter.api.Test;
|
||||||
|
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.Map;
|
||||||
|
|
||||||
|
import static org.hamcrest.MatcherAssert.assertThat;
|
||||||
|
import static org.hamcrest.Matchers.equalTo;
|
||||||
|
import static org.hamcrest.Matchers.hasSize;
|
||||||
|
|
||||||
|
public class MigrateColumBlobTypeToBinaryTypeTaskTest {
|
||||||
|
|
||||||
|
private static PostgresEmbeddedDatabase ourPostgresEmbeddedDatabase;
|
||||||
|
|
||||||
|
@BeforeAll
|
||||||
|
public static void beforeAll(){
|
||||||
|
ourPostgresEmbeddedDatabase = new PostgresEmbeddedDatabase();
|
||||||
|
}
|
||||||
|
|
||||||
|
private HapiMigrator myMigrator;
|
||||||
|
|
||||||
|
@BeforeEach
|
||||||
|
public void beforeEach(){
|
||||||
|
myMigrator = new HapiMigrator(SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME, ourPostgresEmbeddedDatabase.getDataSource(), DriverTypeEnum.POSTGRES_9_4);
|
||||||
|
myMigrator.createMigrationTableIfRequired();
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
public void testMigrationTask_OidValueIsCopiedIntoBytea(){
|
||||||
|
// given
|
||||||
|
final String expectedString ="Hello world!";
|
||||||
|
|
||||||
|
ourPostgresEmbeddedDatabase.executeSqlAsBatch(List.of(
|
||||||
|
"create table HFJ_STORAGE_WITH_BLOB (BLOB_DATA oid, STORAGE_CONTENT_BIN bytea)",
|
||||||
|
"select lo_create(1234)", // create empty LOB with id 1234
|
||||||
|
"select lo_put(1234, 0, '\\x48656c6c6f20776f726c6421')", // insert data (Hello world!) in the LOB with id 1234
|
||||||
|
"insert into HFJ_STORAGE_WITH_BLOB (BLOB_DATA) values (1234)" // assign LOB id to colum
|
||||||
|
));
|
||||||
|
|
||||||
|
// when
|
||||||
|
BaseTask task = new MigrateColumBlobTypeToBinaryTypeTask(
|
||||||
|
VersionEnum.V7_2_0.toString(),
|
||||||
|
"1",
|
||||||
|
"HFJ_STORAGE_WITH_BLOB",
|
||||||
|
"BLOB_DATA", // colum of oid type
|
||||||
|
"STORAGE_CONTENT_BIN" // colum of bytea type
|
||||||
|
);
|
||||||
|
|
||||||
|
myMigrator.addTask(task);
|
||||||
|
myMigrator.migrate();
|
||||||
|
|
||||||
|
// then
|
||||||
|
List<Map<String, Object>> rows = ourPostgresEmbeddedDatabase.query("select * from HFJ_STORAGE_WITH_BLOB");
|
||||||
|
|
||||||
|
assertThat(rows, hasSize(1));
|
||||||
|
|
||||||
|
Map<String, Object> stringObjectMap = rows.get(0);
|
||||||
|
|
||||||
|
String storedContent = convertToString(stringObjectMap, "storage_content_bin");
|
||||||
|
|
||||||
|
assertThat(storedContent, equalTo(expectedString));
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
public String convertToString(Map<String, Object> theMap, String theKey){
|
||||||
|
Object o = theMap.get(theKey);
|
||||||
|
|
||||||
|
byte[] convertedOidToBytea = ((byte[])o);
|
||||||
|
return new String(convertedOidToBytea);
|
||||||
|
}
|
||||||
|
|
||||||
|
@AfterEach
|
||||||
|
public void afterEach(){
|
||||||
|
ourPostgresEmbeddedDatabase.clearDatabase();
|
||||||
|
}
|
||||||
|
|
||||||
|
@AfterAll
|
||||||
|
public static void afterAll(){
|
||||||
|
|
||||||
|
ourPostgresEmbeddedDatabase.stop();
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
|
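The hex literal written with lo_put above is simply the UTF-8 encoding of "Hello world!", which is why the test expects the migrated bytea column to decode back to that string. A small self-contained sketch verifying the decoding:

    class LargeObjectPayloadSketch {

        public static void main(String[] args) {
            // Hex payload written into LOB 1234 via lo_put(...) in the test above.
            String hex = "48656c6c6f20776f726c6421";

            byte[] bytes = new byte[hex.length() / 2];
            for (int i = 0; i < bytes.length; i++) {
                bytes[i] = (byte) Integer.parseInt(hex.substring(2 * i, 2 * i + 2), 16);
            }

            // Prints "Hello world!", the value the migrated bytea column is expected to hold.
            System.out.println(new String(bytes, java.nio.charset.StandardCharsets.UTF_8));
        }
    }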
@ -0,0 +1,87 @@
|
||||||
|
package ca.uhn.fhir.jpa.migrate.taskdef;
|
||||||
|
|
||||||
|
import ca.uhn.fhir.jpa.embedded.PostgresEmbeddedDatabase;
|
||||||
|
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
|
||||||
|
import ca.uhn.fhir.jpa.migrate.HapiMigrator;
|
||||||
|
import ca.uhn.fhir.jpa.migrate.SchemaMigrator;
|
||||||
|
import ca.uhn.fhir.util.VersionEnum;
|
||||||
|
import org.junit.jupiter.api.AfterAll;
|
||||||
|
import org.junit.jupiter.api.AfterEach;
|
||||||
|
import org.junit.jupiter.api.BeforeAll;
|
||||||
|
import org.junit.jupiter.api.BeforeEach;
|
||||||
|
import org.junit.jupiter.api.Test;
|
||||||
|
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.Map;
|
||||||
|
|
||||||
|
import static org.hamcrest.MatcherAssert.assertThat;
|
||||||
|
import static org.hamcrest.Matchers.equalTo;
|
||||||
|
import static org.hamcrest.Matchers.hasSize;
|
||||||
|
|
||||||
|
public class MigrateColumnClobTypeToTextTypeTaskTest {
|
||||||
|
|
||||||
|
private static PostgresEmbeddedDatabase ourPostgresEmbeddedDatabase;
|
||||||
|
|
||||||
|
@BeforeAll
|
||||||
|
public static void beforeAll(){
|
||||||
|
ourPostgresEmbeddedDatabase = new PostgresEmbeddedDatabase();
|
||||||
|
}
|
||||||
|
|
||||||
|
private HapiMigrator myMigrator;
|
||||||
|
|
||||||
|
@BeforeEach
|
||||||
|
public void beforeEach(){
|
||||||
|
myMigrator = new HapiMigrator(SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME, ourPostgresEmbeddedDatabase.getDataSource(), DriverTypeEnum.POSTGRES_9_4);
|
||||||
|
myMigrator.createMigrationTableIfRequired();
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
public void testMigrationTask_OidValueIsCopiedIntoText(){
|
||||||
|
// given
|
||||||
|
final String expectedString ="Hello world!";
|
||||||
|
|
||||||
|
ourPostgresEmbeddedDatabase.executeSqlAsBatch(List.of(
|
||||||
|
"create table HFJ_STORAGE_WITH_OID (OID_DATA oid, STORAGE_CONTENT_TEXT text)",
|
||||||
|
"select lo_create(1234)", // create empty LOB with id 1234
|
||||||
|
"select lo_put(1234, 0, 'Hello world!')", // insert data in the LOB with id 1234
|
||||||
|
"insert into HFJ_STORAGE_WITH_OID (OID_DATA) values (1234)" // assign LOB id to colum
|
||||||
|
));
|
||||||
|
|
||||||
|
// when
|
||||||
|
BaseTask task = new MigrateColumnClobTypeToTextTypeTask(
|
||||||
|
VersionEnum.V7_2_0.toString(),
|
||||||
|
"1",
|
||||||
|
"HFJ_STORAGE_WITH_OID",
|
||||||
|
"OID_DATA", // colum of oid type
|
||||||
|
"STORAGE_CONTENT_TEXT" // colum of bytea type
|
||||||
|
);
|
||||||
|
|
||||||
|
myMigrator.addTask(task);
|
||||||
|
myMigrator.migrate();
|
||||||
|
|
||||||
|
// then
|
||||||
|
List<Map<String, Object>> rows = ourPostgresEmbeddedDatabase.query("select * from HFJ_STORAGE_WITH_OID");
|
||||||
|
|
||||||
|
assertThat(rows, hasSize(1));
|
||||||
|
|
||||||
|
Map<String, Object> stringObjectMap = rows.get(0);
|
||||||
|
|
||||||
|
String storedContent = (String) stringObjectMap.get("storage_content_text");
|
||||||
|
|
||||||
|
assertThat(storedContent, equalTo(expectedString));
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
@AfterEach
|
||||||
|
public void afterEach(){
|
||||||
|
ourPostgresEmbeddedDatabase.clearDatabase();
|
||||||
|
}
|
||||||
|
|
||||||
|
@AfterAll
|
||||||
|
public static void afterAll(){
|
||||||
|
|
||||||
|
ourPostgresEmbeddedDatabase.stop();
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
|
@ -104,6 +104,17 @@ public class HapiEmbeddedDatabasesExtension implements AfterAllCallback {
|
||||||
myDatabaseInitializerHelper.insertPersistenceTestData(getEmbeddedDatabase(theDriverType), theVersionEnum);
|
myDatabaseInitializerHelper.insertPersistenceTestData(getEmbeddedDatabase(theDriverType), theVersionEnum);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public void maybeInsertPersistenceTestData(DriverTypeEnum theDriverType, VersionEnum theVersionEnum) {
|
||||||
|
try {
|
||||||
|
myDatabaseInitializerHelper.insertPersistenceTestData(getEmbeddedDatabase(theDriverType), theVersionEnum);
|
||||||
|
} catch (Exception theE) {
|
||||||
|
ourLog.info(
|
||||||
|
"Could not insert persistence test data most likely because we don't have any for version {} and driver {}",
|
||||||
|
theVersionEnum,
|
||||||
|
theDriverType);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
public String getSqlFromResourceFile(String theFileName) {
|
public String getSqlFromResourceFile(String theFileName) {
|
||||||
try {
|
try {
|
||||||
ourLog.info("Loading file: {}", theFileName);
|
ourLog.info("Loading file: {}", theFileName);
|
||||||
|
|
|
@ -48,7 +48,7 @@ INSERT INTO HFJ_RESOURCE (RES_ID, PARTITION_DATE, PARTITION_ID, RES_DELETED_AT,
|
||||||
INSERT INTO HFJ_RESOURCE (RES_ID, PARTITION_DATE, PARTITION_ID, RES_DELETED_AT, RES_VERSION, HAS_TAGS, RES_PUBLISHED, RES_UPDATED, SP_HAS_LINKS, HASH_SHA256, SP_INDEX_STATUS, RES_LANGUAGE, SP_CMPSTR_UNIQ_PRESENT, SP_COORDS_PRESENT, SP_DATE_PRESENT, SP_NUMBER_PRESENT, SP_QUANTITY_PRESENT, SP_STRING_PRESENT, SP_TOKEN_PRESENT, SP_URI_PRESENT, RES_TYPE, RES_VER) VALUES (1780, NULL, NULL, NULL, 'R4', FALSE, '2023-05-01 15:26:08.82', '2023-05-01 15:26:08.82', FALSE, 'F30D68F5D00D440BD35E9CF7CCF1250234106A4B52AE016ACEE0F4291FB5AF5F', 1, NULL, FALSE, FALSE, FALSE, FALSE, FALSE, TRUE, TRUE, TRUE, 'CODESYSTEM', 1);
|
INSERT INTO HFJ_RESOURCE (RES_ID, PARTITION_DATE, PARTITION_ID, RES_DELETED_AT, RES_VERSION, HAS_TAGS, RES_PUBLISHED, RES_UPDATED, SP_HAS_LINKS, HASH_SHA256, SP_INDEX_STATUS, RES_LANGUAGE, SP_CMPSTR_UNIQ_PRESENT, SP_COORDS_PRESENT, SP_DATE_PRESENT, SP_NUMBER_PRESENT, SP_QUANTITY_PRESENT, SP_STRING_PRESENT, SP_TOKEN_PRESENT, SP_URI_PRESENT, RES_TYPE, RES_VER) VALUES (1780, NULL, NULL, NULL, 'R4', FALSE, '2023-05-01 15:26:08.82', '2023-05-01 15:26:08.82', FALSE, 'F30D68F5D00D440BD35E9CF7CCF1250234106A4B52AE016ACEE0F4291FB5AF5F', 1, NULL, FALSE, FALSE, FALSE, FALSE, FALSE, TRUE, TRUE, TRUE, 'CODESYSTEM', 1);
|
||||||
INSERT INTO HFJ_RESOURCE (RES_ID, PARTITION_DATE, PARTITION_ID, RES_DELETED_AT, RES_VERSION, HAS_TAGS, RES_PUBLISHED, RES_UPDATED, SP_HAS_LINKS, HASH_SHA256, SP_INDEX_STATUS, RES_LANGUAGE, SP_CMPSTR_UNIQ_PRESENT, SP_COORDS_PRESENT, SP_DATE_PRESENT, SP_NUMBER_PRESENT, SP_QUANTITY_PRESENT, SP_STRING_PRESENT, SP_TOKEN_PRESENT, SP_URI_PRESENT, RES_TYPE, RES_VER) VALUES (1788, NULL, NULL, NULL, 'R4', FALSE, '2023-05-01 15:26:12.178', '2023-05-01 15:26:12.178', FALSE, 'EDBD79B1632719D08AA733AF7F99DE17009D1CE4605F439B4F5BDAAC98AABDD8', 1, NULL, FALSE, FALSE, FALSE, FALSE, FALSE, TRUE, TRUE, TRUE, 'VALUESET', 1);
|
INSERT INTO HFJ_RESOURCE (RES_ID, PARTITION_DATE, PARTITION_ID, RES_DELETED_AT, RES_VERSION, HAS_TAGS, RES_PUBLISHED, RES_UPDATED, SP_HAS_LINKS, HASH_SHA256, SP_INDEX_STATUS, RES_LANGUAGE, SP_CMPSTR_UNIQ_PRESENT, SP_COORDS_PRESENT, SP_DATE_PRESENT, SP_NUMBER_PRESENT, SP_QUANTITY_PRESENT, SP_STRING_PRESENT, SP_TOKEN_PRESENT, SP_URI_PRESENT, RES_TYPE, RES_VER) VALUES (1788, NULL, NULL, NULL, 'R4', FALSE, '2023-05-01 15:26:12.178', '2023-05-01 15:26:12.178', FALSE, 'EDBD79B1632719D08AA733AF7F99DE17009D1CE4605F439B4F5BDAAC98AABDD8', 1, NULL, FALSE, FALSE, FALSE, FALSE, FALSE, TRUE, TRUE, TRUE, 'VALUESET', 1);
|
||||||
INSERT INTO HFJ_RESOURCE (RES_ID, PARTITION_DATE, PARTITION_ID, RES_DELETED_AT, RES_VERSION, HAS_TAGS, RES_PUBLISHED, RES_UPDATED, SP_HAS_LINKS, HASH_SHA256, SP_INDEX_STATUS, RES_LANGUAGE, SP_CMPSTR_UNIQ_PRESENT, SP_COORDS_PRESENT, SP_DATE_PRESENT, SP_NUMBER_PRESENT, SP_QUANTITY_PRESENT, SP_STRING_PRESENT, SP_TOKEN_PRESENT, SP_URI_PRESENT, RES_TYPE, RES_VER) VALUES (1796, NULL, NULL, NULL, 'R4', FALSE, '2023-05-01 15:26:12.422', '2023-05-01 15:26:12.422', FALSE, 'A71FACD8BB1828ACD8718959949D5A16BE31F2E350671C5326A8E0BB9799357E', 1, NULL, FALSE, FALSE, FALSE, FALSE, FALSE, TRUE, TRUE, TRUE, 'CONCEPTMAP', 1);
|
INSERT INTO HFJ_RESOURCE (RES_ID, PARTITION_DATE, PARTITION_ID, RES_DELETED_AT, RES_VERSION, HAS_TAGS, RES_PUBLISHED, RES_UPDATED, SP_HAS_LINKS, HASH_SHA256, SP_INDEX_STATUS, RES_LANGUAGE, SP_CMPSTR_UNIQ_PRESENT, SP_COORDS_PRESENT, SP_DATE_PRESENT, SP_NUMBER_PRESENT, SP_QUANTITY_PRESENT, SP_STRING_PRESENT, SP_TOKEN_PRESENT, SP_URI_PRESENT, RES_TYPE, RES_VER) VALUES (1796, NULL, NULL, NULL, 'R4', FALSE, '2023-05-01 15:26:12.422', '2023-05-01 15:26:12.422', FALSE, 'A71FACD8BB1828ACD8718959949D5A16BE31F2E350671C5326A8E0BB9799357E', 1, NULL, FALSE, FALSE, FALSE, FALSE, FALSE, TRUE, TRUE, TRUE, 'CONCEPTMAP', 1);
|
||||||
INSERT INTO HFJ_BINARY_STORAGE_BLOB (BLOB_ID, BLOB_DATA, CONTENT_TYPE, BLOB_HASH, PUBLISHED_DATE, RESOURCE_ID, BLOB_SIZE) VALUES ('QE7FP9VMEDPQTKOL9ENJXJEXBCIPTZKP2WOGWLMGDKJMKWXONGLLWBCUGDM2YH88ZXSG6V5PGDTSMQDMOXHEXBEFNOE0GIURJPOD', 1415364, 'APPLICATION/XML', 'D28743293C2FB67B72D06C80D055B66CCB58D70030E460450CD661AE2ED7225E', '2023-05-01 16:15:16.469', 'BINARY/2003', 36781);
|
INSERT INTO HFJ_BINARY_STORAGE_BLOB (BLOB_ID, BLOB_DATA, CONTENT_TYPE, BLOB_HASH, PUBLISHED_DATE, RESOURCE_ID, BLOB_SIZE) VALUES ('QE7FP9VMEDPQTKOL9ENJXJEXBCIPTZKP2WOGWLMGDKJMKWXONGLLWBCUGDM2YH88ZXSG6V5PGDTSMQDMOXHEXBEFNOE0GIURJPOD', lo_create(1415364), 'APPLICATION/XML', 'D28743293C2FB67B72D06C80D055B66CCB58D70030E460450CD661AE2ED7225E', '2023-05-01 16:15:16.469', 'BINARY/2003', 36781);
|
||||||
INSERT INTO HFJ_BLK_EXPORT_JOB (PID, CREATED_TIME, EXP_TIME, JOB_ID, REQUEST, EXP_SINCE, JOB_STATUS, STATUS_MESSAGE, STATUS_TIME, OPTLOCK) VALUES (1, '2023-05-01 16:31:20.175', '2023-05-01 18:31:20.175', '077DCFC3-DDC1-4C83-BD15-82867912F79D', '/$EXPORT?_OUTPUTFORMAT=APPLICATION%2FFHIR%2BNDJSON', '2023-04-30 16:31:20.171', 'COMPLETE', NULL, '2023-05-01 16:31:36.011', 1);
|
INSERT INTO HFJ_BLK_EXPORT_JOB (PID, CREATED_TIME, EXP_TIME, JOB_ID, REQUEST, EXP_SINCE, JOB_STATUS, STATUS_MESSAGE, STATUS_TIME, OPTLOCK) VALUES (1, '2023-05-01 16:31:20.175', '2023-05-01 18:31:20.175', '077DCFC3-DDC1-4C83-BD15-82867912F79D', '/$EXPORT?_OUTPUTFORMAT=APPLICATION%2FFHIR%2BNDJSON', '2023-04-30 16:31:20.171', 'COMPLETE', NULL, '2023-05-01 16:31:36.011', 1);
|
||||||
INSERT INTO HFJ_BLK_EXPORT_COLLECTION (PID, TYPE_FILTER, RES_TYPE, OPTLOCK, JOB_PID) VALUES (131, NULL, 'PATIENT', 0, 1);
|
INSERT INTO HFJ_BLK_EXPORT_COLLECTION (PID, TYPE_FILTER, RES_TYPE, OPTLOCK, JOB_PID) VALUES (131, NULL, 'PATIENT', 0, 1);
|
||||||
INSERT INTO HFJ_BLK_EXPORT_COLFILE (PID, RES_ID, COLLECTION_PID) VALUES (13, '2018', 131);
|
INSERT INTO HFJ_BLK_EXPORT_COLFILE (PID, RES_ID, COLLECTION_PID) VALUES (13, '2018', 131);
|
||||||
|
@ -98,8 +98,8 @@ INSERT INTO NPM_PACKAGE_VER (PID, CURRENT_VERSION, PKG_DESC, DESC_UPPER, FHIR_VE
|
||||||
INSERT INTO NPM_PACKAGE_VER_RES (PID, CANONICAL_URL, CANONICAL_VERSION, FILE_DIR, FHIR_VERSION, FHIR_VERSION_ID, FILE_NAME, RES_SIZE_BYTES, RES_TYPE, UPDATED_TIME, PACKVER_PID, BINARY_RES_ID) VALUES (1, NULL, NULL, 'PACKAGE', 'R4', '4.0.1', 'TESTPATIENT.JSON', 225, 'PATIENT', '2023-05-01 15:22:38.057', 1, 2);
|
INSERT INTO NPM_PACKAGE_VER_RES (PID, CANONICAL_URL, CANONICAL_VERSION, FILE_DIR, FHIR_VERSION, FHIR_VERSION_ID, FILE_NAME, RES_SIZE_BYTES, RES_TYPE, UPDATED_TIME, PACKVER_PID, BINARY_RES_ID) VALUES (1, NULL, NULL, 'PACKAGE', 'R4', '4.0.1', 'TESTPATIENT.JSON', 225, 'PATIENT', '2023-05-01 15:22:38.057', 1, 2);
|
||||||
INSERT INTO TRM_CODESYSTEM (PID, CODE_SYSTEM_URI, CURRENT_VERSION_PID, CS_NAME, RES_ID) VALUES (1, 'HTTP://LOINC.ORG', 54, 'LOINC', 1780);
|
INSERT INTO TRM_CODESYSTEM (PID, CODE_SYSTEM_URI, CURRENT_VERSION_PID, CS_NAME, RES_ID) VALUES (1, 'HTTP://LOINC.ORG', 54, 'LOINC', 1780);
|
||||||
INSERT INTO TRM_CODESYSTEM_VER (PID, CS_DISPLAY, CODESYSTEM_PID, CS_VERSION_ID, RES_ID) VALUES (54, 'LOINC', 1, NULL, 1780);
|
INSERT INTO TRM_CODESYSTEM_VER (PID, CS_DISPLAY, CODESYSTEM_PID, CS_VERSION_ID, RES_ID) VALUES (54, 'LOINC', 1, NULL, 1780);
|
||||||
INSERT INTO TRM_CONCEPT (PID, CODEVAL, CODESYSTEM_PID, DISPLAY, INDEX_STATUS, PARENT_PIDS, CODE_SEQUENCE, CONCEPT_UPDATED) VALUES (150, 'LL1001-8', 54, 'V2.67 PHENX05_14_30D FREQ AMTS', 1, '1415721', NULL, '2023-05-01 17:02:39.139');
|
INSERT INTO TRM_CONCEPT (PID, CODEVAL, CODESYSTEM_PID, DISPLAY, INDEX_STATUS, PARENT_PIDS, CODE_SEQUENCE, CONCEPT_UPDATED) VALUES (150, 'LL1001-8', 54, 'V2.67 PHENX05_14_30D FREQ AMTS', 1, lo_create(1415721), NULL, '2023-05-01 17:02:39.139');
|
||||||
INSERT INTO TRM_CONCEPT (PID, CODEVAL, CODESYSTEM_PID, DISPLAY, INDEX_STATUS, PARENT_PIDS, CODE_SEQUENCE, CONCEPT_UPDATED) VALUES (151, 'LA13892-7', 54, 'V2.67 MORE THAN 2 SLICES OR 2 DINNER ROLLS', 1, '1415722', 3, '2023-05-01 17:02:39.14');
|
INSERT INTO TRM_CONCEPT (PID, CODEVAL, CODESYSTEM_PID, DISPLAY, INDEX_STATUS, PARENT_PIDS, CODE_SEQUENCE, CONCEPT_UPDATED) VALUES (151, 'LA13892-7', 54, 'V2.67 MORE THAN 2 SLICES OR 2 DINNER ROLLS', 1, lo_create(1415722), 3, '2023-05-01 17:02:39.14');
|
||||||
INSERT INTO TRM_CONCEPT_DESIG (PID, LANG, USE_CODE, USE_DISPLAY, USE_SYSTEM, VAL, CS_VER_PID, CONCEPT_PID) VALUES (105, 'NL', '900000000000013009', 'SYNONYM', 'HTTP://SNOMED.INFO/SCT', 'SYSTOLISCHE BLOEDDRUK - EXPIRATIE', 54, 150);
|
INSERT INTO TRM_CONCEPT_DESIG (PID, LANG, USE_CODE, USE_DISPLAY, USE_SYSTEM, VAL, CS_VER_PID, CONCEPT_PID) VALUES (105, 'NL', '900000000000013009', 'SYNONYM', 'HTTP://SNOMED.INFO/SCT', 'SYSTOLISCHE BLOEDDRUK - EXPIRATIE', 54, 150);
|
||||||
INSERT INTO TRM_CONCEPT_MAP (PID, RES_ID, SOURCE_URL, TARGET_URL, URL) VALUES (54, 1796, NULL, NULL, 'HTTP://LOINC.ORG/CM/LOINC-PARTS-TO-PUBCHEM');
|
INSERT INTO TRM_CONCEPT_MAP (PID, RES_ID, SOURCE_URL, TARGET_URL, URL) VALUES (54, 1796, NULL, NULL, 'HTTP://LOINC.ORG/CM/LOINC-PARTS-TO-PUBCHEM');
|
||||||
INSERT INTO TRM_CONCEPT_MAP_GROUP (PID, CONCEPT_MAP_URL, SOURCE_URL, SOURCE_VS, SOURCE_VERSION, TARGET_URL, TARGET_VS, TARGET_VERSION, CONCEPT_MAP_PID) VALUES (54, NULL, 'HTTP://LOINC.ORG', NULL, NULL, 'HTTP://PUBCHEM.NCBI.NLM.NIH.GOV', NULL, NULL, 54);
|
INSERT INTO TRM_CONCEPT_MAP_GROUP (PID, CONCEPT_MAP_URL, SOURCE_URL, SOURCE_VS, SOURCE_VERSION, TARGET_URL, TARGET_VS, TARGET_VERSION, CONCEPT_MAP_PID) VALUES (54, NULL, 'HTTP://LOINC.ORG', NULL, NULL, 'HTTP://PUBCHEM.NCBI.NLM.NIH.GOV', NULL, NULL, 54);
|
||||||
|
@ -107,6 +107,7 @@ INSERT INTO TRM_CONCEPT_MAP_GRP_ELEMENT (PID, SOURCE_CODE, CONCEPT_MAP_URL, SOUR
|
||||||
INSERT INTO TRM_CONCEPT_MAP_GRP_ELM_TGT (PID, TARGET_CODE, CONCEPT_MAP_URL, TARGET_DISPLAY, TARGET_EQUIVALENCE, SYSTEM_URL, SYSTEM_VERSION, VALUESET_URL, CONCEPT_MAP_GRP_ELM_PID) VALUES (60, '1054', NULL, 'PYRIDOXINE', 'EQUAL', NULL, NULL, NULL, 60);
|
INSERT INTO TRM_CONCEPT_MAP_GRP_ELM_TGT (PID, TARGET_CODE, CONCEPT_MAP_URL, TARGET_DISPLAY, TARGET_EQUIVALENCE, SYSTEM_URL, SYSTEM_VERSION, VALUESET_URL, CONCEPT_MAP_GRP_ELM_PID) VALUES (60, '1054', NULL, 'PYRIDOXINE', 'EQUAL', NULL, NULL, NULL, 60);
|
||||||
INSERT INTO TRM_CONCEPT_PC_LINK (PID, CHILD_PID, CODESYSTEM_PID, PARENT_PID, REL_TYPE) VALUES (54, 150, 54, 151, 0);
|
INSERT INTO TRM_CONCEPT_PC_LINK (PID, CHILD_PID, CODESYSTEM_PID, PARENT_PID, REL_TYPE) VALUES (54, 150, 54, 151, 0);
|
||||||
INSERT INTO TRM_CONCEPT_PROPERTY (PID, PROP_CODESYSTEM, PROP_DISPLAY, PROP_KEY, PROP_TYPE, PROP_VAL, PROP_VAL_LOB, CS_VER_PID, CONCEPT_PID) VALUES (152, NULL, NULL, 'CLASSTYPE', 0, '2', NULL, 54, 150);
|
INSERT INTO TRM_CONCEPT_PROPERTY (PID, PROP_CODESYSTEM, PROP_DISPLAY, PROP_KEY, PROP_TYPE, PROP_VAL, PROP_VAL_LOB, CS_VER_PID, CONCEPT_PID) VALUES (152, NULL, NULL, 'CLASSTYPE', 0, '2', NULL, 54, 150);
|
||||||
|
INSERT INTO TRM_CONCEPT_PROPERTY (PID, PROP_CODESYSTEM, PROP_DISPLAY, PROP_KEY, PROP_TYPE, PROP_VAL, PROP_VAL_LOB, CS_VER_PID, CONCEPT_PID) VALUES (153, NULL, NULL, 'CLASSTYPE', 0, NULL, lo_create(1415723), 54, 150);
|
||||||
INSERT INTO TRM_VALUESET (PID, EXPANSION_STATUS, VSNAME, RES_ID, TOTAL_CONCEPT_DESIGNATIONS, TOTAL_CONCEPTS, URL) VALUES (59, 'EXPANDED', 'LOINC UNIVERSAL ORDER SET', 1788, 0, 0, 'HTTP://LOINC.ORG/VS/LOINC-UNIVERSAL-ORDER-SET');
|
INSERT INTO TRM_VALUESET (PID, EXPANSION_STATUS, VSNAME, RES_ID, TOTAL_CONCEPT_DESIGNATIONS, TOTAL_CONCEPTS, URL) VALUES (59, 'EXPANDED', 'LOINC UNIVERSAL ORDER SET', 1788, 0, 0, 'HTTP://LOINC.ORG/VS/LOINC-UNIVERSAL-ORDER-SET');
|
||||||
INSERT INTO TRM_VALUESET_CONCEPT (PID, CODEVAL, DISPLAY, VALUESET_ORDER, SYSTEM_URL, VALUESET_PID) VALUES (176, 'LA13892-7', 'V2.67 MORE THAN 2 SLICES OR 2 DINNER ROLLS', 0, 'HTTP://LOINC.ORG', 59);
|
INSERT INTO TRM_VALUESET_CONCEPT (PID, CODEVAL, DISPLAY, VALUESET_ORDER, SYSTEM_URL, VALUESET_PID) VALUES (176, 'LA13892-7', 'V2.67 MORE THAN 2 SLICES OR 2 DINNER ROLLS', 0, 'HTTP://LOINC.ORG', 59);
|
||||||
INSERT INTO TRM_VALUESET_C_DESIGNATION (PID, VALUESET_CONCEPT_PID, LANG, USE_CODE, USE_DISPLAY, USE_SYSTEM, VAL, VALUESET_PID) VALUES (4, 176, NULL, NULL, NULL, NULL, 'NM THYROID STUDY REPORT', 59);
|
INSERT INTO TRM_VALUESET_C_DESIGNATION (PID, VALUESET_CONCEPT_PID, LANG, USE_CODE, USE_DISPLAY, USE_SYSTEM, VAL, VALUESET_PID) VALUES (4, 176, NULL, NULL, NULL, NULL, 'NM THYROID STUDY REPORT', 59);
|
||||||
|
|
|
@ -54,7 +54,7 @@ INSERT INTO TRM_VALUESET_CONCEPT (
|
||||||
'HTTP://LOINC.ORG',
|
'HTTP://LOINC.ORG',
|
||||||
59,
|
59,
|
||||||
1,
|
1,
|
||||||
10820244,
|
lo_create(10820244),
|
||||||
4824
|
4824
|
||||||
);
|
);
|
||||||
|
|
||||||
|
|
|
@ -1 +1,5 @@
|
||||||
INSERT INTO TRM_CONCEPT_MAP_GRP_ELM_TGT (PID, TARGET_CODE, CONCEPT_MAP_URL, TARGET_DISPLAY, TARGET_EQUIVALENCE, SYSTEM_URL, SYSTEM_VERSION, VALUESET_URL, CONCEPT_MAP_GRP_ELM_PID) VALUES (61, NULL, NULL, 'PYRIDOXINE', 'UNMATCHED', NULL, NULL, NULL, 60);
|
INSERT INTO TRM_CONCEPT_MAP_GRP_ELM_TGT (PID, TARGET_CODE, CONCEPT_MAP_URL, TARGET_DISPLAY, TARGET_EQUIVALENCE, SYSTEM_URL, SYSTEM_VERSION, VALUESET_URL, CONCEPT_MAP_GRP_ELM_PID) VALUES (61, NULL, NULL, 'PYRIDOXINE', 'UNMATCHED', NULL, NULL, NULL, 60);
|
||||||
|
INSERT INTO HFJ_BINARY_STORAGE (CONTENT_ID, RESOURCE_ID, CONTENT_TYPE, STORAGE_CONTENT_BIN, PUBLISHED_DATE ) VALUES ('1', '2', 'TEXT', '\x48656c6c6f20776f726c6421', '2023-06-15 09:58:42.92');
|
||||||
|
INSERT INTO TRM_CONCEPT (PID, CODEVAL, PARENT_PIDS_VC ) VALUES (1, 'aCode', '1 2 3 4');
|
||||||
|
INSERT INTO TRM_CONCEPT_PROPERTY (PID, PROP_KEY, PROP_VAL_BIN, PROP_TYPE) VALUES (1, 'key', '\x48656c6c6f20776f726c6421', 1);
|
||||||
|
INSERT INTO TRM_VALUESET_CONCEPT (PID, VALUESET_PID, VALUESET_ORDER, SOURCE_DIRECT_PARENT_PIDS_VC, SYSTEM_URL, CODEVAL) VALUES (1, 59, 1, '1 2 3 4 5 6', 'http://systemUlr', 'codeVal');
|
||||||
|
|
|
@ -19,7 +19,7 @@ public class BaseBinaryStorageSvcImplTest {
|
||||||
svc.setFhirContextForTests(FhirContext.forR4Cached());
|
svc.setFhirContextForTests(FhirContext.forR4Cached());
|
||||||
svc.setInterceptorBroadcasterForTests(new InterceptorService());
|
svc.setInterceptorBroadcasterForTests(new InterceptorService());
|
||||||
|
|
||||||
String id = svc.newBlobId();
|
String id = svc.newBinaryContentId();
|
||||||
ourLog.info(id);
|
ourLog.info(id);
|
||||||
assertThat(id, matchesPattern("^[a-zA-Z0-9]{100}$"));
|
assertThat(id, matchesPattern("^[a-zA-Z0-9]{100}$"));
|
||||||
}
|
}
|
||||||
|
|
|
@ -75,34 +75,12 @@ public class HapiSchemaMigrationTest {
|
||||||
HapiMigrationDao hapiMigrationDao = new HapiMigrationDao(dataSource, theDriverType, HAPI_FHIR_MIGRATION_TABLENAME);
|
HapiMigrationDao hapiMigrationDao = new HapiMigrationDao(dataSource, theDriverType, HAPI_FHIR_MIGRATION_TABLENAME);
|
||||||
HapiMigrationStorageSvc hapiMigrationStorageSvc = new HapiMigrationStorageSvc(hapiMigrationDao);
|
HapiMigrationStorageSvc hapiMigrationStorageSvc = new HapiMigrationStorageSvc(hapiMigrationDao);
|
||||||
|
|
||||||
VersionEnum[] allVersions = VersionEnum.values();
|
for (VersionEnum aVersion : VersionEnum.values()) {
|
||||||
|
ourLog.info("Applying migrations for {}", aVersion);
|
||||||
|
migrate(theDriverType, dataSource, hapiMigrationStorageSvc, aVersion);
|
||||||
|
|
||||||
List<VersionEnum> dataVersions = List.of(
|
if (aVersion.isNewerThan(FIRST_TESTED_VERSION)) {
|
||||||
VersionEnum.V5_2_0,
|
myEmbeddedServersExtension.maybeInsertPersistenceTestData(theDriverType, aVersion);
|
||||||
VersionEnum.V5_3_0,
|
|
||||||
VersionEnum.V5_4_0,
|
|
||||||
VersionEnum.V5_5_0,
|
|
||||||
VersionEnum.V5_7_0,
|
|
||||||
VersionEnum.V6_0_0,
|
|
||||||
VersionEnum.V6_1_0,
|
|
||||||
VersionEnum.V6_2_0,
|
|
||||||
VersionEnum.V6_3_0,
|
|
||||||
VersionEnum.V6_6_0,
|
|
||||||
VersionEnum.V6_8_0,
|
|
||||||
VersionEnum.V7_0_0,
|
|
||||||
VersionEnum.V7_2_0
|
|
||||||
);
|
|
||||||
|
|
||||||
int fromVersion = 0;
|
|
||||||
VersionEnum from = allVersions[fromVersion];
|
|
||||||
VersionEnum toVersion;
|
|
||||||
|
|
||||||
for (int i = 0; i < allVersions.length; i++) {
|
|
||||||
toVersion = allVersions[i];
|
|
||||||
ourLog.info("Applying migrations for {}", toVersion);
|
|
||||||
migrate(theDriverType, dataSource, hapiMigrationStorageSvc, toVersion);
|
|
||||||
if (dataVersions.contains(toVersion)) {
|
|
||||||
myEmbeddedServersExtension.insertPersistenceTestData(theDriverType, toVersion);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -119,14 +97,6 @@ public class HapiSchemaMigrationTest {
|
||||||
verifyForcedIdMigration(dataSource);
|
verifyForcedIdMigration(dataSource);
|
||||||
}
|
}
|
||||||
|
|
||||||
private static void migrate(DriverTypeEnum theDriverType, DataSource dataSource, HapiMigrationStorageSvc hapiMigrationStorageSvc, VersionEnum from, VersionEnum to) throws SQLException {
|
|
||||||
MigrationTaskList migrationTasks = new HapiFhirJpaMigrationTasks(Collections.emptySet()).getTaskList(from, to);
|
|
||||||
SchemaMigrator schemaMigrator = new SchemaMigrator(TEST_SCHEMA_NAME, HAPI_FHIR_MIGRATION_TABLENAME, dataSource, new Properties(), migrationTasks, hapiMigrationStorageSvc);
|
|
||||||
schemaMigrator.setDriverType(theDriverType);
|
|
||||||
schemaMigrator.createMigrationTableIfRequired();
|
|
||||||
schemaMigrator.migrate();
|
|
||||||
}
|
|
||||||
|
|
||||||
private static void migrate(DriverTypeEnum theDriverType, DataSource dataSource, HapiMigrationStorageSvc hapiMigrationStorageSvc, VersionEnum to) throws SQLException {
|
private static void migrate(DriverTypeEnum theDriverType, DataSource dataSource, HapiMigrationStorageSvc hapiMigrationStorageSvc, VersionEnum to) throws SQLException {
|
||||||
MigrationTaskList migrationTasks = new HapiFhirJpaMigrationTasks(Collections.emptySet()).getAllTasks(new VersionEnum[]{to});
|
MigrationTaskList migrationTasks = new HapiFhirJpaMigrationTasks(Collections.emptySet()).getAllTasks(new VersionEnum[]{to});
|
||||||
SchemaMigrator schemaMigrator = new SchemaMigrator(TEST_SCHEMA_NAME, HAPI_FHIR_MIGRATION_TABLENAME, dataSource, new Properties(), migrationTasks, hapiMigrationStorageSvc);
|
SchemaMigrator schemaMigrator = new SchemaMigrator(TEST_SCHEMA_NAME, HAPI_FHIR_MIGRATION_TABLENAME, dataSource, new Properties(), migrationTasks, hapiMigrationStorageSvc);
|
||||||
|
@ -169,5 +139,4 @@ public class HapiSchemaMigrationTest {
|
||||||
assertFalse(schemaMigrator.createMigrationTableIfRequired());
|
assertFalse(schemaMigrator.createMigrationTableIfRequired());
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
|
@ -0,0 +1,85 @@
|
||||||
|
/*-
|
||||||
|
* #%L
|
||||||
|
* HAPI FHIR Server - SQL Migration
|
||||||
|
* %%
|
||||||
|
* Copyright (C) 2014 - 2024 Smile CDR, Inc.
|
||||||
|
* %%
|
||||||
|
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
* you may not use this file except in compliance with the License.
|
||||||
|
* You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing, software
|
||||||
|
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
* See the License for the specific language governing permissions and
|
||||||
|
* limitations under the License.
|
||||||
|
* #L%
|
||||||
|
*/
|
||||||
|
package ca.uhn.fhir.jpa.migrate.taskdef;
|
||||||
|
|
||||||
|
import ca.uhn.fhir.i18n.Msg;
|
||||||
|
|
||||||
|
import java.sql.SQLException;
|
||||||
|
|
||||||
|
public class MigrateColumBlobTypeToBinaryTypeTask extends BaseTableColumnTask {
|
||||||
|
|
||||||
|
private final String myFromColumName;
|
||||||
|
private final String myToColumName;
|
||||||
|
|
||||||
|
public MigrateColumBlobTypeToBinaryTypeTask(
|
||||||
|
String theProductVersion,
|
||||||
|
String theSchemaVersion,
|
||||||
|
String theTableName,
|
||||||
|
String theFromColumName,
|
||||||
|
String theToColumName) {
|
||||||
|
super(theProductVersion, theSchemaVersion);
|
||||||
|
myFromColumName = theFromColumName;
|
||||||
|
myToColumName = theToColumName;
|
||||||
|
|
||||||
|
setTableName(theTableName);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void validate() {
|
||||||
|
setDescription("Migrating BLob (oid) from colum " + myFromColumName + " to BINARY on colum " + myToColumName
|
||||||
|
+ " for table " + getTableName() + " (only affects Postgresql)");
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
protected void doExecute() throws SQLException {
|
||||||
|
String sql = buildSqlStatement();
|
||||||
|
|
||||||
|
executeSql(getTableName(), sql);
|
||||||
|
}
|
||||||
|
|
||||||
|
String buildSqlStatement() {
|
||||||
|
String tableName = getTableName().toLowerCase();
|
||||||
|
String fromColumName = myFromColumName.toLowerCase();
|
||||||
|
String toColumName = myToColumName.toLowerCase();
|
||||||
|
|
||||||
|
String retVal;
|
||||||
|
|
||||||
|
switch (getDriverType()) {
|
||||||
|
case MYSQL_5_7:
|
||||||
|
case DERBY_EMBEDDED:
|
||||||
|
case ORACLE_12C:
|
||||||
|
case MARIADB_10_1:
|
||||||
|
case COCKROACHDB_21_1:
|
||||||
|
case H2_EMBEDDED:
|
||||||
|
case MSSQL_2012:
|
||||||
|
retVal = "update " + tableName + " set " + toColumName + " = " + fromColumName + " where "
|
||||||
|
+ fromColumName + " is not null";
|
||||||
|
break;
|
||||||
|
case POSTGRES_9_4:
|
||||||
|
retVal = "update " + tableName + " set " + toColumName + " = lo_get(" + fromColumName + ") where "
|
||||||
|
+ fromColumName + " is not null";
|
||||||
|
break;
|
||||||
|
default:
|
||||||
|
throw new IllegalStateException(Msg.code(2514) + "Driver is not supported or null.");
|
||||||
|
}
|
||||||
|
|
||||||
|
return retVal;
|
||||||
|
}
|
||||||
|
}
|
|
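For orientation, the following is a minimal sketch, not part of the commit, of what the new task emits per driver. It assumes it lives in the same package as the task, since buildSqlStatement() is package-private, and the version strings, table name and column names are invented.

package ca.uhn.fhir.jpa.migrate.taskdef;

import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;

// Illustrative preview of the generated UPDATE statements; names are invented.
public class MigrateBlobTaskSqlPreview {

    public static void main(String[] args) {
        MigrateColumBlobTypeToBinaryTypeTask task = new MigrateColumBlobTypeToBinaryTypeTask(
                "1", "1", "HFJ_EXAMPLE", "BLOB_COL", "BIN_COL");

        // Postgres dereferences the large-object OID with lo_get(...)
        task.setDriverType(DriverTypeEnum.POSTGRES_9_4);
        System.out.println(task.buildSqlStatement());
        // update hfj_example set bin_col = lo_get(blob_col) where blob_col is not null

        // Every other supported driver is a plain column-to-column copy
        task.setDriverType(DriverTypeEnum.H2_EMBEDDED);
        System.out.println(task.buildSqlStatement());
        // update hfj_example set bin_col = blob_col where blob_col is not null
    }
}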
@ -0,0 +1,84 @@
/*-
 * #%L
 * HAPI FHIR Server - SQL Migration
 * %%
 * Copyright (C) 2014 - 2024 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */
package ca.uhn.fhir.jpa.migrate.taskdef;

import ca.uhn.fhir.i18n.Msg;

import java.sql.SQLException;

public class MigrateColumnClobTypeToTextTypeTask extends BaseTableColumnTask {

    private final String myFromColumName;
    private final String myToColumName;

    public MigrateColumnClobTypeToTextTypeTask(
            String theProductVersion,
            String theSchemaVersion,
            String theTableName,
            String theFromColumName,
            String theToColumName) {
        super(theProductVersion, theSchemaVersion);
        myFromColumName = theFromColumName;
        myToColumName = theToColumName;

        setTableName(theTableName);
    }

    @Override
    public void validate() {
        setDescription("Migrating CLob (oid) from colum " + myFromColumName + " to " + myToColumName
                + ".TEXT for table " + getTableName() + " (only affects Postgresql)");
    }

    @Override
    protected void doExecute() throws SQLException {
        String sql = buildSqlStatement();
        executeSql(getTableName(), sql);
    }

    String buildSqlStatement() {
        String tableName = getTableName().toLowerCase();
        String fromColumName = myFromColumName.toLowerCase();
        String toColumName = myToColumName.toLowerCase();

        String retVal;

        switch (getDriverType()) {
            case MYSQL_5_7:
            case DERBY_EMBEDDED:
            case ORACLE_12C:
            case MARIADB_10_1:
            case COCKROACHDB_21_1:
            case H2_EMBEDDED:
            case MSSQL_2012:
                retVal = "update " + tableName + " set " + toColumName + " = " + fromColumName + " where "
                        + fromColumName + " is not null";
                break;
            case POSTGRES_9_4:
                retVal = "update " + tableName + " set " + toColumName + " = convert_from(lo_get(" + fromColumName
                        + "), 'UTF8') where " + fromColumName + " is not null";
                break;
            default:
                throw new IllegalStateException(Msg.code(2515));
        }

        return retVal;
    }
}
@ -0,0 +1,151 @@
/*-
 * #%L
 * HAPI FHIR Server - SQL Migration
 * %%
 * Copyright (C) 2014 - 2024 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */
package ca.uhn.fhir.jpa.migrate.taskdef;

import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.migrate.JdbcUtils;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.intellij.lang.annotations.Language;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.jdbc.core.ColumnMapRowMapper;
import org.springframework.jdbc.core.JdbcTemplate;

import java.sql.SQLException;
import java.util.Set;

public class RenameTableTask extends BaseTableTask {

    private static final Logger ourLog = LoggerFactory.getLogger(RenameTableTask.class);

    private final String myOldTableName;
    private final String myNewTableName;
    private boolean myDeleteTargetColumnFirstIfExist = true;

    public RenameTableTask(
            String theProductVersion, String theSchemaVersion, String theOldTableName, String theNewTableName) {
        super(theProductVersion, theSchemaVersion);
        myOldTableName = theOldTableName;
        myNewTableName = theNewTableName;
    }

    @Override
    public void validate() {
        setDescription("Rename table " + getOldTableName());
    }

    private void handleTableWithNewTableName() throws SQLException {

        if (!myDeleteTargetColumnFirstIfExist) {
            throw new SQLException(Msg.code(2517) + "Can not rename " + getOldTableName() + " to " + getNewTableName()
                    + " because a table with name " + getNewTableName() + " already exists");
        }

        // a table with the new tableName already exists and we can delete it. we will only do so if it is empty.
        Integer rowsWithData = getConnectionProperties().getTxTemplate().execute(t -> {
            String sql = "SELECT * FROM " + getNewTableName();
            JdbcTemplate jdbcTemplate = getConnectionProperties().newJdbcTemplate();
            jdbcTemplate.setMaxRows(1);
            return jdbcTemplate.query(sql, new ColumnMapRowMapper()).size();
        });

        if (rowsWithData != null && rowsWithData > 0) {
            throw new SQLException(Msg.code(2518) + "Can not rename " + getOldTableName() + " to " + getNewTableName()
                    + " because a table with name " + getNewTableName() + " already exists and is populated.");
        }

        logInfo(
                ourLog,
                "Table {} already exists - Going to drop it before renaming table {} to {}",
                getNewTableName(),
                getOldTableName(),
                getNewTableName());

        @Language("SQL")
        String sql = "DROP TABLE " + getNewTableName();
        executeSql(getNewTableName(), sql);
    }

    @Override
    public void doExecute() throws SQLException {

        Set<String> tableNames = JdbcUtils.getTableNames(getConnectionProperties());
        boolean hasTableWithNewTableName = tableNames.contains(getNewTableName());

        if (!tableNames.contains(getOldTableName())) {
            throw new SQLException(Msg.code(2516) + "Can not rename " + getOldTableName() + " to " + getNewTableName()
                    + " because the original table does not exists");
        }

        if (hasTableWithNewTableName) {
            handleTableWithNewTableName();
        }

        String sql = buildRenameTableSqlStatement();
        logInfo(ourLog, "Renaming table: {}", getOldTableName());

        executeSql(getOldTableName(), sql);
    }

    public void setDeleteTargetColumnFirstIfExist(boolean theDeleteTargetColumnFirstIfExist) {
        myDeleteTargetColumnFirstIfExist = theDeleteTargetColumnFirstIfExist;
    }

    public String getNewTableName() {
        return myNewTableName;
    }

    public String getOldTableName() {
        return myOldTableName;
    }

    String buildRenameTableSqlStatement() {
        String retVal;

        final String oldTableName = getOldTableName();
        final String newTableName = getNewTableName();

        switch (getDriverType()) {
            case MYSQL_5_7:
            case DERBY_EMBEDDED:
                retVal = "rename table " + oldTableName + " to " + newTableName;
                break;
            case ORACLE_12C:
            case MARIADB_10_1:
            case POSTGRES_9_4:
            case COCKROACHDB_21_1:
            case H2_EMBEDDED:
                retVal = "alter table " + oldTableName + " rename to " + newTableName;
                break;
            case MSSQL_2012:
                retVal = "sp_rename '" + oldTableName + "', '" + newTableName + "'";
                break;
            default:
                throw new IllegalStateException(Msg.code(2513));
        }
        return retVal;
    }

    protected void generateHashCode(HashCodeBuilder theBuilder) {
        super.generateHashCode(theBuilder);
        theBuilder.append(myOldTableName);
        theBuilder.append(myNewTableName);
    }
}
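The drop-before-rename behaviour above is configurable. A brief sketch, with invented version strings and table names and not taken from the commit, of turning it off so that an existing target table makes the migration fail instead of being dropped:

package ca.uhn.fhir.jpa.migrate.taskdef;

// Illustrative usage only; placed in the same package purely for brevity.
public class RenameTableTaskUsageSketch {

    public static RenameTableTask buildTask() {
        RenameTableTask task = new RenameTableTask("1", "1", "HFJ_OLD_TABLE", "HFJ_NEW_TABLE");

        // Default is true: an existing *empty* HFJ_NEW_TABLE would be dropped before the rename.
        // With false, the task raises error 2517 instead of dropping anything.
        task.setDeleteTargetColumnFirstIfExist(false);
        return task;
    }
}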
@ -31,6 +31,8 @@ import org.flywaydb.core.api.MigrationVersion;

import java.util.Collection;

import static java.util.Objects.nonNull;

public class BaseMigrationTasks<T extends Enum> {
    MigrationVersion lastVersion;
    private Multimap<T, BaseTask> myTasks =

@ -71,7 +73,7 @@ public class BaseMigrationTasks<T extends Enum> {
        return theRelease.name();
    }

    public MigrationTaskList getAllTasks(T[] theVersionEnumValues) {
    public MigrationTaskList getAllTasks(T... theVersionEnumValues) {
        MigrationTaskList retval = new MigrationTaskList();
        for (T nextVersion : theVersionEnumValues) {
            Collection<BaseTask> nextValues = myTasks.get(nextVersion);

@ -84,6 +86,11 @@ public class BaseMigrationTasks<T extends Enum> {
        return retval;
    }

    public boolean hasTasksForVersion(T theRelease) {
        Collection<BaseTask> baseTasks = myTasks.get(theRelease);
        return nonNull(baseTasks) && !baseTasks.isEmpty();
    }

    protected BaseTask getTaskWithVersion(String theMigrationVersion) {
        // First normalize the version number
        String expectedVersion =
@ -39,11 +39,14 @@ import ca.uhn.fhir.jpa.migrate.taskdef.DropTableTask;
import ca.uhn.fhir.jpa.migrate.taskdef.ExecuteRawSqlTask;
import ca.uhn.fhir.jpa.migrate.taskdef.ExecuteTaskPrecondition;
import ca.uhn.fhir.jpa.migrate.taskdef.InitializeSchemaTask;
import ca.uhn.fhir.jpa.migrate.taskdef.MigrateColumBlobTypeToBinaryTypeTask;
import ca.uhn.fhir.jpa.migrate.taskdef.MigrateColumnClobTypeToTextTypeTask;
import ca.uhn.fhir.jpa.migrate.taskdef.MigratePostgresTextClobToBinaryClobTask;
import ca.uhn.fhir.jpa.migrate.taskdef.ModifyColumnTask;
import ca.uhn.fhir.jpa.migrate.taskdef.NopTask;
import ca.uhn.fhir.jpa.migrate.taskdef.RenameColumnTask;
import ca.uhn.fhir.jpa.migrate.taskdef.RenameIndexTask;
import ca.uhn.fhir.jpa.migrate.taskdef.RenameTableTask;
import org.apache.commons.lang3.Validate;
import org.intellij.lang.annotations.Language;
import org.slf4j.Logger;

@ -320,6 +323,11 @@ public class Builder {
            addTask(task);
        }

        public void renameTable(String theVersion, String theNewTableName) {
            RenameTableTask task = new RenameTableTask(myRelease, theVersion, getTableName(), theNewTableName);
            addTask(task);
        }

        public void migratePostgresTextClobToBinaryClob(String theVersion, String theColumnName) {
            MigratePostgresTextClobToBinaryClobTask task =
                    new MigratePostgresTextClobToBinaryClobTask(myRelease, theVersion);

@ -328,6 +336,20 @@ public class Builder {
            addTask(task);
        }

        public void migrateBlobToBinary(String theVersion, String theFromColumName, String theToColumName) {
            MigrateColumBlobTypeToBinaryTypeTask task = new MigrateColumBlobTypeToBinaryTypeTask(
                    myRelease, theVersion, getTableName(), theFromColumName, theToColumName);

            addTask(task);
        }

        public void migrateClobToText(String theVersion, String theFromColumName, String theToColumName) {
            MigrateColumnClobTypeToTextTypeTask task = new MigrateColumnClobTypeToTextTypeTask(
                    myRelease, theVersion, getTableName(), theFromColumName, theToColumName);

            addTask(task);
        }

        public class BuilderAddIndexWithName {
            private final String myVersion;
            private final String myIndexName;
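A hypothetical wiring sketch of how these helpers might be combined when defining a release's migration tasks. Only renameTable(), migrateBlobToBinary() and migrateClobToText() come from this change; the release enum value, version strings, table and column names, and the forVersion(...)/onTable(...) entry points are assumed from the usual HAPI FHIR migration pattern.

// Fragment assumed to live in a HapiFhirJpaMigrationTasks-style subclass of BaseMigrationTasks.
protected void init720() {
    Builder version = forVersion(VersionEnum.V7_2_0);

    // Copy the Postgres large-object column into an inline column, then retire the old table name.
    Builder.BuilderWithTableName binaryTable = version.onTable("HFJ_BINARY_STORAGE_BLOB");
    binaryTable.migrateBlobToBinary("20240101.1", "BLOB_DATA", "STORAGE_CONTENT_BIN");
    binaryTable.migrateClobToText("20240101.2", "BLOB_DATA", "STORAGE_CONTENT_TEXT");
    binaryTable.renameTable("20240101.3", "HFJ_BINARY_STORAGE");
}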
@ -136,6 +136,7 @@ public abstract class BaseTest {
        if (getConnectionProperties() != null) {
            Set<String> tableNames = JdbcUtils.getTableNames(getConnectionProperties());
            if (tableNames.contains(SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME)) {
                ourLog.info("Deleting entries in " + SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME);
                executeSql("DELETE from " + SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME + " where \"installed_rank\" > 0");
            }
        }

@ -162,6 +163,7 @@ public abstract class BaseTest {
    public void after() {
        if (myConnectionProperties != null) {
            myConnectionProperties.close();
            ourLog.info("connectionProperties was closed");
        }
    }
@ -0,0 +1,42 @@
package ca.uhn.fhir.jpa.migrate.taskdef;

import static ca.uhn.fhir.jpa.migrate.DriverTypeEnum.*;

import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;

import java.util.stream.Stream;

import static org.junit.jupiter.api.Assertions.assertEquals;

public class MigrateColumBlobTypeToBinaryTypeTaskDbSpecificTest {

    private String createMigrationSqlForDriverType(DriverTypeEnum theDriverTypeEnum) {
        MigrateColumBlobTypeToBinaryTypeTask task = new MigrateColumBlobTypeToBinaryTypeTask("1", "1", "SOMETABLE", "BLOB_COLUM_NAME", "BIN_COLUM_NAME");
        task.setDriverType(theDriverTypeEnum);

        return task.buildSqlStatement();
    }

    static Stream<Arguments> paramArguments(){
        return Stream.of(
            Arguments.of("update sometable set bin_colum_name = blob_colum_name where blob_colum_name is not null", MYSQL_5_7),
            Arguments.of("update sometable set bin_colum_name = blob_colum_name where blob_colum_name is not null", DERBY_EMBEDDED),
            Arguments.of("update sometable set bin_colum_name = blob_colum_name where blob_colum_name is not null", ORACLE_12C),
            Arguments.of("update sometable set bin_colum_name = blob_colum_name where blob_colum_name is not null", MARIADB_10_1),
            Arguments.of("update sometable set bin_colum_name = blob_colum_name where blob_colum_name is not null", COCKROACHDB_21_1),
            Arguments.of("update sometable set bin_colum_name = blob_colum_name where blob_colum_name is not null", H2_EMBEDDED),
            Arguments.of("update sometable set bin_colum_name = blob_colum_name where blob_colum_name is not null", MSSQL_2012),

            Arguments.of("update sometable set bin_colum_name = lo_get(blob_colum_name) where blob_colum_name is not null", POSTGRES_9_4)
        );
    }

    @ParameterizedTest
    @MethodSource("paramArguments")
    public void testBuildSqlStatementForMySql(String theExpectedSqlString, DriverTypeEnum theDriverTypeEnum) {
        assertEquals(theExpectedSqlString, createMigrationSqlForDriverType(theDriverTypeEnum));
    }

}
@ -0,0 +1,47 @@
package ca.uhn.fhir.jpa.migrate.taskdef;

import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;

import java.util.stream.Stream;

import static ca.uhn.fhir.jpa.migrate.DriverTypeEnum.COCKROACHDB_21_1;
import static ca.uhn.fhir.jpa.migrate.DriverTypeEnum.DERBY_EMBEDDED;
import static ca.uhn.fhir.jpa.migrate.DriverTypeEnum.H2_EMBEDDED;
import static ca.uhn.fhir.jpa.migrate.DriverTypeEnum.MARIADB_10_1;
import static ca.uhn.fhir.jpa.migrate.DriverTypeEnum.MSSQL_2012;
import static ca.uhn.fhir.jpa.migrate.DriverTypeEnum.MYSQL_5_7;
import static ca.uhn.fhir.jpa.migrate.DriverTypeEnum.ORACLE_12C;
import static org.junit.jupiter.api.Assertions.assertEquals;

public class MigrateColumnClobTypeToTextTypeTaskDbSpecificTest {

    private String createMigrationSqlForDriverType(DriverTypeEnum theDriverTypeEnum) {
        MigrateColumnClobTypeToTextTypeTask task = new MigrateColumnClobTypeToTextTypeTask("1", "1", "sometable", "clob_colum_name", "text_colum_name");
        task.setDriverType(theDriverTypeEnum);

        return task.buildSqlStatement();
    }

    static Stream<Arguments> paramArguments(){
        return Stream.of(
            Arguments.of("update sometable set text_colum_name = clob_colum_name where clob_colum_name is not null", MYSQL_5_7),
            Arguments.of("update sometable set text_colum_name = clob_colum_name where clob_colum_name is not null", DERBY_EMBEDDED),
            Arguments.of("update sometable set text_colum_name = clob_colum_name where clob_colum_name is not null", ORACLE_12C),
            Arguments.of("update sometable set text_colum_name = clob_colum_name where clob_colum_name is not null", MARIADB_10_1),
            Arguments.of("update sometable set text_colum_name = clob_colum_name where clob_colum_name is not null", COCKROACHDB_21_1),
            Arguments.of("update sometable set text_colum_name = clob_colum_name where clob_colum_name is not null", H2_EMBEDDED),
            Arguments.of("update sometable set text_colum_name = clob_colum_name where clob_colum_name is not null", MSSQL_2012),

            Arguments.of("update sometable set text_colum_name = convert_from(lo_get(clob_colum_name), 'UTF8') where clob_colum_name is not null", DriverTypeEnum.POSTGRES_9_4)
        );
    }

    @ParameterizedTest
    @MethodSource("paramArguments")
    public void testBuildSqlStatementForMySql(String theExpectedSqlString, DriverTypeEnum theDriverTypeEnum) {
        assertEquals(theExpectedSqlString, createMigrationSqlForDriverType(theDriverTypeEnum));
    }

}
@ -0,0 +1,45 @@
package ca.uhn.fhir.jpa.migrate.taskdef;

import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;

import java.util.stream.Stream;

import static ca.uhn.fhir.jpa.migrate.DriverTypeEnum.DERBY_EMBEDDED;
import static ca.uhn.fhir.jpa.migrate.DriverTypeEnum.H2_EMBEDDED;
import static ca.uhn.fhir.jpa.migrate.DriverTypeEnum.MARIADB_10_1;
import static ca.uhn.fhir.jpa.migrate.DriverTypeEnum.MSSQL_2012;
import static ca.uhn.fhir.jpa.migrate.DriverTypeEnum.MYSQL_5_7;
import static ca.uhn.fhir.jpa.migrate.DriverTypeEnum.ORACLE_12C;
import static org.junit.jupiter.api.Assertions.assertEquals;

public class RenameTableTaskDbSpecificTest {

    private String createRenameTableSql(DriverTypeEnum theDriverTypeEnum) {
        RenameTableTask task = new RenameTableTask("1", "1", "oldname","newname");
        task.setDriverType(theDriverTypeEnum);

        return task.buildRenameTableSqlStatement();
    }

    static Stream<Arguments> paramArguments(){
        return Stream.of(
            Arguments.of("rename table oldname to newname", MYSQL_5_7),
            Arguments.of("rename table oldname to newname", DERBY_EMBEDDED),
            Arguments.of("alter table oldname rename to newname", ORACLE_12C),
            Arguments.of("alter table oldname rename to newname", MARIADB_10_1),
            Arguments.of("alter table oldname rename to newname", DriverTypeEnum.POSTGRES_9_4),
            Arguments.of("alter table oldname rename to newname", H2_EMBEDDED),
            Arguments.of("sp_rename 'oldname', 'newname'", MSSQL_2012)
        );
    }

    @ParameterizedTest
    @MethodSource("paramArguments")
    public void testBuildSqlStatementForMySql(String theExpectedSqlString, DriverTypeEnum theDriverTypeEnum) {
        assertEquals(theExpectedSqlString, createRenameTableSql(theDriverTypeEnum));
    }

}
@ -0,0 +1,66 @@
package ca.uhn.fhir.jpa.migrate.taskdef;

import ca.uhn.fhir.jpa.migrate.JdbcUtils;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;

import java.sql.SQLException;
import java.util.Set;
import java.util.function.Supplier;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.hasItem;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.not;
import static org.junit.jupiter.api.Assertions.fail;

public class RenameTableTaskTest extends BaseTest {

    @ParameterizedTest(name = "{index}: {0}")
    @MethodSource("data")
    public void testRenameTableTask_whenTableExists_willRenameTheTable(Supplier<TestDatabaseDetails> theTestDatabaseDetails) throws SQLException {
        // given
        before(theTestDatabaseDetails);
        final String newTableName = "NEWTABLE";
        final String oldTableName = "SOMETABLE";

        executeSql("create table " + oldTableName + " (PID bigint not null, TEXTCOL varchar(255))");

        RenameTableTask task = new RenameTableTask("1", "1", oldTableName, newTableName);
        task.setTableName(oldTableName);
        getMigrator().addTask(task);

        // when
        getMigrator().migrate();

        // then
        Set<String> tableNames = JdbcUtils.getTableNames(getConnectionProperties());
        assertThat(tableNames, hasItem(newTableName));
        assertThat(tableNames, not(hasItem(oldTableName)));
    }

    @ParameterizedTest(name = "{index}: {0}")
    @MethodSource("data")
    public void testRenameTableTask_whenTableDoesNotExists_willRaiseException(Supplier<TestDatabaseDetails> theTestDatabaseDetails) throws SQLException {
        // given
        before(theTestDatabaseDetails);
        final String newTableName = "NEWTABLE";
        final String oldTableName = "SOMETABLE";

        RenameTableTask task = new RenameTableTask("1", "1", oldTableName, newTableName);
        getMigrator().addTask(task);

        // when
        try {
            getMigrator().migrate();
            fail();
        } catch (Exception e){
            // then
            assertThat(e.getMessage(), containsString("2516"));
        }

    }

}
@ -77,7 +77,8 @@ public class BulkExportJobParametersValidator implements IJobParametersValidator
        // validate the exportId
        if (!StringUtils.isBlank(theParameters.getExportIdentifier())) {

            if (myBinaryStorageSvc != null && !myBinaryStorageSvc.isValidBlobId(theParameters.getExportIdentifier())) {
            if (myBinaryStorageSvc != null
                    && !myBinaryStorageSvc.isValidBinaryContentId(theParameters.getExportIdentifier())) {
                errorMsgs.add("Export ID does not conform to the current blob storage implementation's limitations.");
            }
        }
@ -67,7 +67,7 @@ public class BulkExportJobParametersValidatorTest {
        // when
        when(myDaoRegistry.isResourceTypeSupported(anyString()))
                .thenReturn(true);
        when(myIBinaryStorageSvc.isValidBlobId(any())).thenReturn(false);
        when(myIBinaryStorageSvc.isValidBinaryContentId(any())).thenReturn(false);
        List<String> errors = myValidator.validate(null, parameters);

        // verify

@ -84,7 +84,7 @@ public class BulkExportJobParametersValidatorTest {
        when(myDaoRegistry.isResourceTypeSupported(anyString()))
                .thenReturn(true);

        when(myIBinaryStorageSvc.isValidBlobId(any())).thenReturn(true);
        when(myIBinaryStorageSvc.isValidBinaryContentId(any())).thenReturn(true);
        List<String> errors = myValidator.validate(null, parameters);

        // verify
@ -43,7 +43,7 @@ public interface IBinaryStorageSvc {
     * @param theNewBlobId the blob ID to validate
     * @return true if the blob ID is valid, false otherwise.
     */
    default boolean isValidBlobId(String theNewBlobId) {
    default boolean isValidBinaryContentId(String theNewBlobId) {
        return true; // default method here as we don't want to break existing implementations
    }

@ -77,12 +77,12 @@ public interface IBinaryStorageSvc {
     * @param theContentType What is the content type
     * @return <code>true</code> if the storage service should store the item
     */
    boolean shouldStoreBlob(long theSize, IIdType theResourceId, String theContentType);
    boolean shouldStoreBinaryContent(long theSize, IIdType theResourceId, String theContentType);

    /**
     * Generate a new blob ID that will be passed to {@link #storeBlob(IIdType, String, String, InputStream)} later
     * Generate a new binaryContent ID that will be passed to {@link #storeBinaryContent(IIdType, String, String, InputStream)} later
     */
    String newBlobId();
    String newBinaryContentId();

    /**
     * Store a new binary blob

@ -92,16 +92,17 @@ public interface IBinaryStorageSvc {
     * @param theContentType The content type to associate with this blob
     * @param theInputStream An InputStream to read from. This method should close the stream when it has been fully consumed.
     * @return Returns details about the stored data
     * @deprecated Use {@link #storeBlob(IIdType theResourceId, String theBlobIdOrNull, String theContentType,
     * @deprecated Use {@link #storeBinaryContent(IIdType theResourceId, String theBlobIdOrNull, String theContentType,
     * InputStream theInputStream, RequestDetails theRequestDetails)} instead. This method
     * will be removed because it doesn't receive the 'theRequestDetails' parameter it needs to forward to the pointcut)
     */
    @Deprecated(since = "6.6.0", forRemoval = true)
    @Nonnull
    default StoredDetails storeBlob(
    default StoredDetails storeBinaryContent(
            IIdType theResourceId, String theBlobIdOrNull, String theContentType, InputStream theInputStream)
            throws IOException {
        return storeBlob(theResourceId, theBlobIdOrNull, theContentType, theInputStream, new ServletRequestDetails());
        return storeBinaryContent(
                theResourceId, theBlobIdOrNull, theContentType, theInputStream, new ServletRequestDetails());
    }

    /**

@ -115,7 +116,7 @@ public interface IBinaryStorageSvc {
     * @return Returns details about the stored data
     */
    @Nonnull
    StoredDetails storeBlob(
    StoredDetails storeBinaryContent(
            IIdType theResourceId,
            String theBlobIdOrNull,
            String theContentType,

@ -123,14 +124,15 @@ public interface IBinaryStorageSvc {
            RequestDetails theRequestDetails)
            throws IOException;

    StoredDetails fetchBlobDetails(IIdType theResourceId, String theBlobId) throws IOException;
    StoredDetails fetchBinaryContentDetails(IIdType theResourceId, String theBlobId) throws IOException;

    /**
     * @return Returns <code>true</code> if the blob was found and written, of <code>false</code> if the blob was not found (i.e. it was expunged or the ID was invalid)
     */
    boolean writeBlob(IIdType theResourceId, String theBlobId, OutputStream theOutputStream) throws IOException;
    boolean writeBinaryContent(IIdType theResourceId, String theBlobId, OutputStream theOutputStream)
            throws IOException;

    void expungeBlob(IIdType theResourceId, String theBlobId);
    void expungeBinaryContent(IIdType theResourceId, String theBlobId);

    /**
     * Fetch the contents of the given blob

@ -139,7 +141,7 @@ public interface IBinaryStorageSvc {
     * @param theBlobId The blob ID
     * @return The payload as a byte array
     */
    byte[] fetchBlob(IIdType theResourceId, String theBlobId) throws IOException;
    byte[] fetchBinaryContent(IIdType theResourceId, String theBlobId) throws IOException;

    /**
     * Fetch the byte[] contents of a given Binary resource's `data` element. If the data is a standard base64encoded string that is embedded, return it.

@ -148,5 +150,5 @@ public interface IBinaryStorageSvc {
     * @param theResource The Binary resource you want to extract data bytes from
     * @return The binary data blob as a byte array
     */
    byte[] fetchDataBlobFromBinary(IBaseBinary theResource) throws IOException;
    byte[] fetchDataByteArrayFromBinary(IBaseBinary theResource) throws IOException;
}
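For callers, the net effect of the renames in this interface is illustrated by the sketch below. It is not from the commit: the class name, payload and content type are invented, the service instance is assumed to be injected, and the package names of the binary storage classes are assumed from the HAPI FHIR storage module.

import ca.uhn.fhir.jpa.binary.api.IBinaryStorageSvc;
import ca.uhn.fhir.jpa.binary.api.StoredDetails;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import org.hl7.fhir.instance.model.api.IIdType;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

// Illustrative round trip through the renamed API.
public class BinaryContentRoundTripSketch {

    public static byte[] roundTrip(IBinaryStorageSvc theSvc, IIdType theResourceId, RequestDetails theRequestDetails)
            throws IOException {
        byte[] payload = "hello".getBytes(StandardCharsets.UTF_8);

        // Formerly storeBlob(...)
        StoredDetails details = theSvc.storeBinaryContent(
                theResourceId, null, "text/plain", new ByteArrayInputStream(payload), theRequestDetails);

        // Formerly getBlobId() and fetchBlob(...)
        return theSvc.fetchBinaryContent(theResourceId, details.getBinaryContentId());
    }
}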
@ -33,8 +33,8 @@ import java.util.Date;

public class StoredDetails implements IModelJson {

    @JsonProperty("blobId")
    @JsonProperty("binaryContentId")
    private String myBlobId;
    private String myBinaryContentId;

    @JsonProperty("bytes")
    private long myBytes;

@ -62,12 +62,12 @@ public class StoredDetails implements IModelJson {
     * Constructor
     */
    public StoredDetails(
            @Nonnull String theBlobId,
            @Nonnull String theBinaryContentId,
            long theBytes,
            @Nonnull String theContentType,
            HashingInputStream theIs,
            Date thePublished) {
        myBlobId = theBlobId;
        myBinaryContentId = theBinaryContentId;
        myBytes = theBytes;
        myContentType = theContentType;
        myHash = theIs.hash().toString();

@ -77,7 +77,7 @@ public class StoredDetails implements IModelJson {
    @Override
    public String toString() {
        return new ToStringBuilder(this)
                .append("blobId", myBlobId)
                .append("binaryContentId", myBinaryContentId)
                .append("bytes", myBytes)
                .append("contentType", myContentType)
                .append("hash", myHash)

@ -114,12 +114,12 @@ public class StoredDetails implements IModelJson {
    }

    @Nonnull
    public String getBlobId() {
    public String getBinaryContentId() {
        return myBlobId;
        return myBinaryContentId;
    }

    public StoredDetails setBlobId(String theBlobId) {
    public StoredDetails setBinaryContentId(String theBinaryContentId) {
        myBlobId = theBlobId;
        myBinaryContentId = theBinaryContentId;
        return this;
    }
@ -56,7 +56,6 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;

import java.awt.*;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;

@ -129,7 +128,7 @@ public class BinaryStorageInterceptor<T extends IPrimitiveType<byte[]>> {
                .collect(Collectors.toList());

        for (String next : attachmentIds) {
            myBinaryStorageSvc.expungeBlob(theResource.getIdElement(), next);
            myBinaryStorageSvc.expungeBinaryContent(theResource.getIdElement(), next);
            theCounter.incrementAndGet();

            ourLog.info(

@ -232,38 +231,39 @@ public class BinaryStorageInterceptor<T extends IPrimitiveType<byte[]>> {
            long nextPayloadLength = data.length;
            String nextContentType = nextTarget.getContentType();
            boolean shouldStoreBlob =
                    myBinaryStorageSvc.shouldStoreBlob(nextPayloadLength, resourceId, nextContentType);
                    myBinaryStorageSvc.shouldStoreBinaryContent(nextPayloadLength, resourceId, nextContentType);
            if (shouldStoreBlob) {

                String newBlobId;
                String newBinaryContentId;
                if (thePointcut == Pointcut.STORAGE_PRESTORAGE_RESOURCE_UPDATED) {
                    ByteArrayInputStream inputStream = new ByteArrayInputStream(data);
                    StoredDetails storedDetails = myBinaryStorageSvc.storeBlob(
                    StoredDetails storedDetails = myBinaryStorageSvc.storeBinaryContent(
                            resourceId, null, nextContentType, inputStream, theRequestDetails);
                    newBlobId = storedDetails.getBlobId();
                    newBinaryContentId = storedDetails.getBinaryContentId();
                } else {
                    assert thePointcut == Pointcut.STORAGE_PRESTORAGE_RESOURCE_CREATED : thePointcut.name();
                    newBlobId = myBinaryStorageSvc.newBlobId();
                    newBinaryContentId = myBinaryStorageSvc.newBinaryContentId();

                    String prefix = invokeAssignBlobPrefix(theRequestDetails, theResource);
                    String prefix = invokeAssignBinaryContentPrefix(theRequestDetails, theResource);
                    if (isNotBlank(prefix)) {
                        newBlobId = prefix + newBlobId;
                        newBinaryContentId = prefix + newBinaryContentId;
                    }
                    if (myBinaryStorageSvc.isValidBlobId(newBlobId)) {
                    if (myBinaryStorageSvc.isValidBinaryContentId(newBinaryContentId)) {
                        List<DeferredBinaryTarget> deferredBinaryTargets =
                                getOrCreateDeferredBinaryStorageList(theResource);
                        DeferredBinaryTarget newDeferredBinaryTarget =
                                new DeferredBinaryTarget(newBlobId, nextTarget, data);
                                new DeferredBinaryTarget(newBinaryContentId, nextTarget, data);
                        deferredBinaryTargets.add(newDeferredBinaryTarget);
                        newDeferredBinaryTarget.setBlobIdPrefixHookApplied(true);
                    } else {
                        throw new InternalErrorException(Msg.code(2341)
                                + "Invalid blob ID for backing storage service.[blobId=" + newBlobId + ",service="
                                + "Invalid binaryContent ID for backing storage service.[binaryContentId="
                                + newBinaryContentId + ",service="
                                + myBinaryStorageSvc.getClass().getName() + "]");
                    }
                }

                myBinaryAccessProvider.replaceDataWithExtension(nextTarget, newBlobId);
                myBinaryAccessProvider.replaceDataWithExtension(nextTarget, newBinaryContentId);
            }
        }
    }

@ -273,19 +273,32 @@ public class BinaryStorageInterceptor<T extends IPrimitiveType<byte[]>> {
     * This invokes the {@link Pointcut#STORAGE_BINARY_ASSIGN_BLOB_ID_PREFIX} hook and returns the prefix to use for the blob ID, or null if there are no implementers.
     * @return A string, which will be used to prefix the blob ID. May be null.
     */
    private String invokeAssignBlobPrefix(RequestDetails theRequest, IBaseResource theResource) {
    private String invokeAssignBinaryContentPrefix(RequestDetails theRequest, IBaseResource theResource) {
        if (!CompositeInterceptorBroadcaster.hasHooks(
        // TODO: to be removed when pointcut STORAGE_BINARY_ASSIGN_BLOB_ID_PREFIX has exceeded the grace period
                Pointcut.STORAGE_BINARY_ASSIGN_BLOB_ID_PREFIX, myInterceptorBroadcaster, theRequest)) {
        boolean hasStorageBinaryAssignBlobIdPrefixHooks = CompositeInterceptorBroadcaster.hasHooks(
                Pointcut.STORAGE_BINARY_ASSIGN_BLOB_ID_PREFIX, myInterceptorBroadcaster, theRequest);

        boolean hasStorageBinaryAssignBinaryContentIdPrefixHooks = CompositeInterceptorBroadcaster.hasHooks(
                Pointcut.STORAGE_BINARY_ASSIGN_BINARY_CONTENT_ID_PREFIX, myInterceptorBroadcaster, theRequest);

        if (!(hasStorageBinaryAssignBlobIdPrefixHooks || hasStorageBinaryAssignBinaryContentIdPrefixHooks)) {
            return null;
        }

        HookParams params =
                new HookParams().add(RequestDetails.class, theRequest).add(IBaseResource.class, theResource);

        BaseBinaryStorageSvcImpl.setBlobIdPrefixApplied(theRequest);
        BaseBinaryStorageSvcImpl.setBinaryContentIdPrefixApplied(theRequest);

        Pointcut pointcutToInvoke = Pointcut.STORAGE_BINARY_ASSIGN_BINARY_CONTENT_ID_PREFIX;

        // TODO: to be removed when pointcut STORAGE_BINARY_ASSIGN_BLOB_ID_PREFIX has exceeded the grace period
        if (hasStorageBinaryAssignBlobIdPrefixHooks) {
            pointcutToInvoke = Pointcut.STORAGE_BINARY_ASSIGN_BLOB_ID_PREFIX;
        }

        return (String) CompositeInterceptorBroadcaster.doCallHooksAndReturnObject(
                myInterceptorBroadcaster, theRequest, Pointcut.STORAGE_BINARY_ASSIGN_BLOB_ID_PREFIX, params);
                myInterceptorBroadcaster, theRequest, pointcutToInvoke, params);
    }

    @Nonnull

@ -317,7 +330,7 @@ public class BinaryStorageInterceptor<T extends IPrimitiveType<byte[]>> {
            InputStream dataStream = next.getDataStream();
            String contentType = target.getContentType();
            RequestDetails requestDetails = initRequestDetails(next);
            myBinaryStorageSvc.storeBlob(resourceId, blobId, contentType, dataStream, requestDetails);
            myBinaryStorageSvc.storeBinaryContent(resourceId, blobId, contentType, dataStream, requestDetails);
        }
    }
}

@ -325,7 +338,7 @@ public class BinaryStorageInterceptor<T extends IPrimitiveType<byte[]>> {
    private RequestDetails initRequestDetails(DeferredBinaryTarget theDeferredBinaryTarget) {
        ServletRequestDetails requestDetails = new ServletRequestDetails();
        if (theDeferredBinaryTarget.isBlobIdPrefixHookApplied()) {
            BaseBinaryStorageSvcImpl.setBlobIdPrefixApplied(requestDetails);
            BaseBinaryStorageSvcImpl.setBinaryContentIdPrefixApplied(requestDetails);
        }
        return requestDetails;
    }

@ -374,14 +387,15 @@ public class BinaryStorageInterceptor<T extends IPrimitiveType<byte[]>> {
            Optional<String> attachmentId = nextTarget.getAttachmentId();
            if (attachmentId.isPresent()) {

                StoredDetails blobDetails = myBinaryStorageSvc.fetchBlobDetails(resourceId, attachmentId.get());
                StoredDetails blobDetails =
                        myBinaryStorageSvc.fetchBinaryContentDetails(resourceId, attachmentId.get());
                if (blobDetails == null) {
                    String msg = myCtx.getLocalizer().getMessage(BinaryAccessProvider.class, "unknownBlobId");
                    throw new InvalidRequestException(Msg.code(1330) + msg);
                }

                if ((theCumulativeInflatedBytes + blobDetails.getBytes()) < myAutoInflateBinariesMaximumBytes) {
                    byte[] bytes = myBinaryStorageSvc.fetchBlob(resourceId, attachmentId.get());
                    byte[] bytes = myBinaryStorageSvc.fetchBinaryContent(resourceId, attachmentId.get());
                    nextTarget.setData(bytes);
                    theCumulativeInflatedBytes += blobDetails.getBytes();
                }
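For reference, an interceptor targeting the replacement pointcut could look like the sketch below. The class name and returned prefix are invented; the parameter types follow the HookParams built in the method above.

import ca.uhn.fhir.interceptor.api.Hook;
import ca.uhn.fhir.interceptor.api.Interceptor;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import org.hl7.fhir.instance.model.api.IBaseResource;

// Illustrative hook: the returned value is prepended to the generated binary content ID.
@Interceptor
public class BinaryContentIdPrefixInterceptor {

    @Hook(Pointcut.STORAGE_BINARY_ASSIGN_BINARY_CONTENT_ID_PREFIX)
    public String assignPrefix(RequestDetails theRequestDetails, IBaseResource theResource) {
        return "tenant-a-";
    }
}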
@ -118,7 +118,7 @@ public class BinaryAccessProvider {

        String blobId = attachmentId.get();

        StoredDetails blobDetails = myBinaryStorageSvc.fetchBlobDetails(theResourceId, blobId);
        StoredDetails blobDetails = myBinaryStorageSvc.fetchBinaryContentDetails(theResourceId, blobId);
        if (blobDetails == null) {
            String msg = myCtx.getLocalizer().getMessage(BinaryAccessProvider.class, "unknownBlobId");
            throw new InvalidRequestException(Msg.code(1331) + msg);

@ -138,7 +138,7 @@ public class BinaryAccessProvider {
            theServletResponse.addHeader(
                    Constants.HEADER_LAST_MODIFIED, DateUtils.formatDate(blobDetails.getPublished()));

            myBinaryStorageSvc.writeBlob(theResourceId, blobId, theServletResponse.getOutputStream());
            myBinaryStorageSvc.writeBinaryContent(theResourceId, blobId, theServletResponse.getOutputStream());
            theServletResponse.getOutputStream().close();

        } else {

@ -212,11 +212,11 @@ public class BinaryAccessProvider {
                    Msg.code(2073)
                            + "Input stream is empty! Ensure that you are uploading data, and if so, ensure that no interceptors are in use that may be consuming the input stream");
        }
        if (myBinaryStorageSvc.shouldStoreBlob(size, theResourceId, requestContentType)) {
        if (myBinaryStorageSvc.shouldStoreBinaryContent(size, theResourceId, requestContentType)) {
            StoredDetails storedDetails = myBinaryStorageSvc.storeBlob(
            StoredDetails storedDetails = myBinaryStorageSvc.storeBinaryContent(
                    theResourceId, null, requestContentType, new ByteArrayInputStream(bytes), theRequestDetails);
            size = storedDetails.getBytes();
            blobId = storedDetails.getBlobId();
            blobId = storedDetails.getBinaryContentId();
            Validate.notBlank(blobId, "BinaryStorageSvc returned a null blob ID"); // should not happen
            Validate.isTrue(size == theServletRequest.getContentLength(), "Unexpected stored size"); // Sanity check
        }
@ -57,7 +57,7 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank;

public abstract class BaseBinaryStorageSvcImpl implements IBinaryStorageSvc {
    public static long DEFAULT_MAXIMUM_BINARY_SIZE = Long.MAX_VALUE - 1;
    public static String BLOB_ID_PREFIX_APPLIED = "blob-id-prefix-applied";
    public static String BINARY_CONTENT_ID_PREFIX_APPLIED = "binary-content-id-prefix-applied";

    private final int ID_LENGTH = 100;
    private long myMaximumBinarySize = DEFAULT_MAXIMUM_BINARY_SIZE;

@ -95,20 +95,20 @@ public abstract class BaseBinaryStorageSvcImpl implements IBinaryStorageSvc {
    }

    @Override
    public String newBlobId() {
    public String newBinaryContentId() {
        return RandomTextUtils.newSecureRandomAlphaNumericString(ID_LENGTH);
    }

    /**
     * Default implementation is to return true for any Blob ID.
     * Default implementation is to return true for any binary content ID.
     */
    @Override
    public boolean isValidBlobId(String theNewBlobId) {
    public boolean isValidBinaryContentId(String theNewBinaryContentId) {
        return true;
    }

    @Override
    public boolean shouldStoreBlob(long theSize, IIdType theResourceId, String theContentType) {
    public boolean shouldStoreBinaryContent(long theSize, IIdType theResourceId, String theContentType) {
        return theSize >= getMinimumBinarySize();
    }

@ -139,63 +139,83 @@ public abstract class BaseBinaryStorageSvcImpl implements IBinaryStorageSvc {
            since =
                    "6.6.0 - Maintained for interface backwards compatibility. Note that invokes interceptor pointcut with empty parameters",
            forRemoval = true)
    protected String provideIdForNewBlob(String theBlobIdOrNull) {
    protected String provideIdForNewBinaryContent(String theBinaryContentIdOrNull) {
        return isNotBlank(theBlobIdOrNull) ? theBlobIdOrNull : newBlobId();
        return isNotBlank(theBinaryContentIdOrNull) ? theBinaryContentIdOrNull : newBinaryContentId();
    }

    @Nonnull
    protected String provideIdForNewBlob(
    protected String provideIdForNewBinaryContent(
            String theBlobIdOrNull, byte[] theBytes, RequestDetails theRequestDetails, String theContentType) {
            String theBinaryContentIdOrNull, byte[] theBytes, RequestDetails theRequestDetails, String theContentType) {
        String blobId = isNotBlank(theBlobIdOrNull) ? theBlobIdOrNull : newBlobId();
        String binaryContentId = isNotBlank(theBinaryContentIdOrNull) ? theBinaryContentIdOrNull : newBinaryContentId();

        // make sure another pointcut didn't already apply a prefix to the blobId
        // make sure another pointcut didn't already apply a prefix to the binaryContentId
        if (isBlobIdPrefixApplied(theRequestDetails)) {
        if (isBinaryContentIdPrefixApplied(theRequestDetails)) {
            return blobId;
            return binaryContentId;
        }

        String blobPrefixFromHooksOrNull = callBlobIdPointcut(theBytes, theRequestDetails, theContentType);
        String blobIdPrefixFromHooks = blobPrefixFromHooksOrNull == null ? "" : blobPrefixFromHooksOrNull;
        return blobIdPrefixFromHooks + blobId;
        String binaryContentIdPrefixFromHooksOrNull =
                callBinaryContentIdPointcut(theBytes, theRequestDetails, theContentType);
        String binaryContentIdPrefixFromHooks = StringUtils.defaultString(binaryContentIdPrefixFromHooksOrNull);
        return binaryContentIdPrefixFromHooks + binaryContentId;
    }

    protected boolean isBlobIdPrefixApplied(RequestDetails theRequestDetails) {
    protected boolean isBinaryContentIdPrefixApplied(RequestDetails theRequestDetails) {
        return theRequestDetails.getUserData().get(BLOB_ID_PREFIX_APPLIED) == Boolean.TRUE;
        return theRequestDetails.getUserData().get(BINARY_CONTENT_ID_PREFIX_APPLIED) == Boolean.TRUE;
    }

    public static void setBlobIdPrefixApplied(RequestDetails theRequestDetails) {
    public static void setBinaryContentIdPrefixApplied(RequestDetails theRequestDetails) {
        theRequestDetails.getUserData().put(BLOB_ID_PREFIX_APPLIED, true);
        theRequestDetails.getUserData().put(BINARY_CONTENT_ID_PREFIX_APPLIED, true);
    }

    /**
     * Invokes STORAGE_BINARY_ASSIGN_BLOB_ID_PREFIX pointcut if present
     * This invokes the {@link Pointcut#STORAGE_BINARY_ASSIGN_BINARY_CONTENT_ID_PREFIX} hook and returns the prefix to use for the binary content ID, or null if there are no implementers.
     * @return null if pointcut is not present
     * @return A string, which will be used to prefix the binary content ID. May be null.
     */
    @Nullable
    private String callBlobIdPointcut(byte[] theBytes, RequestDetails theRequestDetails, String theContentType) {
    private String callBinaryContentIdPointcut(
        // Interceptor call: STORAGE_BINARY_ASSIGN_BLOB_ID_PREFIX
            byte[] theBytes, RequestDetails theRequestDetails, String theContentType) {
        // TODO: to be removed when pointcut STORAGE_BINARY_ASSIGN_BLOB_ID_PREFIX has exceeded the grace period.
        // Deprecated in 7.2.0.
        boolean hasStorageBinaryAssignBlobIdPrefixHooks = CompositeInterceptorBroadcaster.hasHooks(
                Pointcut.STORAGE_BINARY_ASSIGN_BLOB_ID_PREFIX, myInterceptorBroadcaster, theRequestDetails);

        boolean hasStorageBinaryAssignBinaryContentIdPrefixHooks = CompositeInterceptorBroadcaster.hasHooks(
                Pointcut.STORAGE_BINARY_ASSIGN_BINARY_CONTENT_ID_PREFIX, myInterceptorBroadcaster, theRequestDetails);

        if (!(hasStorageBinaryAssignBlobIdPrefixHooks || hasStorageBinaryAssignBinaryContentIdPrefixHooks)) {
            return null;
        }

        IBaseBinary binary =
                BinaryUtil.newBinary(myFhirContext).setContent(theBytes).setContentType(theContentType);

        HookParams hookParams =
                new HookParams().add(RequestDetails.class, theRequestDetails).add(IBaseResource.class, binary);

        setBlobIdPrefixApplied(theRequestDetails);
        setBinaryContentIdPrefixApplied(theRequestDetails);

        Pointcut pointcutToInvoke = Pointcut.STORAGE_BINARY_ASSIGN_BINARY_CONTENT_ID_PREFIX;

        // TODO: to be removed when pointcut STORAGE_BINARY_ASSIGN_BLOB_ID_PREFIX has exceeded the grace period
        if (hasStorageBinaryAssignBlobIdPrefixHooks) {
            pointcutToInvoke = Pointcut.STORAGE_BINARY_ASSIGN_BLOB_ID_PREFIX;
        }

        return (String) CompositeInterceptorBroadcaster.doCallHooksAndReturnObject(
                myInterceptorBroadcaster, theRequestDetails, Pointcut.STORAGE_BINARY_ASSIGN_BLOB_ID_PREFIX, hookParams);
                myInterceptorBroadcaster, theRequestDetails, pointcutToInvoke, hookParams);
    }

    @Override
    public byte[] fetchDataBlobFromBinary(IBaseBinary theBaseBinary) throws IOException {
    public byte[] fetchDataByteArrayFromBinary(IBaseBinary theBaseBinary) throws IOException {
        IPrimitiveType<byte[]> dataElement = BinaryUtil.getOrCreateData(myFhirContext, theBaseBinary);
        byte[] value = dataElement.getValue();
        if (value == null) {
            Optional<String> attachmentId = getAttachmentId((IBaseHasExtensions) dataElement);
            if (attachmentId.isPresent()) {
                value = fetchBlob(theBaseBinary.getIdElement(), attachmentId.get());
|
value = fetchBinaryContent(theBaseBinary.getIdElement(), attachmentId.get());
|
||||||
} else {
|
} else {
|
||||||
throw new InternalErrorException(
|
throw new InternalErrorException(
|
||||||
Msg.code(1344) + "Unable to load binary blob data for " + theBaseBinary.getIdElement());
|
Msg.code(1344) + "Unable to load binary content data for " + theBaseBinary.getIdElement());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return value;
|
return value;
|
||||||
|
|
|
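Note on the reworked callBinaryContentIdPointcut(): it checks for hooks on both the new STORAGE_BINARY_ASSIGN_BINARY_CONTENT_ID_PREFIX pointcut and the deprecated STORAGE_BINARY_ASSIGN_BLOB_ID_PREFIX pointcut, and invokes the legacy pointcut whenever a legacy hook is still registered, so existing interceptors keep working during the grace period. The sketch below shows how a client interceptor could supply a prefix through the new pointcut; the class name and the tenant-based prefix scheme are illustrative only and not part of this change.

    import ca.uhn.fhir.interceptor.api.Hook;
    import ca.uhn.fhir.interceptor.api.Interceptor;
    import ca.uhn.fhir.interceptor.api.Pointcut;
    import ca.uhn.fhir.rest.api.server.RequestDetails;
    import org.hl7.fhir.instance.model.api.IBaseResource;

    // Illustrative interceptor, not part of this commit: supplies a prefix for newly
    // assigned binary content IDs through the new pointcut. The parameters mirror the
    // HookParams registered above (RequestDetails plus the Binary as an IBaseResource).
    @Interceptor
    public class TenantBinaryContentPrefixInterceptor {

        @Hook(Pointcut.STORAGE_BINARY_ASSIGN_BINARY_CONTENT_ID_PREFIX)
        public String provideBinaryContentIdPrefix(RequestDetails theRequestDetails, IBaseResource theBinary) {
            // Assumption: a tenant ID is present on the request; adapt the prefix
            // scheme to whatever characters your storage sink allows.
            String tenantId = theRequestDetails.getTenantId();
            return tenantId != null ? tenantId + "-" : null;
        }
    }

Returning null leaves the content ID unprefixed. Once the pointcut has been invoked for a request, setBinaryContentIdPrefixApplied() marks the request so a later provideIdForNewBinaryContent() call will not prefix the same ID twice.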
@@ -38,7 +38,7 @@ public class NullBinaryStorageSvcImpl implements IBinaryStorageSvc {
 	}
 
 	@Override
-	public boolean isValidBlobId(String theNewBlobId) {
+	public boolean isValidBinaryContentId(String theNewBlobId) {
 		return true;
 	}
 
@@ -58,18 +58,18 @@ public class NullBinaryStorageSvcImpl implements IBinaryStorageSvc {
 	}
 
 	@Override
-	public boolean shouldStoreBlob(long theSize, IIdType theResourceId, String theContentType) {
+	public boolean shouldStoreBinaryContent(long theSize, IIdType theResourceId, String theContentType) {
 		return false;
 	}
 
 	@Override
-	public String newBlobId() {
+	public String newBinaryContentId() {
 		throw new UnsupportedOperationException(Msg.code(1345));
 	}
 
 	@Nonnull
 	@Override
-	public StoredDetails storeBlob(
+	public StoredDetails storeBinaryContent(
 			IIdType theResourceId,
 			String theBlobIdOrNull,
 			String theContentType,
@@ -79,27 +79,27 @@ public class NullBinaryStorageSvcImpl implements IBinaryStorageSvc {
 	}
 
 	@Override
-	public StoredDetails fetchBlobDetails(IIdType theResourceId, String theBlobId) {
+	public StoredDetails fetchBinaryContentDetails(IIdType theResourceId, String theBlobId) {
 		throw new UnsupportedOperationException(Msg.code(1347));
 	}
 
 	@Override
-	public boolean writeBlob(IIdType theResourceId, String theBlobId, OutputStream theOutputStream) {
+	public boolean writeBinaryContent(IIdType theResourceId, String theBlobId, OutputStream theOutputStream) {
 		throw new UnsupportedOperationException(Msg.code(1348));
 	}
 
 	@Override
-	public void expungeBlob(IIdType theIdElement, String theBlobId) {
+	public void expungeBinaryContent(IIdType theIdElement, String theBlobId) {
 		throw new UnsupportedOperationException(Msg.code(1349));
 	}
 
 	@Override
-	public byte[] fetchBlob(IIdType theResourceId, String theBlobId) {
+	public byte[] fetchBinaryContent(IIdType theResourceId, String theBlobId) {
 		throw new UnsupportedOperationException(Msg.code(1350));
 	}
 
 	@Override
-	public byte[] fetchDataBlobFromBinary(IBaseBinary theResource) {
+	public byte[] fetchDataByteArrayFromBinary(IBaseBinary theResource) {
 		throw new UnsupportedOperationException(Msg.code(1351));
 	}
 }
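NullBinaryStorageSvcImpl keeps its no-op behaviour under the new names: shouldStoreBinaryContent() always returns false and the other renamed methods throw Msg-coded UnsupportedOperationExceptions, so callers are expected to ask before externalizing content. A minimal caller-side sketch of that guard, assuming IBinaryStorageSvc is the shared interface and lives in ca.uhn.fhir.jpa.binary.api (adjust the import to your HAPI FHIR version); the helper class itself is illustrative, not part of this commit.

    import ca.uhn.fhir.jpa.binary.api.IBinaryStorageSvc;
    import org.hl7.fhir.instance.model.api.IIdType;

    // Illustrative guard, not part of this commit.
    public final class BinaryStorageGuard {

        private BinaryStorageGuard() {}

        // True only when the configured service is willing to externalize content of this
        // size and type. With NullBinaryStorageSvcImpl this is always false, so the data
        // stays inline and none of the Msg-coded unsupported methods are ever reached.
        public static boolean canExternalize(
                IBinaryStorageSvc theSvc, long theSize, IIdType theResourceId, String theContentType) {
            return theSvc.shouldStoreBinaryContent(theSize, theResourceId, theContentType);
        }
    }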
@@ -81,13 +81,13 @@ public class FilesystemBinaryStorageSvcImpl extends BaseBinaryStorageSvcImpl {
 	 * This implementation prevents: \ / | .
 	 */
 	@Override
-	public boolean isValidBlobId(String theNewBlobId) {
-		return !StringUtils.containsAny(theNewBlobId, '\\', '/', '|', '.');
+	public boolean isValidBinaryContentId(String theNewBinaryContentId) {
+		return !StringUtils.containsAny(theNewBinaryContentId, '\\', '/', '|', '.');
 	}
 
 	@Nonnull
 	@Override
-	public StoredDetails storeBlob(
+	public StoredDetails storeBinaryContent(
 			IIdType theResourceId,
 			String theBlobIdOrNull,
 			String theContentType,
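The renamed isValidBinaryContentId() keeps the filesystem-specific rule of rejecting \, /, | and ., so a caller-supplied content ID (for example one built from a prefix hook) cannot point outside the storage directory. A JUnit 5 sketch of that rule follows; it is illustrative only, assumes the test sits in the same package as FilesystemBinaryStorageSvcImpl, and assumes the service is constructed with its base storage path.

    import static org.junit.jupiter.api.Assertions.assertFalse;
    import static org.junit.jupiter.api.Assertions.assertTrue;

    import org.junit.jupiter.api.Test;

    // Illustrative JUnit 5 test, not part of this commit.
    class FilesystemBinaryContentIdValidationSketch {

        // Assumption: the service is constructed with its base storage directory.
        private final FilesystemBinaryStorageSvcImpl mySvc =
                new FilesystemBinaryStorageSvcImpl("target/binary-content-sketch");

        @Test
        void rejectsPathLikeIds() {
            assertFalse(mySvc.isValidBinaryContentId("../escape")); // '.' and '/' are both rejected
            assertFalse(mySvc.isValidBinaryContentId("tenant|123")); // '|' is rejected
            assertTrue(mySvc.isValidBinaryContentId("tenant-ABC123"));
        }
    }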
@@ -95,7 +95,7 @@ public class FilesystemBinaryStorageSvcImpl extends BaseBinaryStorageSvcImpl {
 			RequestDetails theRequestDetails)
 			throws IOException {
 
-		String id = super.provideIdForNewBlob(theBlobIdOrNull, null, theRequestDetails, theContentType);
+		String id = super.provideIdForNewBinaryContent(theBlobIdOrNull, null, theRequestDetails, theContentType);
 		File storagePath = getStoragePath(id, true);
 
 		// Write binary file
@@ -126,7 +126,7 @@ public class FilesystemBinaryStorageSvcImpl extends BaseBinaryStorageSvcImpl {
 	}
 
 	@Override
-	public StoredDetails fetchBlobDetails(IIdType theResourceId, String theBlobId) throws IOException {
+	public StoredDetails fetchBinaryContentDetails(IIdType theResourceId, String theBlobId) throws IOException {
 		StoredDetails retVal = null;
 
 		File storagePath = getStoragePath(theBlobId, false);
@@ -145,7 +145,8 @@ public class FilesystemBinaryStorageSvcImpl extends BaseBinaryStorageSvcImpl {
 	}
 
 	@Override
-	public boolean writeBlob(IIdType theResourceId, String theBlobId, OutputStream theOutputStream) throws IOException {
+	public boolean writeBinaryContent(IIdType theResourceId, String theBlobId, OutputStream theOutputStream)
+			throws IOException {
 		InputStream inputStream = getInputStream(theResourceId, theBlobId);
 
 		if (inputStream != null) {
@@ -172,7 +173,7 @@ public class FilesystemBinaryStorageSvcImpl extends BaseBinaryStorageSvcImpl {
 	}
 
 	@Override
-	public void expungeBlob(IIdType theResourceId, String theBlobId) {
+	public void expungeBinaryContent(IIdType theResourceId, String theBlobId) {
 		File storagePath = getStoragePath(theBlobId, false);
 		if (storagePath != null) {
 			File storageFile = getStorageFilename(storagePath, theResourceId, theBlobId);
@@ -187,8 +188,8 @@ public class FilesystemBinaryStorageSvcImpl extends BaseBinaryStorageSvcImpl {
 	}
 
 	@Override
-	public byte[] fetchBlob(IIdType theResourceId, String theBlobId) throws IOException {
-		StoredDetails details = fetchBlobDetails(theResourceId, theBlobId);
+	public byte[] fetchBinaryContent(IIdType theResourceId, String theBlobId) throws IOException {
+		StoredDetails details = fetchBinaryContentDetails(theResourceId, theBlobId);
 		try (InputStream inputStream = getInputStream(theResourceId, theBlobId)) {
 
 			if (inputStream != null) {
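With these renames, consumers stream stored content through writeBinaryContent(resourceId, contentId, outputStream) rather than writeBlob(...). A small sketch of that call shape follows; the exporter class is illustrative, not part of this commit, and the IBinaryStorageSvc import assumes the ca.uhn.fhir.jpa.binary.api package (adjust to your HAPI FHIR version).

    import java.io.IOException;
    import java.io.OutputStream;
    import java.nio.file.Files;
    import java.nio.file.Path;

    import ca.uhn.fhir.jpa.binary.api.IBinaryStorageSvc;
    import org.hl7.fhir.instance.model.api.IIdType;

    // Illustrative exporter, not part of this commit.
    public final class BinaryContentExporter {

        private BinaryContentExporter() {}

        // Streams previously stored content to a file. The boolean result is whatever the
        // configured implementation reports; the implementations above only write when they
        // find content for the given resource/ID pair.
        public static boolean exportToFile(
                IBinaryStorageSvc theSvc, IIdType theResourceId, String theContentId, Path theTargetFile)
                throws IOException {
            try (OutputStream out = Files.newOutputStream(theTargetFile)) {
                return theSvc.writeBinaryContent(theResourceId, theContentId, out);
            }
        }
    }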
@@ -54,7 +54,7 @@ public class MemoryBinaryStorageSvcImpl extends BaseBinaryStorageSvcImpl impleme
 
 	@Nonnull
 	@Override
-	public StoredDetails storeBlob(
+	public StoredDetails storeBinaryContent(
 			IIdType theResourceId,
 			String theBlobIdOrNull,
 			String theContentType,
@@ -66,7 +66,7 @@ public class MemoryBinaryStorageSvcImpl extends BaseBinaryStorageSvcImpl impleme
 		CountingInputStream countingIs = createCountingInputStream(hashingIs);
 
 		byte[] bytes = IOUtils.toByteArray(countingIs);
-		String id = super.provideIdForNewBlob(theBlobIdOrNull, bytes, theRequestDetails, theContentType);
+		String id = super.provideIdForNewBinaryContent(theBlobIdOrNull, bytes, theRequestDetails, theContentType);
 		String key = toKey(theResourceId, id);
 		theInputStream.close();
 		myDataMap.put(key, bytes);
@@ -77,13 +77,14 @@ public class MemoryBinaryStorageSvcImpl extends BaseBinaryStorageSvcImpl impleme
 	}
 
 	@Override
-	public StoredDetails fetchBlobDetails(IIdType theResourceId, String theBlobId) {
+	public StoredDetails fetchBinaryContentDetails(IIdType theResourceId, String theBlobId) {
 		String key = toKey(theResourceId, theBlobId);
 		return myDetailsMap.get(key);
 	}
 
 	@Override
-	public boolean writeBlob(IIdType theResourceId, String theBlobId, OutputStream theOutputStream) throws IOException {
+	public boolean writeBinaryContent(IIdType theResourceId, String theBlobId, OutputStream theOutputStream)
+			throws IOException {
 		String key = toKey(theResourceId, theBlobId);
 		byte[] bytes = myDataMap.get(key);
 		if (bytes == null) {
@@ -94,14 +95,14 @@ public class MemoryBinaryStorageSvcImpl extends BaseBinaryStorageSvcImpl impleme
 	}
 
 	@Override
-	public void expungeBlob(IIdType theResourceId, String theBlobId) {
+	public void expungeBinaryContent(IIdType theResourceId, String theBlobId) {
 		String key = toKey(theResourceId, theBlobId);
 		myDataMap.remove(key);
 		myDetailsMap.remove(key);
 	}
 
 	@Override
-	public byte[] fetchBlob(IIdType theResourceId, String theBlobId) {
+	public byte[] fetchBinaryContent(IIdType theResourceId, String theBlobId) {
 		String key = toKey(theResourceId, theBlobId);
 		return myDataMap.get(key);
 	}
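In MemoryBinaryStorageSvcImpl both the payload (myDataMap) and its StoredDetails (myDetailsMap) are keyed by toKey(resourceId, contentId); expungeBinaryContent() removes both entries, so a later fetchBinaryContent() or fetchBinaryContentDetails() simply returns null. The stand-alone model below (plain Java, not HAPI code, with the details value reduced to a String) illustrates that two-map layout.

    import java.util.HashMap;
    import java.util.Map;

    // Simplified stand-alone model of the in-memory layout shown above; not HAPI code.
    public class InMemoryBinaryContentModel {

        private final Map<String, byte[]> myDataMap = new HashMap<>();
        private final Map<String, String> myDetailsMap = new HashMap<>(); // details simplified to a String

        private String toKey(String theResourceId, String theContentId) {
            return theResourceId + "/" + theContentId;
        }

        public void store(String theResourceId, String theContentId, byte[] theBytes, String theDetails) {
            String key = toKey(theResourceId, theContentId);
            myDataMap.put(key, theBytes);
            myDetailsMap.put(key, theDetails);
        }

        public byte[] fetchBinaryContent(String theResourceId, String theContentId) {
            return myDataMap.get(toKey(theResourceId, theContentId));
        }

        // Removing both entries means subsequent fetches return null rather than stale data.
        public void expungeBinaryContent(String theResourceId, String theContentId) {
            String key = toKey(theResourceId, theContentId);
            myDataMap.remove(key);
            myDetailsMap.remove(key);
        }
    }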
@@ -1,3 +1,22 @@
+/*-
+ * #%L
+ * HAPI FHIR Test Utilities
+ * %%
+ * Copyright (C) 2014 - 2024 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
 package ca.uhn.test.util;
 
 import ch.qos.logback.classic.spi.ILoggingEvent;