failing Test

Tadgh 2024-07-19 12:37:35 -07:00
parent 6f683e0990
commit 0f58641030
3 changed files with 187 additions and 5 deletions

View File

@@ -7,6 +7,7 @@ import ca.uhn.fhir.jpa.binary.api.StoredDetails;
import ca.uhn.fhir.rest.server.exceptions.PayloadTooLargeException;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.system.HapiSystemProperties;
import org.apache.commons.io.FileUtils;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.IdType;
@@ -46,6 +47,29 @@ public class FilesystemBinaryStorageSvcImplTest {
		FileUtils.deleteDirectory(myPath);
	}

	@Test
	public void testStoreAndRetrievePostMigration() throws IOException {
		HapiSystemProperties.enableUnitTestMode();
		IIdType id = new IdType("Patient/123");
		String contentType = "image/png";
		StoredDetails outcome = mySvc.storeBinaryContent(id, null, contentType, new ByteArrayInputStream(SOME_BYTES), new ServletRequestDetails());
		ourLog.info("Got id: {}", outcome);

		StoredDetails details = mySvc.fetchBinaryContentDetails(id, outcome.getBinaryContentId());
		assertEquals(16L, details.getBytes());
		assertEquals(outcome.getBinaryContentId(), details.getBinaryContentId());
		assertEquals("image/png", details.getContentType());
		assertEquals("dc7197cfab936698bef7818975c185a9b88b71a0a0a2493deea487706ddf20cb", details.getHash());
		assertNotNull(details.getPublished());

		ByteArrayOutputStream capture = new ByteArrayOutputStream();
		mySvc.writeBinaryContent(id, outcome.getBinaryContentId(), capture);

		assertArrayEquals(SOME_BYTES, capture.toByteArray());
		assertArrayEquals(SOME_BYTES, mySvc.fetchBinaryContent(id, outcome.getBinaryContentId()));
	}

	@Test
	public void testStoreAndRetrieve() throws IOException {
		IIdType id = new IdType("Patient/123");

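For context, a minimal sketch (not part of this commit) of the legacy descriptor shape that unit-test mode now makes the service write, assuming plain Jackson serialization of the OldStoredDetails class added in the next file. The "blobId" property name comes from its @JsonProperty annotation; the class name, id, and hash values below are placeholders for illustration only.

import ca.uhn.fhir.jpa.binary.api.OldStoredDetails;
import com.fasterxml.jackson.databind.ObjectMapper;

import java.util.Date;

public class LegacyDescriptorShapeSketch {
	public static void main(String[] args) throws Exception {
		OldStoredDetails legacy = new OldStoredDetails()
				.setBinaryContentId("example-id") // placeholder id
				.setBytes(16L)
				.setContentType("image/png")
				.setHash("example-hash") // placeholder hash
				.setPublished(new Date());
		// Prints something like:
		// {"blobId":"example-id","bytes":16,"contentType":"image/png","hash":"example-hash","published":"..."}
		System.out.println(new ObjectMapper().writeValueAsString(legacy));
	}
}
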
View File

@@ -0,0 +1,145 @@
/*-
 * #%L
 * HAPI FHIR Storage api
 * %%
 * Copyright (C) 2014 - 2024 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */
package ca.uhn.fhir.jpa.binary.api;

import ca.uhn.fhir.model.api.IModelJson;
import ca.uhn.fhir.rest.server.util.JsonDateDeserializer;
import ca.uhn.fhir.rest.server.util.JsonDateSerializer;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import com.google.common.hash.HashFunction;
import com.google.common.hash.Hashing;
import com.google.common.hash.HashingInputStream;
import jakarta.annotation.Nonnull;
import org.apache.commons.lang3.builder.ToStringBuilder;

import java.io.ByteArrayInputStream;
import java.util.Date;

public class OldStoredDetails implements IModelJson {

	@JsonProperty("blobId")
	private String myBinaryContentId;

	@JsonProperty("bytes")
	private long myBytes;

	@JsonProperty("contentType")
	private String myContentType;

	@JsonProperty("hash")
	private String myHash;

	@JsonProperty("published")
	@JsonSerialize(using = JsonDateSerializer.class)
	@JsonDeserialize(using = JsonDateDeserializer.class)
	private Date myPublished;

	/**
	 * Constructor
	 */
	@SuppressWarnings("unused")
	public OldStoredDetails() {
		super();
	}

	/**
	 * Constructor
	 */
	public OldStoredDetails(
			@Nonnull String theBinaryContentId,
			long theBytes,
			@Nonnull String theContentType,
			HashingInputStream theIs,
			Date thePublished) {
		myBinaryContentId = theBinaryContentId;
		myBytes = theBytes;
		myContentType = theContentType;
		myHash = theIs.hash().toString();
		myPublished = thePublished;
	}

	@Override
	public String toString() {
		return new ToStringBuilder(this)
				.append("binaryContentId", myBinaryContentId)
				.append("bytes", myBytes)
				.append("contentType", myContentType)
				.append("hash", myHash)
				.append("published", myPublished)
				.toString();
	}

	public String getHash() {
		return myHash;
	}

	public OldStoredDetails setHash(String theHash) {
		myHash = theHash;
		return this;
	}

	public Date getPublished() {
		return myPublished;
	}

	public OldStoredDetails setPublished(Date thePublished) {
		myPublished = thePublished;
		return this;
	}

	@Nonnull
	public String getContentType() {
		return myContentType;
	}

	public OldStoredDetails setContentType(String theContentType) {
		myContentType = theContentType;
		return this;
	}

	@Nonnull
	public String getBinaryContentId() {
		return myBinaryContentId;
	}

	public OldStoredDetails setBinaryContentId(String theBinaryContentId) {
		myBinaryContentId = theBinaryContentId;
		return this;
	}

	public long getBytes() {
		return myBytes;
	}

	public OldStoredDetails setBytes(long theBytes) {
		myBytes = theBytes;
		return this;
	}

	public StoredDetails toDetails() {
		HashFunction hash = Hashing.sha256();
		StoredDetails storedDetails = new StoredDetails(
				myBinaryContentId, myBytes, myContentType, new HashingInputStream(hash, new ByteArrayInputStream("whatever".getBytes())), myPublished);
		storedDetails.setHash(myHash);
		return storedDetails;
	}
}

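A short usage sketch (again not part of the commit) of the toDetails() bridge above. The HashingInputStream over "whatever" exists only to satisfy the StoredDetails constructor; the stored hash is copied across explicitly with setHash, so the converted details keep the original hash and id. The getters used here are the same ones exercised in the test file earlier in this commit; the class name and values are placeholders.

import ca.uhn.fhir.jpa.binary.api.OldStoredDetails;
import ca.uhn.fhir.jpa.binary.api.StoredDetails;

import java.util.Date;

public class ToDetailsConversionSketch {
	public static void main(String[] args) {
		OldStoredDetails legacy = new OldStoredDetails()
				.setBinaryContentId("example-id") // placeholder id
				.setBytes(16L)
				.setContentType("image/png")
				.setHash("example-hash") // placeholder hash
				.setPublished(new Date());

		StoredDetails converted = legacy.toDetails();

		// The placeholder hash computed over "whatever" is overwritten by setHash(myHash),
		// so the original hash survives the conversion.
		System.out.println("example-hash".equals(converted.getHash())); // true
		System.out.println("example-id".equals(converted.getBinaryContentId())); // true
	}
}
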
View File

@@ -21,10 +21,12 @@ package ca.uhn.fhir.jpa.binstore;
import ca.uhn.fhir.context.ConfigurationException;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.binary.api.OldStoredDetails;
import ca.uhn.fhir.jpa.binary.api.StoredDetails;
import ca.uhn.fhir.jpa.binary.svc.BaseBinaryStorageSvcImpl;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import ca.uhn.fhir.system.HapiSystemProperties;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;
@@ -109,11 +111,22 @@ public class FilesystemBinaryStorageSvcImpl extends BaseBinaryStorageSvcImpl {
		// Write descriptor file
		long count = countingInputStream.getByteCount();
		StoredDetails details = new StoredDetails(id, count, theContentType, hashingInputStream, new Date());
		File descriptorFilename = getDescriptorFilename(storagePath, theResourceId, id);
		ourLog.info("Writing to file: {}", descriptorFilename.getAbsolutePath());
		try (FileWriter writer = new FileWriter(descriptorFilename)) {
			myJsonSerializer.writeValue(writer, details);
		StoredDetails details = null;
		if (HapiSystemProperties.isUnitTestModeEnabled()) {
			OldStoredDetails oldDetails = new OldStoredDetails(id, count, theContentType, hashingInputStream, new Date());
			File descriptorFilename = getDescriptorFilename(storagePath, theResourceId, id);
			ourLog.info("Writing to file: {}", descriptorFilename.getAbsolutePath());
			try (FileWriter writer = new FileWriter(descriptorFilename)) {
				myJsonSerializer.writeValue(writer, oldDetails);
			}
			details = oldDetails.toDetails();
		} else {
			details = new StoredDetails(id, count, theContentType, hashingInputStream, new Date());
			File descriptorFilename = getDescriptorFilename(storagePath, theResourceId, id);
			ourLog.info("Writing to file: {}", descriptorFilename.getAbsolutePath());
			try (FileWriter writer = new FileWriter(descriptorFilename)) {
				myJsonSerializer.writeValue(writer, details);
			}
		}
		ourLog.info(
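
One hedged observation on why the new test may be the "failing Test" of the commit title: if the fetch path deserializes the descriptor file back into StoredDetails, and StoredDetails maps the id under a property name other than the legacy "blobId", the old-format descriptor would not round-trip cleanly. Below is a generic Jackson sketch of that kind of property-rename mismatch, using hypothetical stand-in classes rather than the real OldStoredDetails/StoredDetails; the "binaryContentId" name is an assumption for illustration only.

import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;

public class PropertyRenameMismatchSketch {

	// Hypothetical stand-ins; not the real HAPI FHIR classes.
	static class LegacyShape {
		@JsonProperty("blobId")
		public String id = "example-id";
	}

	static class CurrentShape {
		@JsonProperty("binaryContentId") // assumed new property name, for illustration only
		public String id;
	}

	public static void main(String[] args) throws Exception {
		ObjectMapper mapper = new ObjectMapper()
				.disable(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES);
		String legacyJson = mapper.writeValueAsString(new LegacyShape()); // {"blobId":"example-id"}
		CurrentShape readBack = mapper.readValue(legacyJson, CurrentShape.class);
		// The renamed property is silently dropped, so the id comes back null.
		System.out.println(readBack.id); // null
	}
}

Whether the real deserializer fails loudly or drops the value silently depends on its FAIL_ON_UNKNOWN_PROPERTIES setting, which this diff does not show.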