updating field to make it nullable (#5982)
* updating field to make it nullable
* review fixes and test fixes
* review fixes

---------

Co-authored-by: leif stawnyczy <leifstawnyczy@leifs-mbp.home>
This commit is contained in:
parent cdf5b602c1
commit 5302a7da52
@@ -0,0 +1,7 @@
+---
+type: fix
+issue: 5981
+title: "Batch2WorkChunkEntity poll_attempt count is a nullable field, but was incorrectly
+  declared as a primitive.
+  This has been fixed.
+  "
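Why the change matters: rows whose POLL_ATTEMPTS column is NULL (for example, work chunks written before this column was populated) cannot be loaded into a primitive int field, because Hibernate cannot assign NULL to a primitive and the entity read fails. A minimal sketch of the before/after mapping, mirroring the field and getter changed in the diffs below:

// Before: a NULL value in POLL_ATTEMPTS cannot be hydrated into a primitive,
// so reading such a row fails (typically with a PropertyAccessException).
@Column(name = "POLL_ATTEMPTS", nullable = true)
private int myPollAttempts;

// After: the boxed type accepts NULL, and the getter normalizes null to 0.
@Column(name = "POLL_ATTEMPTS", nullable = true)
private Integer myPollAttempts;

public Integer getPollAttempts() {
    if (myPollAttempts == null) {
        return 0;
    }
    return myPollAttempts;
}

The bulk UPDATE in IBatch2WorkChunkRepository makes the same allowance on the database side: COALESCE(e.myPollAttempts, 0) + 1 increments a NULL count from zero instead of propagating NULL.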
@@ -78,7 +78,7 @@ public interface IBatch2WorkChunkRepository
 
     @Modifying
     @Query(
-            "UPDATE Batch2WorkChunkEntity e SET e.myStatus = :status, e.myNextPollTime = :nextPollTime, e.myPollAttempts = e.myPollAttempts + 1 WHERE e.myId = :id AND e.myStatus IN(:states)")
+            "UPDATE Batch2WorkChunkEntity e SET e.myStatus = :status, e.myNextPollTime = :nextPollTime, e.myPollAttempts = COALESCE(e.myPollAttempts, 0) + 1 WHERE e.myId = :id AND e.myStatus IN(:states)")
     int updateWorkChunkNextPollTime(
             @Param("id") String theChunkId,
             @Param("status") WorkChunkStatusEnum theStatus,
@@ -140,7 +140,7 @@ public class Batch2WorkChunkEntity implements Serializable {
     * The number of times the work chunk has had its state set back to POLL_WAITING.
     */
    @Column(name = "POLL_ATTEMPTS", nullable = true)
-   private int myPollAttempts;
+   private Integer myPollAttempts;
 
    /**
     * Default constructor for Hibernate.
@@ -352,7 +352,10 @@ public class Batch2WorkChunkEntity implements Serializable {
        myNextPollTime = theNextPollTime;
    }
 
-   public int getPollAttempts() {
+   public Integer getPollAttempts() {
+       if (myPollAttempts == null) {
+           return 0;
+       }
        return myPollAttempts;
    }
 
@@ -0,0 +1,76 @@
+package ca.uhn.fhir.jpa.entity;
+
+import com.google.common.reflect.ClassPath;
+import jakarta.persistence.Column;
+import org.junit.jupiter.api.Test;
+
+import java.io.IOException;
+import java.lang.reflect.Field;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+import static org.junit.jupiter.api.Assertions.fail;
+
+public class EntityTest {
+
+    private static final String PACKAGE_BASE = "ca.uhn.fhir.jpa.entity";
+
+    private static final Map<String, List<String>> CLASS_TO_FIELD_NAME_EXCEPTIONS = new HashMap<>();
+
+    static {
+        CLASS_TO_FIELD_NAME_EXCEPTIONS.put("TermValueSetConceptViewOracle",
+            List.of("myConceptOrder"));
+
+        CLASS_TO_FIELD_NAME_EXCEPTIONS.put("ResourceSearchView",
+            List.of("myHasTags"));
+
+        CLASS_TO_FIELD_NAME_EXCEPTIONS.put("HapiFhirEnversRevision",
+            List.of("myRev"));
+
+        CLASS_TO_FIELD_NAME_EXCEPTIONS.put("Batch2JobInstanceEntity",
+            List.of("myProgress", "myErrorCount"));
+
+        CLASS_TO_FIELD_NAME_EXCEPTIONS.put("TermValueSetConceptView",
+            List.of("myConceptOrder"));
+
+        CLASS_TO_FIELD_NAME_EXCEPTIONS.put("TermConcept",
+            List.of("myCodeSystemVersionPid"));
+    }
+
+    @SuppressWarnings("rawtypes")
+    @Test
+    public void nullableFieldsAreNotPrimitivesTest() throws IOException {
+        Set<Class> classes = ClassPath.from(ClassLoader.getSystemClassLoader())
+            .getAllClasses()
+            .stream()
+            .filter(clazz -> clazz.getPackageName().startsWith(PACKAGE_BASE))
+            .map(ClassPath.ClassInfo::load)
+            .collect(Collectors.toSet());
+
+        for (Class clazz : classes) {
+            String className = clazz.getName().replace(PACKAGE_BASE + ".", "");
+            for (Field field : clazz.getDeclaredFields()) {
+                Column column = field.getAnnotation(Column.class);
+                if (column == null) {
+                    continue;
+                }
+
+                if (isPrimitiveFieldNullable(className, field, column)) {
+                    fail(String.format("Column %s on Entity %s is nullable, but has been defined as primitive.",
+                        field.getName(), className)
+                        + " If this is a new field on an existing Table, this can result in deserialization issues when reading old data."
+                        + " Either change to a nullable type (Object) or ensure existing data is migrated and add as an exception.");
+                }
+            }
+        }
+    }
+
+    private boolean isPrimitiveFieldNullable(String className, Field field, Column column) {
+        return column.nullable()
+            && field.getType().isPrimitive()
+            && !(CLASS_TO_FIELD_NAME_EXCEPTIONS.containsKey(className) && CLASS_TO_FIELD_NAME_EXCEPTIONS.get(className).contains(field.getName()));
+    }
+}
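One maintenance note on the test above: it fails for any @Column(nullable = true) field declared with a primitive type unless the entity/field pair is listed in CLASS_TO_FIELD_NAME_EXCEPTIONS. If a primitive mapping of a nullable column is ever intentional (for instance, when all existing rows are known to be migrated), the exemption would be registered in the static block; the names in this sketch are hypothetical, not part of this commit:

// Hypothetical exemption; "SomeFutureEntity" and "myLegacyCount" are illustrative only.
CLASS_TO_FIELD_NAME_EXCEPTIONS.put("SomeFutureEntity", List.of("myLegacyCount"));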
@@ -0,0 +1,57 @@
+package ca.uhn.fhir.jpa.batch2;
+
+import ca.uhn.fhir.batch2.api.IJobPersistence;
+import ca.uhn.fhir.batch2.model.JobInstance;
+import ca.uhn.fhir.batch2.model.StatusEnum;
+import ca.uhn.fhir.batch2.model.WorkChunkStatusEnum;
+import ca.uhn.fhir.jpa.dao.data.IBatch2WorkChunkRepository;
+import ca.uhn.fhir.jpa.entity.Batch2WorkChunkEntity;
+import ca.uhn.fhir.jpa.test.BaseJpaR4Test;
+import org.junit.jupiter.api.Test;
+import org.springframework.beans.factory.annotation.Autowired;
+
+import java.time.Instant;
+import java.time.temporal.ChronoUnit;
+import java.util.Date;
+import java.util.List;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
+public class JpaJobPersistenceImplIT extends BaseJpaR4Test {
+
+    @Autowired
+    private IBatch2WorkChunkRepository myWorkChunkRepository;
+    @Autowired
+    private IJobPersistence myJobPersistence;
+
+    @Test
+    public void onWorkChunkPollDelay_withValidData_updatesDeadlineAndPollAttemptCount() {
+        // setup
+        Date nextPollTime = Date.from(Instant.now().plus(1, ChronoUnit.HOURS));
+        JobInstance jobInstance = new JobInstance();
+        jobInstance.setJobDefinitionId("job-def-id");
+        jobInstance.setStatus(StatusEnum.IN_PROGRESS);
+        jobInstance.setJobDefinitionVersion(1);
+        String instanceId = myJobPersistence.storeNewInstance(jobInstance);
+
+        Batch2WorkChunkEntity workChunk = new Batch2WorkChunkEntity();
+        workChunk.setId("id");
+        workChunk.setInstanceId(instanceId);
+        workChunk.setJobDefinitionId(jobInstance.getJobDefinitionId());
+        workChunk.setJobDefinitionVersion(jobInstance.getJobDefinitionVersion());
+        workChunk.setStatus(WorkChunkStatusEnum.IN_PROGRESS);
+        workChunk.setCreateTime(new Date());
+        workChunk.setTargetStepId("step-id");
+        myWorkChunkRepository.save(workChunk);
+
+        // test
+        myJobPersistence.onWorkChunkPollDelay(workChunk.getId(), nextPollTime);
+
+        // verify
+        List<Batch2WorkChunkEntity> allChunks = myWorkChunkRepository.findAll();
+        assertEquals(1, allChunks.size());
+        Batch2WorkChunkEntity chunk = allChunks.get(0);
+        assertEquals(nextPollTime, chunk.getNextPollTime());
+        assertEquals(1, chunk.getPollAttempts());
+    }
+}
@@ -76,7 +76,7 @@ public class WorkChunk extends WorkChunkMetadata {
     * Total polling attempts done thus far.
     */
    @JsonProperty("pollAttempts")
-   private int myPollAttempts;
+   private Integer myPollAttempts;
 
    @JsonProperty(value = "recordsProcessed", access = JsonProperty.Access.READ_ONLY)
    private Integer myRecordsProcessed;
@@ -214,7 +214,7 @@ public class WorkChunk extends WorkChunkMetadata {
        myNextPollTime = theNextPollTime;
    }
 
-   public int getPollAttempts() {
+   public Integer getPollAttempts() {
        return myPollAttempts;
    }
 
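The same int-to-Integer switch on the JSON model (the WorkChunk change above) means a serialized chunk that omits "pollAttempts" now deserializes to null rather than 0, so callers should not assume a non-null count. A small, self-contained sketch of that Jackson behavior, using a hypothetical stand-in class rather than WorkChunk itself:

import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.ObjectMapper;

public class PollAttemptsJsonSketch {
    // Hypothetical stand-in for the relevant WorkChunk field; not part of this commit.
    public static class Chunk {
        @JsonProperty("pollAttempts")
        public Integer pollAttempts;
    }

    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        // A payload written before the field existed simply omits it:
        Chunk chunk = mapper.readValue("{}", Chunk.class);
        System.out.println(chunk.pollAttempts); // prints "null"; a primitive int would have defaulted to 0
    }
}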