Fix two test failures

This commit is contained in:
James Agnew 2018-11-22 08:41:12 -05:00
parent 0625e38727
commit d0b194f9d7
7 changed files with 45 additions and 31 deletions

StaleSearchDeletingSvcImpl.java

@@ -25,21 +25,20 @@ import ca.uhn.fhir.jpa.dao.data.ISearchDao;
import ca.uhn.fhir.jpa.dao.data.ISearchIncludeDao;
import ca.uhn.fhir.jpa.dao.data.ISearchResultDao;
import com.google.common.annotations.VisibleForTesting;
+import com.google.common.collect.Lists;
import org.apache.commons.lang3.time.DateUtils;
import org.hl7.fhir.dstu3.model.InstantType;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Slice;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Service;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.transaction.support.TransactionTemplate;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
import java.util.Date;
import java.util.List;
/**
* Deletes old searches
@@ -57,7 +56,8 @@ public class StaleSearchDeletingSvcImpl implements IStaleSearchDeletingSvc {
* DELETE FROM foo WHERE params IN (aaaa)
* type query and this can fail if we have 1000s of params
*/
-public static int ourMaximumResultsToDelete = 500;
+public static int ourMaximumResultsToDeleteInOneStatement = 500;
+public static int ourMaximumResultsToDeleteInOnePass = 20000;
private static Long ourNowForUnitTests;
/*
* We give a bit of extra leeway just to avoid race conditions where a query result
@@ -75,8 +75,6 @@ public class StaleSearchDeletingSvcImpl implements IStaleSearchDeletingSvc {
private ISearchResultDao mySearchResultDao;
@Autowired
private PlatformTransactionManager myTransactionManager;
-@PersistenceContext()
-private EntityManager myEntityManager;
private void deleteSearch(final Long theSearchPid) {
mySearchDao.findById(theSearchPid).ifPresent(searchToDelete -> {
@@ -90,10 +88,14 @@ public class StaleSearchDeletingSvcImpl implements IStaleSearchDeletingSvc {
* huge deal to be only partially deleting search results. They'll get deleted
* eventually
*/
-int max = ourMaximumResultsToDelete;
+int max = ourMaximumResultsToDeleteInOnePass;
Slice<Long> resultPids = mySearchResultDao.findForSearch(PageRequest.of(0, max), searchToDelete.getId());
if (resultPids.hasContent()) {
-mySearchResultDao.deleteByIds(resultPids.getContent());
+List<List<Long>> partitions = Lists.partition(resultPids.getContent(), ourMaximumResultsToDeleteInOneStatement);
+for (List<Long> nextPartition : partitions) {
+mySearchResultDao.deleteByIds(nextPartition);
+}
}
// Only delete if we don't have results left in this search
@@ -166,7 +168,7 @@ public class StaleSearchDeletingSvcImpl implements IStaleSearchDeletingSvc {
@VisibleForTesting
public static void setMaximumResultsToDeleteForUnitTest(int theMaximumResultsToDelete) {
-ourMaximumResultsToDelete = theMaximumResultsToDelete;
+ourMaximumResultsToDeleteInOneStatement = theMaximumResultsToDelete;
}
private static long now() {

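Taken together, the hunks above replace a single bulk delete with two levels of batching: one pass claims up to ourMaximumResultsToDeleteInOnePass search result PIDs, and Guava's Lists.partition splits them into IN-clause-sized chunks of ourMaximumResultsToDeleteInOneStatement before each repository delete. A minimal sketch of the pattern, with the Spring Data repository call stood in by a plain callback (deletePass and the Consumer parameter are illustrative names, not the service's real API):

import com.google.common.collect.Lists;
import java.util.List;
import java.util.function.Consumer;

public class PartitionedDeleteSketch {

	// Mirrors the renamed constants: a single statement's IN clause stays small,
	// while one pass may still clear a large number of rows.
	static final int MAX_PER_STATEMENT = 500;
	static final int MAX_PER_PASS = 20000;

	static void deletePass(List<Long> resultPids, Consumer<List<Long>> deleteByIds) {
		// Cap the work done in one pass
		List<Long> thisPass = resultPids.subList(0, Math.min(resultPids.size(), MAX_PER_PASS));
		// Each chunk becomes one bounded "DELETE ... WHERE pid IN (...)" statement
		for (List<Long> chunk : Lists.partition(thisPass, MAX_PER_STATEMENT)) {
			deleteByIds.accept(chunk);
		}
	}
}

Keeping the per-statement cap at 500 avoids the failure mode the class comment warns about, while the larger per-pass cap of 20000 reduces the number of scheduled passes (and log lines) needed to drain a large search, as the changelog entry at the end of this commit notes.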
ResourceReindexingSvcImpl.java

@@ -36,7 +36,6 @@ import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.exceptions.ResourceVersionConflictException;
import ca.uhn.fhir.util.StopWatch;
import com.google.common.annotations.VisibleForTesting;
-import com.google.common.base.Stopwatch;
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.concurrent.BasicThreadFactory;
import org.apache.commons.lang3.time.DateUtils;
@@ -72,9 +71,9 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank;
public class ResourceReindexingSvcImpl implements IResourceReindexingSvc {
-static final Date BEGINNING_OF_TIME = new Date(0);
+private static final Date BEGINNING_OF_TIME = new Date(0);
private static final Logger ourLog = LoggerFactory.getLogger(ResourceReindexingSvcImpl.class);
-public static final int PASS_SIZE = 25000;
+private static final int PASS_SIZE = 25000;
private final ReentrantLock myIndexingLock = new ReentrantLock();
@Autowired
private IResourceReindexJobDao myReindexJobDao;
@@ -301,7 +300,6 @@ public class ResourceReindexingSvcImpl implements IResourceReindexingSvc {
.collect(Collectors.toList());
Date latestDate = null;
-boolean haveMultipleDates = false;
for (Future<Date> next : futures) {
Date nextDate;
try {
@@ -317,17 +315,13 @@ public class ResourceReindexingSvcImpl implements IResourceReindexingSvc {
}
if (nextDate != null) {
-if (latestDate != null) {
-if (latestDate.getTime() != nextDate.getTime()) {
-haveMultipleDates = true;
-}
-}
if (latestDate == null || latestDate.getTime() < nextDate.getTime()) {
latestDate = new Date(nextDate.getTime());
}
}
}
Validate.notNull(latestDate);
Date newLow;
if (latestDate.getTime() == low.getTime()) {
+if (count == PASS_SIZE) {

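With the haveMultipleDates bookkeeping removed, the loop above reduces to a maximum-fold over the dates reported by the reindexing futures, and the low watermark then advances the way the new test comments further down describe: resume at the latest timestamp seen, or step 1ms forward when a pass could not get past its own starting point. A sketch under that reading of the hunk (nextLow and its parameters are illustrative names):

import java.util.Date;
import java.util.List;
import java.util.Objects;

class ReindexWatermarkSketch {

	static Date nextLow(List<Date> datesFromFutures, Date low) {
		// Fold the per-thread results down to the latest update timestamp seen
		Date latestDate = null;
		for (Date nextDate : datesFromFutures) {
			if (nextDate != null && (latestDate == null || latestDate.getTime() < nextDate.getTime())) {
				latestDate = new Date(nextDate.getTime());
			}
		}
		Objects.requireNonNull(latestDate); // the service uses Validate.notNull(latestDate)

		// Same low as last time: step 1ms forward so the job eventually terminates.
		// Otherwise resume at latestDate itself, so rows sharing that timestamp are not skipped.
		if (latestDate.getTime() == low.getTime()) {
			return new Date(latestDate.getTime() + 1);
		}
		return latestDate;
	}
}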
BaseHapiTerminologySvcImpl.java

@@ -863,8 +863,8 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
}
if (relCount > 0) {
ourLog.info("Saved {} deferred relationships ({} remain) in {}ms ({}ms / code)",
relCount, myConceptLinksToSaveLater.size(), stopwatch.getMillis(), stopwatch.getMillisPerOperation(codeCount));
ourLog.info("Saved {} deferred relationships ({} remain) in {}ms ({}ms / entry)",
relCount, myConceptLinksToSaveLater.size(), stopwatch.getMillis(), stopwatch.getMillisPerOperation(relCount));
}
if ((myDeferredConcepts.size() + myConceptLinksToSaveLater.size()) == 0) {

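The corrected log statement divides by relCount, the number of relationship rows saved in this block, rather than codeCount, so the "{}ms / entry" placeholder reports the cost of the work actually done here. The rate itself is just elapsed time over operation count; a trivial plain-Java illustration (not HAPI's StopWatch API):

class RateSketch {
	// Average cost per saved entry: elapsed milliseconds divided by the number of
	// rows saved in this block (relCount in the diff), guarding against zero.
	static long millisPerEntry(long elapsedMillis, long savedEntryCount) {
		return savedEntryCount == 0 ? 0 : elapsedMillis / savedEntryCount;
	}
}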
TestUtil.java

@@ -78,6 +78,14 @@ public class TestUtil {
for (Field nextField : theClazz.getDeclaredFields()) {
ourLog.info(" * Scanning field: {}", nextField.getName());
scan(nextField, theNames, theIsSuperClass);
+Lob lobClass = nextField.getAnnotation(Lob.class);
+if (lobClass != null) {
+if (nextField.getType().equals(byte[].class) == false) {
+//Validate.isTrue(false);
+}
+}
}
if (theClazz.getSuperclass().equals(Object.class)) {
@@ -87,8 +95,8 @@
scanClass(theNames, theClazz.getSuperclass(), true);
}
-private static void scan(AnnotatedElement ae, Set<String> theNames, boolean theIsSuperClass) {
-Table table = ae.getAnnotation(Table.class);
+private static void scan(AnnotatedElement theAnnotatedElement, Set<String> theNames, boolean theIsSuperClass) {
+Table table = theAnnotatedElement.getAnnotation(Table.class);
if (table != null) {
assertNotADuplicateName(table.name(), theNames);
for (UniqueConstraint nextConstraint : table.uniqueConstraints()) {
@@ -101,28 +109,28 @@
}
}
-JoinColumn joinColumn = ae.getAnnotation(JoinColumn.class);
+JoinColumn joinColumn = theAnnotatedElement.getAnnotation(JoinColumn.class);
if (joinColumn != null) {
assertNotADuplicateName(joinColumn.name(), null);
ForeignKey fk = joinColumn.foreignKey();
if (theIsSuperClass) {
-Validate.isTrue(isBlank(fk.name()), "Foreign key on " + ae.toString() + " has a name() and should not as it is a superclass");
+Validate.isTrue(isBlank(fk.name()), "Foreign key on " + theAnnotatedElement.toString() + " has a name() and should not as it is a superclass");
} else {
Validate.notNull(fk);
-Validate.isTrue(isNotBlank(fk.name()), "Foreign key on " + ae.toString() + " has no name()");
+Validate.isTrue(isNotBlank(fk.name()), "Foreign key on " + theAnnotatedElement.toString() + " has no name()");
Validate.isTrue(fk.name().startsWith("FK_"));
assertNotADuplicateName(fk.name(), theNames);
}
}
-Column column = ae.getAnnotation(Column.class);
+Column column = theAnnotatedElement.getAnnotation(Column.class);
if (column != null) {
assertNotADuplicateName(column.name(), null);
-Validate.isTrue(column.unique() == false, "Should not use unique attribute on column (use named @UniqueConstraint instead) on " + ae.toString());
+Validate.isTrue(column.unique() == false, "Should not use unique attribute on column (use named @UniqueConstraint instead) on " + theAnnotatedElement.toString());
}
-GeneratedValue gen = ae.getAnnotation(GeneratedValue.class);
-SequenceGenerator sg = ae.getAnnotation(SequenceGenerator.class);
+GeneratedValue gen = theAnnotatedElement.getAnnotation(GeneratedValue.class);
+SequenceGenerator sg = theAnnotatedElement.getAnnotation(SequenceGenerator.class);
Validate.isTrue((gen != null) == (sg != null));
if (gen != null) {
assertNotADuplicateName(gen.generator(), theNames);

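Renaming ae to theAnnotatedElement makes the reflection walk above easier to follow: for each annotated element the scanner pulls JPA annotations and enforces the project's schema naming rules (FK_-prefixed foreign key names on join columns, named @UniqueConstraint instead of unique columns, paired @GeneratedValue and @SequenceGenerator). A condensed, self-contained sketch of the same scanning pattern (simplified, and using javax.persistence as the diff does):

import javax.persistence.Column;
import javax.persistence.ForeignKey;
import javax.persistence.JoinColumn;
import java.lang.reflect.AnnotatedElement;
import java.lang.reflect.Field;

class SchemaNamingScanSketch {

	// Walk every declared field and apply the same style of checks the test utility uses
	static void scanClass(Class<?> clazz) {
		for (Field field : clazz.getDeclaredFields()) {
			check(field);
		}
	}

	private static void check(AnnotatedElement element) {
		JoinColumn joinColumn = element.getAnnotation(JoinColumn.class);
		if (joinColumn != null) {
			ForeignKey fk = joinColumn.foreignKey();
			// Mirror of the FK naming rule: every join column declares an FK_* name
			if (fk.name().isEmpty() || !fk.name().startsWith("FK_")) {
				throw new IllegalStateException("Bad foreign key name on " + element);
			}
		}
		Column column = element.getAnnotation(Column.class);
		if (column != null && column.unique()) {
			// Mirror of the unique-constraint rule: use a named @UniqueConstraint instead
			throw new IllegalStateException("Use a named @UniqueConstraint instead on " + element);
		}
	}
}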
FhirResourceDaoR4SearchCustomSearchParamTest.java

@@ -145,6 +145,7 @@ public class FhirResourceDaoR4SearchCustomSearchParamTest extends BaseJpaR4Test
mySearchParameterDao.create(fooSp, mySrd);
assertEquals(1, myResourceReindexingSvc.forceReindexingPass());
+assertEquals(1, myResourceReindexingSvc.forceReindexingPass());
assertEquals(0, myResourceReindexingSvc.forceReindexingPass());

FhirResourceDaoR4UniqueSearchParamTest.java

@@ -571,7 +571,11 @@ public class FhirResourceDaoR4UniqueSearchParamTest extends BaseJpaR4Test {
createUniqueIndexCoverageBeneficiary();
myResourceReindexingSvc.markAllResourcesForReindexing("Coverage");
+// The first pass has a low of EPOCH
assertEquals(1, myResourceReindexingSvc.forceReindexingPass());
+// The second pass has a low of Coverage.lastUpdated
+assertEquals(1, myResourceReindexingSvc.forceReindexingPass());
+// The third pass has a low of (Coverage.lastUpdated + 1ms)
assertEquals(0, myResourceReindexingSvc.forceReindexingPass());
List<ResourceIndexedCompositeStringUnique> uniques = myResourceIndexedCompositeStringUniqueDao.findAll();

changes.xml

@ -86,6 +86,11 @@
mark reindexing jobs as deleted before they had actually completed, leading to
some resources not actually being reindexed.
</action>
<action type="change">
The JPA stale search deletion service now deletes cached search results in much
larger batches (20000 instead of 500) in order to reduce the amount of noise
in the logs.
</action>
</release>
<release version="3.6.0" date="2018-11-12" description="Food">
<action type="add">