Re-enable elasticsearch tests (#3850)
* Re-enable elasticsearch tests. Limit the elasticsearch container to 512MB instead of the default 4GB.
This commit is contained in:
parent
a5c4b0756b
commit
60a879c842
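
A note on the memory cap before the diff: the official elasticsearch image sizes its JVM heap for production use, so a single test container can claim several GB and starve parallel CI jobs. Below is a minimal sketch of how such a cap can be applied with Testcontainers; the image tag and class name are illustrative assumptions, not the actual TestElasticsearchContainerHelper from this commit.

import org.testcontainers.elasticsearch.ElasticsearchContainer;
import org.testcontainers.utility.DockerImageName;

public final class ElasticsearchContainerSketch {

	// Hypothetical image tag; the real helper pins whatever version HAPI FHIR tests against.
	private static final DockerImageName ELASTICSEARCH_IMAGE =
		DockerImageName.parse("docker.elastic.co/elasticsearch/elasticsearch:7.16.3");

	public static ElasticsearchContainer getEmbeddedElasticSearch() {
		return new ElasticsearchContainer(ELASTICSEARCH_IMAGE)
			// Cap the JVM heap at 512MB so several containers fit on one CI agent.
			.withEnv("ES_JAVA_OPTS", "-Xms512m -Xmx512m");
	}

	private ElasticsearchContainerSketch() {
	}
}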
@@ -1,5 +1,25 @@
 package ca.uhn.fhir.util;

+/*-
+ * #%L
+ * HAPI FHIR - Core Library
+ * %%
+ * Copyright (C) 2014 - 2022 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
 import org.apache.commons.lang3.tuple.Pair;

 import java.math.BigDecimal;

@@ -140,8 +140,10 @@ public class UploadTerminologyCommand extends BaseRequestGeneratingCommand {
 		boolean haveCompressedContents = false;
 		try {
 			for (String nextDataFile : theDatafile) {
+				File dataFile = new File(nextDataFile);
+				ourLog.info("Reading {}", dataFile.getAbsolutePath());

-				try (FileInputStream fileInputStream = new FileInputStream(nextDataFile)) {
+				try (FileInputStream fileInputStream = new FileInputStream(dataFile)) {
 					boolean isFhirType = nextDataFile.endsWith(".json") || nextDataFile.endsWith(".xml");
 					if (nextDataFile.endsWith(".csv") || nextDataFile.endsWith(".properties") || isFhirType) {

@@ -168,6 +170,7 @@ public class UploadTerminologyCommand extends BaseRequestGeneratingCommand {
 					IOUtils.copy(countingInputStream, zipOutputStream);
 					haveCompressedContents = true;
 					compressedSourceBytesCount += countingInputStream.getCount();
+					++compressedFileCount;

 					zipOutputStream.flush();
 					ourLog.info("Finished compressing {}", nextDataFile);

@@ -20,12 +20,15 @@ import org.junit.jupiter.api.Test;
 import org.junit.jupiter.api.extension.ExtendWith;
 import org.mockito.Mock;
 import org.mockito.junit.jupiter.MockitoExtension;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;

 import java.io.File;
 import java.io.FileWriter;
 import java.io.IOException;
 import java.util.List;
 import java.util.Map;
+import java.util.concurrent.atomic.AtomicInteger;

 import static org.hamcrest.CoreMatchers.hasItems;
 import static org.hamcrest.MatcherAssert.assertThat;

@@ -40,22 +43,23 @@ import static org.mockito.Mockito.when;

 @ExtendWith(MockitoExtension.class)
 public class HeaderPassthroughOptionTest {
+	private static final Logger ourLog = LoggerFactory.getLogger(HeaderPassthroughOptionTest.class);
+
 	final String FHIR_VERSION = "r4";
 	private FhirContext myCtx = FhirContext.forR4();
 	private Server myServer;
 	private int myPort;
 	private final String headerKey1 = "test-header-key-1";
 	private final String headerValue1 = "test header value-1";
-	private static final String myConceptsFileName = "target/concepts.csv";
-	private static File myConceptsFile = new File(myConceptsFileName);
-	private static final String myHierarchyFileName = "target/hierarchy.csv";
-	private static File myHierarchyFile = new File(myHierarchyFileName);
+	private static final String ourConceptsFileName = "target/concepts.csv";
+	private static final String ourHierarchyFileName = "target/hierarchy.csv";
 	private final CapturingInterceptor myCapturingInterceptor = new CapturingInterceptor();
 	private final UploadTerminologyCommand testedCommand =
 		new RequestCapturingUploadTerminologyCommand(myCapturingInterceptor);

 	@Mock
 	protected ITermLoaderSvc myTermLoaderSvc;
+	private static final AtomicInteger ourFilenameCounter = new AtomicInteger();

 	@BeforeEach
 	public void beforeEach() throws Exception {

@@ -69,21 +73,22 @@ public class HeaderPassthroughOptionTest {
 		myServer.setHandler(proxyHandler);
 		JettyUtil.startServer(myServer);
 		myPort = JettyUtil.getPortForStartedServer(myServer);
-		writeConceptAndHierarchyFiles();
 		when(myTermLoaderSvc.loadCustom(eq("http://foo"), anyList(), any()))
 			.thenReturn(new UploadStatistics(100, new IdType("CodeSystem/101")));
 	}

 	@Test
 	public void oneHeader() throws Exception {
-		writeConceptAndHierarchyFiles();
-		String[] args = new String[] {
+		int filenameCounter = ourFilenameCounter.incrementAndGet();
+		writeConceptAndHierarchyFiles(filenameCounter);
+
+		String[] args = new String[]{
 			"-v", FHIR_VERSION,
 			"-m", "SNAPSHOT",
 			"-t", "http://localhost:" + myPort,
 			"-u", "http://foo",
-			"-d", myConceptsFileName,
-			"-d", myHierarchyFileName,
+			"-d", getConceptFilename(filenameCounter),
+			"-d", getHierarchyFilename(filenameCounter),
 			"-hp", "\"" + headerKey1 + ":" + headerValue1 + "\""
 		};

@@ -102,16 +107,18 @@ public class HeaderPassthroughOptionTest {

 	@Test
 	public void twoHeadersSameKey() throws Exception {
-		writeConceptAndHierarchyFiles();
+		int filenameCounter = ourFilenameCounter.incrementAndGet();
+		writeConceptAndHierarchyFiles(filenameCounter);
+
 		final String headerValue2 = "test header value-2";

-		String[] args = new String[] {
+		String[] args = new String[]{
 			"-v", FHIR_VERSION,
 			"-m", "SNAPSHOT",
 			"-t", "http://localhost:" + myPort,
 			"-u", "http://foo",
-			"-d", myConceptsFileName,
-			"-d", myHierarchyFileName,
+			"-d", getConceptFilename(filenameCounter),
+			"-d", getHierarchyFilename(filenameCounter),
 			"-hp", "\"" + headerKey1 + ":" + headerValue1 + "\"",
 			"-hp", "\"" + headerKey1 + ":" + headerValue2 + "\""
 		};

@@ -133,17 +140,19 @@ public class HeaderPassthroughOptionTest {

 	@Test
 	public void twoHeadersDifferentKeys() throws Exception {
-		writeConceptAndHierarchyFiles();
+		int filenameCounter = ourFilenameCounter.incrementAndGet();
+		writeConceptAndHierarchyFiles(filenameCounter);
+
 		final String headerKey2 = "test-header-key-2";
 		final String headerValue2 = "test header value-2";

-		String[] args = new String[] {
+		String[] args = new String[]{
 			"-v", FHIR_VERSION,
 			"-m", "SNAPSHOT",
 			"-t", "http://localhost:" + myPort,
 			"-u", "http://foo",
-			"-d", myConceptsFileName,
-			"-d", myHierarchyFileName,
+			"-d", getConceptFilename(filenameCounter),
+			"-d", getHierarchyFilename(filenameCounter),
 			"-hp", "\"" + headerKey1 + ":" + headerValue1 + "\"",
 			"-hp", "\"" + headerKey2 + ":" + headerValue2 + "\""
 		};

@@ -164,23 +173,34 @@ public class HeaderPassthroughOptionTest {
 		assertThat(allHeaders.get(headerKey2), hasItems(headerValue2));
 	}

-	private synchronized void writeConceptAndHierarchyFiles() throws IOException {
-		if (!myConceptsFile.exists()) {
-			try (FileWriter w = new FileWriter(myConceptsFile, false)) {
-				w.append("CODE,DISPLAY\n");
-				w.append("ANIMALS,Animals\n");
-				w.append("CATS,Cats\n");
-				w.append("DOGS,Dogs\n");
-			}
-		}
-		if (!myHierarchyFile.exists()) {
-			try (FileWriter w = new FileWriter(myHierarchyFile, false)) {
-				w.append("PARENT,CHILD\n");
-				w.append("ANIMALS,CATS\n");
-				w.append("ANIMALS,DOGS\n");
-			}
-		}
-	}
+	private static void writeConceptAndHierarchyFiles(int theFilenameCounter) throws IOException {
+		File conceptsFile = new File(getConceptFilename(theFilenameCounter));
+		File hierarchyFile = new File(getHierarchyFilename(theFilenameCounter));
+
+		ourLog.info("Writing {}", conceptsFile.getAbsolutePath());
+		try (FileWriter w = new FileWriter(conceptsFile, false)) {
+			w.append("CODE,DISPLAY\n");
+			w.append("ANIMALS,Animals\n");
+			w.append("CATS,Cats\n");
+			w.append("DOGS,Dogs\n");
+		}
+		ourLog.info("Can read {}: {}", ourConceptsFileName, conceptsFile.canRead());
+
+		ourLog.info("Writing {}", hierarchyFile.getAbsolutePath());
+		try (FileWriter w = new FileWriter(hierarchyFile, false)) {
+			w.append("PARENT,CHILD\n");
+			w.append("ANIMALS,CATS\n");
+			w.append("ANIMALS,DOGS\n");
+		}
+		ourLog.info("Can read {}: {}", ourHierarchyFileName, hierarchyFile.canRead());
+	}
+
+	private static String getConceptFilename(int theFilenameCounter) {
+		return ourConceptsFileName.replace(".csv", theFilenameCounter + ".csv");
+	}
+
+	private static String getHierarchyFilename(int theFilenameCounter) {
+		return ourHierarchyFileName.replace(".csv", theFilenameCounter + ".csv");
+	}

 	private class RequestCapturingUploadTerminologyCommand extends UploadTerminologyCommand {

@@ -0,0 +1,4 @@
+type: fix
+issue: 3878
+title: "Previously, if an intermediate step of a gated batch job produced no output data, an exception would be thrown
+  and processing of the step would abort, leaving the batch job in an unrecoverable state. This has been fixed."
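
The changelog entry above describes the contract of the fix rather than its code. As a rough, hypothetical illustration (these names are invented, not HAPI FHIR's batch2 API), a gated job runner should treat an empty intermediate result as "nothing for the next step to do", not as an error:

import java.util.List;

// Illustrative sketch only; not the actual HAPI FHIR batch2 implementation.
class GatedJobRunnerSketch {

	List<String> runStep(int theStepIndex, List<String> theInput) {
		// Real work would go here; an intermediate step may legitimately
		// produce no output chunks at all.
		return List.of();
	}

	void runAll(List<String> theInitialInput, int theStepCount) {
		List<String> data = theInitialInput;
		for (int i = 0; i < theStepCount; i++) {
			data = runStep(i, data);
			if (data.isEmpty()) {
				// Before the fix described above, this case threw an exception and
				// left the job unrecoverable; after it, the job simply completes.
				return;
			}
		}
	}
}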
@@ -27,10 +27,10 @@ import ca.uhn.fhir.jpa.api.config.DaoConfig;
 import ca.uhn.fhir.jpa.batch.config.BatchConstants;
 import ca.uhn.fhir.jpa.batch.log.Logs;
 import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc;
+import ca.uhn.fhir.jpa.bulk.imprt.model.ActivateJobResult;
 import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobFileJson;
 import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobJson;
 import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum;
-import ca.uhn.fhir.jpa.bulk.imprt.model.ActivateJobResult;
 import ca.uhn.fhir.jpa.dao.data.IBulkImportJobDao;
 import ca.uhn.fhir.jpa.dao.data.IBulkImportJobFileDao;
 import ca.uhn.fhir.jpa.entity.BulkImportJobEntity;

@@ -98,12 +98,12 @@ public class BulkDataImportSvcImpl implements IBulkDataImportSvc {
 		ValidateUtil.isNotNullOrThrowUnprocessableEntity(theJobDescription.getProcessingMode(), "Job File Processing mode must not be null");
 		ValidateUtil.isTrueOrThrowInvalidRequest(theJobDescription.getBatchSize() > 0, "Job File Batch Size must be > 0");

-		String jobId = UUID.randomUUID().toString();
+		String biJobId = UUID.randomUUID().toString();

-		ourLog.info("Creating new Bulk Import job with {} files, assigning job ID: {}", theInitialFiles.size(), jobId);
+		ourLog.info("Creating new Bulk Import job with {} files, assigning bijob ID: {}", theInitialFiles.size(), biJobId);

 		BulkImportJobEntity job = new BulkImportJobEntity();
-		job.setJobId(jobId);
+		job.setJobId(biJobId);
 		job.setFileCount(theInitialFiles.size());
 		job.setStatus(BulkImportJobStatusEnum.STAGING);
 		job.setJobDescription(theJobDescription.getJobDescription());

@@ -114,17 +114,17 @@ public class BulkDataImportSvcImpl implements IBulkDataImportSvc {
 		int nextSequence = 0;
 		addFilesToJob(theInitialFiles, job, nextSequence);

-		return jobId;
+		return biJobId;
 	}

 	@Override
 	@Transactional
-	public void addFilesToJob(String theJobId, List<BulkImportJobFileJson> theFiles) {
-		ourLog.info("Adding {} files to bulk import job: {}", theFiles.size(), theJobId);
+	public void addFilesToJob(String theBiJobId, List<BulkImportJobFileJson> theFiles) {
+		ourLog.info("Adding {} files to bulk import job with bijob id {}", theFiles.size(), theBiJobId);

-		BulkImportJobEntity job = findJobByJobId(theJobId);
+		BulkImportJobEntity job = findJobByBiJobId(theBiJobId);

-		ValidateUtil.isTrueOrThrowInvalidRequest(job.getStatus() == BulkImportJobStatusEnum.STAGING, "Job %s has status %s and can not be added to", theJobId, job.getStatus());
+		ValidateUtil.isTrueOrThrowInvalidRequest(job.getStatus() == BulkImportJobStatusEnum.STAGING, "bijob id %s has status %s and can not be added to", theBiJobId, job.getStatus());

 		addFilesToJob(theFiles, job, job.getFileCount());

@@ -132,20 +132,20 @@ public class BulkDataImportSvcImpl implements IBulkDataImportSvc {
 		myJobDao.save(job);
 	}

-	private BulkImportJobEntity findJobByJobId(String theJobId) {
+	private BulkImportJobEntity findJobByBiJobId(String theBiJobId) {
 		BulkImportJobEntity job = myJobDao
-			.findByJobId(theJobId)
-			.orElseThrow(() -> new InvalidRequestException("Unknown job ID: " + theJobId));
+			.findByJobId(theBiJobId)
+			.orElseThrow(() -> new InvalidRequestException("Unknown bijob id: " + theBiJobId));
 		return job;
 	}

 	@Override
 	@Transactional
-	public void markJobAsReadyForActivation(String theJobId) {
-		ourLog.info("Activating bulk import job {}", theJobId);
+	public void markJobAsReadyForActivation(String theBiJobId) {
+		ourLog.info("Activating bulk import bijob {}", theBiJobId);

-		BulkImportJobEntity job = findJobByJobId(theJobId);
-		ValidateUtil.isTrueOrThrowInvalidRequest(job.getStatus() == BulkImportJobStatusEnum.STAGING, "Bulk import job %s can not be activated in status: %s", theJobId, job.getStatus());
+		BulkImportJobEntity job = findJobByBiJobId(theBiJobId);
+		ValidateUtil.isTrueOrThrowInvalidRequest(job.getStatus() == BulkImportJobStatusEnum.STAGING, "Bulk import bijob %s can not be activated in status: %s", theBiJobId, job.getStatus());

 		job.setStatus(BulkImportJobStatusEnum.READY);
 		myJobDao.save(job);

@@ -168,7 +168,9 @@ public class BulkDataImportSvcImpl implements IBulkDataImportSvc {
 		}

 		try {
-			return doActivateNextReadyJob();
+			ActivateJobResult retval = doActivateNextReadyJob();
+			ourLog.info("Batch job submitted with batch job id {}", retval.jobId);
+			return retval;
 		} finally {
 			myRunningJobSemaphore.release();
 		}

@@ -191,9 +193,9 @@ public class BulkDataImportSvcImpl implements IBulkDataImportSvc {
 		BulkImportJobEntity bulkImportJobEntity = jobToProcessOpt.get();

 		String jobUuid = bulkImportJobEntity.getJobId();
-		String batchJobId = null;
+		String biJobId = null;
 		try {
-			batchJobId = processJob(bulkImportJobEntity);
+			biJobId = processJob(bulkImportJobEntity);
 		} catch (Exception e) {
 			ourLog.error("Failure while preparing bulk export extract", e);
 			myTxTemplate.execute(t -> {

@@ -208,18 +210,18 @@ public class BulkDataImportSvcImpl implements IBulkDataImportSvc {
 			});
 		}

-		return new ActivateJobResult(true, batchJobId);
+		return new ActivateJobResult(true, biJobId);
 	}

 	@Override
 	@Transactional
-	public void setJobToStatus(String theJobId, BulkImportJobStatusEnum theStatus) {
-		setJobToStatus(theJobId, theStatus, null);
+	public void setJobToStatus(String theBiJobId, BulkImportJobStatusEnum theStatus) {
+		setJobToStatus(theBiJobId, theStatus, null);
 	}

 	@Override
-	public void setJobToStatus(String theJobId, BulkImportJobStatusEnum theStatus, String theStatusMessage) {
-		BulkImportJobEntity job = findJobByJobId(theJobId);
+	public void setJobToStatus(String theBiJobId, BulkImportJobStatusEnum theStatus, String theStatusMessage) {
+		BulkImportJobEntity job = findJobByBiJobId(theBiJobId);
 		job.setStatus(theStatus);
 		job.setStatusMessage(theStatusMessage);
 		myJobDao.save(job);

@@ -227,14 +229,14 @@ public class BulkDataImportSvcImpl implements IBulkDataImportSvc {

 	@Override
 	@Transactional
-	public BulkImportJobJson fetchJob(String theJobId) {
-		BulkImportJobEntity job = findJobByJobId(theJobId);
+	public BulkImportJobJson fetchJob(String theBiJobId) {
+		BulkImportJobEntity job = findJobByBiJobId(theBiJobId);
 		return job.toJson();
 	}

 	@Override
-	public JobInfo getJobStatus(String theJobId) {
-		BulkImportJobEntity theJob = findJobByJobId(theJobId);
+	public JobInfo getJobStatus(String theBiJobId) {
+		BulkImportJobEntity theJob = findJobByBiJobId(theBiJobId);
 		return new JobInfo()
 			.setStatus(theJob.getStatus())
 			.setStatusMessage(theJob.getStatusMessage())

@@ -243,28 +245,28 @@ public class BulkDataImportSvcImpl implements IBulkDataImportSvc {

 	@Transactional
 	@Override
-	public BulkImportJobFileJson fetchFile(String theJobId, int theFileIndex) {
-		BulkImportJobEntity job = findJobByJobId(theJobId);
+	public BulkImportJobFileJson fetchFile(String theBiJobId, int theFileIndex) {
+		BulkImportJobEntity job = findJobByBiJobId(theBiJobId);

 		return myJobFileDao
 			.findForJob(job, theFileIndex)
 			.map(t -> t.toJson())
-			.orElseThrow(() -> new IllegalArgumentException("Invalid index " + theFileIndex + " for job " + theJobId));
+			.orElseThrow(() -> new IllegalArgumentException("Invalid index " + theFileIndex + " for bijob " + theBiJobId));
 	}

 	@Transactional
 	@Override
-	public String getFileDescription(String theJobId, int theFileIndex) {
-		BulkImportJobEntity job = findJobByJobId(theJobId);
+	public String getFileDescription(String theBiJobId, int theFileIndex) {
+		BulkImportJobEntity job = findJobByBiJobId(theBiJobId);

 		return myJobFileDao.findFileDescriptionForJob(job, theFileIndex).orElse("");
 	}

 	@Override
 	@Transactional
-	public void deleteJobFiles(String theJobId) {
-		BulkImportJobEntity job = findJobByJobId(theJobId);
-		List<Long> files = myJobFileDao.findAllIdsForJob(theJobId);
+	public void deleteJobFiles(String theBiJobId) {
+		BulkImportJobEntity job = findJobByBiJobId(theBiJobId);
+		List<Long> files = myJobFileDao.findAllIdsForJob(theBiJobId);
 		for (Long next : files) {
 			myJobFileDao.deleteById(next);
 		}

@@ -272,18 +274,18 @@ public class BulkDataImportSvcImpl implements IBulkDataImportSvc {
 	}

 	private String processJob(BulkImportJobEntity theBulkExportJobEntity) {
-		String jobId = theBulkExportJobEntity.getJobId();
+		String biJobId = theBulkExportJobEntity.getJobId();
 		int batchSize = theBulkExportJobEntity.getBatchSize();

 		Batch2BulkImportPullJobParameters jobParameters = new Batch2BulkImportPullJobParameters();
-		jobParameters.setJobId(jobId);
+		jobParameters.setJobId(biJobId);
 		jobParameters.setBatchSize(batchSize);

 		JobInstanceStartRequest request = new JobInstanceStartRequest();
 		request.setJobDefinitionId(BatchConstants.BULK_IMPORT_JOB_NAME);
 		request.setParameters(jobParameters);

-		ourLog.info("Submitting bulk import job {} to job scheduler", jobId);
+		ourLog.info("Submitting bulk import with bijob id {} to job scheduler", biJobId);

 		return myJobCoordinator.startInstance(request).getJobId();
 	}

@@ -166,14 +166,15 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc {


 	private int getMaxFetchSize(SearchParameterMap theParams, Integer theMax) {
+		if (theParams.getCount() != null) {
+			return theParams.getCount();
+		}
+
 		if (theMax != null) {
 			return theMax;
 		}

-		// wipmb we should really pass this in.
-		if (theParams.getCount() != null) {
-			return theParams.getCount();
-		}
-
 		return DEFAULT_MAX_NON_PAGED_SIZE;
 	}

@@ -23,6 +23,8 @@ package ca.uhn.fhir.jpa.dao.search;
 import com.google.gson.Gson;
 import com.google.gson.JsonArray;
 import com.google.gson.JsonObject;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;

 import javax.annotation.Nonnull;
 import java.util.List;

@@ -36,6 +38,7 @@ import static ca.uhn.fhir.jpa.model.search.HSearchIndexWriter.SEARCH_PARAM_ROOT;
 * Builds lastN aggregation, and parse the results
 */
 public class LastNAggregation {
+	private static final Logger ourLog = LoggerFactory.getLogger(LastNAggregation.class);
 	static final String SP_SUBJECT = SEARCH_PARAM_ROOT + ".subject.reference.value";
 	private static final String SP_CODE_TOKEN_CODE_AND_SYSTEM = SEARCH_PARAM_ROOT + ".code.token.code-system";
 	private static final String SP_DATE_DT_UPPER = SEARCH_PARAM_ROOT + ".date.dt.upper";

@@ -167,6 +170,7 @@ public class LastNAggregation {
 	 * </pre>
 	 */
 	public List<Long> extractResourceIds(@Nonnull JsonObject theAggregationResult) {
+		ourLog.trace("extractResourceIds - hasSubject {} aggregation {}", myAggregateOnSubject, theAggregationResult);
 		Stream<JsonObject> resultBuckets = Stream.of(theAggregationResult);

 		// was it grouped by subject?

@@ -1,5 +1,25 @@
 package ca.uhn.fhir.jpa.dao.search;

+/*-
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2022 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
 public class ResourceNotFoundInIndexException extends IllegalStateException {
 	private static final long serialVersionUID = 1L;

@@ -37,6 +37,9 @@ import javax.persistence.TemporalType;
 import java.io.Serializable;
 import java.util.Date;

+/**
+ * @deprecated use new batch2 reindex job
+ */
+@Deprecated
 @Entity
 @Table(name = "HFJ_RES_REINDEX_JOB")

@@ -6,7 +6,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>6.1.0-PRE27-SNAPSHOT</version>
+		<version>6.2.0-PRE1-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>

@@ -7,9 +7,10 @@ import ca.uhn.fhir.jpa.dao.r4.ElasticsearchPrefixTest;
 import ca.uhn.fhir.jpa.model.dialect.HapiFhirH2Dialect;
 import ca.uhn.fhir.jpa.search.HapiHSearchAnalysisConfigurers;
 import ca.uhn.fhir.jpa.search.elastic.IndexNamePrefixLayoutStrategy;
-import ca.uhn.fhir.jpa.search.elastic.TestElasticsearchContainerHelper;
 import ca.uhn.fhir.jpa.search.lastn.ElasticsearchRestClientFactory;
 import ca.uhn.fhir.jpa.test.config.BlockLargeNumbersOfParamsListener;
+import ca.uhn.fhir.jpa.test.config.TestElasticsearchContainerHelper;
+import ca.uhn.fhir.jpa.test.config.TestHSearchAddInConfig;
 import ca.uhn.fhir.jpa.util.CurrentThreadCaptureQueriesListener;
 import net.ttddyy.dsproxy.listener.logging.SLF4JLogLevel;
 import net.ttddyy.dsproxy.support.ProxyDataSourceBuilder;

@@ -26,9 +27,11 @@ import org.hibernate.search.backend.elasticsearch.index.IndexStatus;
 import org.hibernate.search.engine.cfg.BackendSettings;
 import org.hibernate.search.mapper.orm.cfg.HibernateOrmMapperSettings;
 import org.hibernate.search.mapper.orm.schema.management.SchemaManagementStrategyName;
+import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
+import org.springframework.context.annotation.Import;
 import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean;
 import org.testcontainers.elasticsearch.ElasticsearchContainer;

@@ -43,6 +46,7 @@ import java.util.concurrent.TimeUnit;
 * We need to do this as it is during bean creation that HS bootstrapping occurs.
 */
 @Configuration
+@Import(TestHSearchAddInConfig.PooledElasticsearchContainerConfig.class)
 public class ElasticsearchWithPrefixConfig {

 	@Bean

@@ -93,6 +97,7 @@ public class ElasticsearchWithPrefixConfig {
 		return dataSource;
 	}

+	@Autowired ElasticsearchContainer myElasticsearchContainer;
 	@Bean
 	public Properties jpaProperties() {
 		Properties extraProperties = new Properties();

@@ -102,8 +107,8 @@ public class ElasticsearchWithPrefixConfig {
 		extraProperties.put("hibernate.dialect", HapiFhirH2Dialect.class.getName());
 		//Override default lucene settings
 		// Force elasticsearch to start first
-		int httpPort = elasticContainer().getMappedPort(9200);//9200 is the HTTP port
-		String host = elasticContainer().getHost();
+		int httpPort = myElasticsearchContainer.getMappedPort(9200);//9200 is the HTTP port
+		String host = myElasticsearchContainer.getHost();
 		// the below properties are used for ElasticSearch integration
 		extraProperties.put(BackendSettings.backendKey(BackendSettings.TYPE), "elasticsearch");
 		extraProperties.put(BackendSettings.backendKey(ElasticsearchIndexSettings.ANALYSIS_CONFIGURER),

@@ -137,10 +142,4 @@ public class ElasticsearchWithPrefixConfig {
 		return extraProperties;
 	}

-	@Bean
-	public ElasticsearchContainer elasticContainer() {
-		ElasticsearchContainer embeddedElasticSearch = TestElasticsearchContainerHelper.getEmbeddedElasticSearch();
-		embeddedElasticSearch.start();
-		return embeddedElasticSearch;
-	}
 }

@@ -0,0 +1,20 @@
+package ca.uhn.fhir.jpa.config;
+
+import ca.uhn.fhir.jpa.test.config.TestHSearchAddInConfig;
+import ca.uhn.fhir.jpa.test.config.TestR4Config;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.context.annotation.Import;
+
+/**
+ * Combined config so we consistently share app contexts.
+ *
+ * Spring uses the context classes as the key for reuse between classes,
+ * so let's try to use the same config as much as we can.
+ */
+@Configuration
+@Import({
+	TestR4Config.class,
+	TestHSearchAddInConfig.Elasticsearch.class
+})
+public class TestR4ConfigWithElasticHSearch {
+}
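
The javadoc in this new file is the heart of the commit: Spring's TestContext framework caches ApplicationContexts keyed by the declared configuration classes, so two test classes only share one booted context (and therefore one Elasticsearch container) if they declare exactly the same classes. A small illustration of the effect, with invented test class names:

import org.junit.jupiter.api.extension.ExtendWith;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit.jupiter.SpringExtension;

// Both classes declare the identical configuration class, so the Spring
// TestContext framework reuses one cached ApplicationContext, and therefore
// one Elasticsearch container, across both test classes.
@ExtendWith(SpringExtension.class)
@ContextConfiguration(classes = TestR4ConfigWithElasticHSearch.class)
class FirstElasticIT {
}

@ExtendWith(SpringExtension.class)
@ContextConfiguration(classes = TestR4ConfigWithElasticHSearch.class)
class SecondElasticIT {
}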
@@ -4,11 +4,10 @@ import ca.uhn.fhir.context.FhirContext;
 import ca.uhn.fhir.jpa.api.config.DaoConfig;
 import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoObservation;
 import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoPatient;
+import ca.uhn.fhir.jpa.config.TestR4ConfigWithElasticHSearch;
 import ca.uhn.fhir.jpa.search.lastn.ElasticsearchSvcImpl;
 import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
 import ca.uhn.fhir.jpa.test.BaseJpaTest;
-import ca.uhn.fhir.jpa.test.config.TestHSearchAddInConfig;
-import ca.uhn.fhir.jpa.test.config.TestR4Config;
 import ca.uhn.fhir.rest.param.DateAndListParam;
 import ca.uhn.fhir.rest.param.DateOrListParam;
 import ca.uhn.fhir.rest.param.DateParam;

@@ -29,9 +28,12 @@ import org.hl7.fhir.r4.model.Patient;
 import org.hl7.fhir.r4.model.StringType;
 import org.junit.jupiter.api.AfterAll;
 import org.junit.jupiter.api.AfterEach;
-import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.BeforeAll;
 import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.TestInstance;
 import org.junit.jupiter.api.extension.ExtendWith;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Qualifier;
 import org.springframework.boot.test.mock.mockito.SpyBean;

@@ -54,19 +56,21 @@ import static org.mockito.Mockito.when;

 @ExtendWith(SpringExtension.class)
 @RequiresDocker
-@ContextConfiguration(classes = {TestR4Config.class, TestHSearchAddInConfig.Elasticsearch.class})
+@ContextConfiguration(classes=TestR4ConfigWithElasticHSearch.class)
+@TestInstance(TestInstance.Lifecycle.PER_CLASS)
 abstract public class BaseR4SearchLastN extends BaseJpaTest {
+	private static final Logger ourLog = LoggerFactory.getLogger(BaseR4SearchLastN.class);

 	private static final Map<String, String> observationPatientMap = new HashMap<>();
 	private static final Map<String, String> observationCategoryMap = new HashMap<>();
 	private static final Map<String, String> observationCodeMap = new HashMap<>();
 	private static final Map<String, Date> observationEffectiveMap = new HashMap<>();
+	// wipmb make these normal fields. This static setup wasn't working
 	protected static IIdType patient0Id = null;
 	protected static IIdType patient1Id = null;
 	protected static IIdType patient2Id = null;
 	// Using static variables including the flag below so that we can initialize the database and indexes once
 	// (all of the tests only read from the DB and indexes and so no need to re-initialize them for each test).
 	private static boolean dataLoaded = false;
 	private static Calendar observationDate = new GregorianCalendar();
 	protected final String observationCd0 = "code0";
 	protected final String observationCd1 = "code1";

@@ -108,35 +112,35 @@ abstract public class BaseR4SearchLastN extends BaseJpaTest {
 		myDaoConfig.setLastNEnabled(new DaoConfig().isLastNEnabled());
 	}

-	@BeforeEach
+	@BeforeAll
 	public void beforeCreateTestPatientsAndObservations() throws IOException {
+		ourLog.info("setup Patients and Observations");
 		myDaoConfig.setLastNEnabled(true);
+		// enabled to also create extended lucene index during creation of test data
+		boolean hsearchSaved = myDaoConfig.isAdvancedHSearchIndexing();
+		myDaoConfig.setAdvancedHSearchIndexing(true);

-		// Using a static flag to ensure that test data and elasticsearch index is only created once.
-		// Creating this data and the index is time consuming and as such want to avoid having to repeat for each test.
-		// Normally would use a static @BeforeClass method for this purpose, but Autowired objects cannot be accessed in static methods.
-		if (!dataLoaded || patient0Id == null) {
-			// enabled to also create extended lucene index during creation of test data
-			myDaoConfig.setAdvancedHSearchIndexing(true);
-			Patient pt = new Patient();
-			pt.addName().setFamily("Lastn").addGiven("Arthur");
-			patient0Id = myPatientDao.create(pt, mockSrd()).getId().toUnqualifiedVersionless();
-			createObservationsForPatient(patient0Id);
-			pt = new Patient();
-			pt.addName().setFamily("Lastn").addGiven("Johnathan");
-			patient1Id = myPatientDao.create(pt, mockSrd()).getId().toUnqualifiedVersionless();
-			createObservationsForPatient(patient1Id);
-			pt = new Patient();
-			pt.addName().setFamily("Lastn").addGiven("Michael");
-			patient2Id = myPatientDao.create(pt, mockSrd()).getId().toUnqualifiedVersionless();
-			createObservationsForPatient(patient2Id);
-			dataLoaded = true;
-
-			myElasticsearchSvc.refreshIndex(ElasticsearchSvcImpl.OBSERVATION_INDEX);
-			myElasticsearchSvc.refreshIndex(ElasticsearchSvcImpl.OBSERVATION_CODE_INDEX);
-			// turn off the setting enabled earlier
-			myDaoConfig.setAdvancedHSearchIndexing(false);
-		}
+		Patient pt = new Patient();
+		pt.addName().setFamily("Lastn").addGiven("Arthur");
+		patient0Id = myPatientDao.create(pt, mockSrd()).getId().toUnqualifiedVersionless();
+		createObservationsForPatient(patient0Id);
+		pt = new Patient();
+		pt.addName().setFamily("Lastn").addGiven("Johnathan");
+		patient1Id = myPatientDao.create(pt, mockSrd()).getId().toUnqualifiedVersionless();
+		createObservationsForPatient(patient1Id);
+		pt = new Patient();
+		pt.addName().setFamily("Lastn").addGiven("Michael");
+		patient2Id = myPatientDao.create(pt, mockSrd()).getId().toUnqualifiedVersionless();
+		createObservationsForPatient(patient2Id);
+
+		myElasticsearchSvc.refreshIndex(ElasticsearchSvcImpl.OBSERVATION_INDEX);
+		myElasticsearchSvc.refreshIndex(ElasticsearchSvcImpl.OBSERVATION_CODE_INDEX);
+		// turn off the setting enabled earlier
+		myDaoConfig.setAdvancedHSearchIndexing(hsearchSaved);

 	}

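The rewrite above leans on a JUnit 5 feature worth spelling out: @BeforeAll normally requires a static method, but the @TestInstance(Lifecycle.PER_CLASS) annotation added to the class makes JUnit reuse a single test instance, so a non-static @BeforeAll can reach autowired fields while still running once per class, which is why the dataLoaded guard flag could be deleted. A self-contained illustration (class and field names invented):

import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestInstance;

import static org.junit.jupiter.api.Assertions.assertEquals;

@TestInstance(TestInstance.Lifecycle.PER_CLASS)
class PerClassLifecycleExample {

	private int mySetupCount = 0;

	// With Lifecycle.PER_CLASS a @BeforeAll method may be non-static, so it can
	// use injected instance fields, yet it still runs only once per class.
	@BeforeAll
	void initOnce() {
		mySetupCount++;
	}

	@Test
	void firstTest() {
		assertEquals(1, mySetupCount);
	}

	@Test
	void secondTest() {
		assertEquals(1, mySetupCount);
	}
}
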
@@ -211,6 +215,7 @@ abstract public class BaseR4SearchLastN extends BaseJpaTest {

 	@Test
 	public void testLastNNoPatients() {
+		ourLog.info("testLastNNoPatients lastn {} hsearch {}", myDaoConfig.isLastNEnabled(), myDaoConfig.isAdvancedHSearchIndexing());

 		SearchParameterMap params = new SearchParameterMap();

@@ -11,6 +11,7 @@ import org.elasticsearch.client.RestHighLevelClient;
 import org.junit.jupiter.api.Test;
 import org.junit.jupiter.api.extension.ExtendWith;
 import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.test.annotation.DirtiesContext;
 import org.springframework.test.context.ContextConfiguration;
 import org.springframework.test.context.junit.jupiter.SpringExtension;
 import org.testcontainers.elasticsearch.ElasticsearchContainer;

@@ -19,9 +20,12 @@ import java.io.IOException;

 import static org.hamcrest.MatcherAssert.assertThat;
 import static org.hamcrest.Matchers.containsString;
+import static org.springframework.test.annotation.DirtiesContext.ClassMode.AFTER_CLASS;

 @RequiresDocker
 @ExtendWith(SpringExtension.class)
+// we don't reuse this context, so discard it and release our elastic container.
+@DirtiesContext(classMode = AFTER_CLASS)
 @ContextConfiguration(classes = {ElasticsearchWithPrefixConfig.class})
 public class ElasticsearchPrefixTest {

@@ -58,6 +58,8 @@ public class FhirResourceDaoR4SearchLastNAsyncIT extends BaseR4SearchLastN {

 		SearchBuilder.setMaxPageSize50ForTest(true);

+		myDaoConfig.setLastNEnabled(true);
+
 	}

 	@AfterEach

@@ -11,6 +11,7 @@ import org.hl7.fhir.instance.model.api.IIdType;
 import org.hl7.fhir.r4.model.Observation;
 import org.hl7.fhir.r4.model.Patient;
 import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
 import org.junit.jupiter.api.extension.ExtendWith;
 import org.mockito.ArgumentCaptor;

@@ -33,6 +34,11 @@ import static org.mockito.Mockito.when;

 @ExtendWith(SpringExtension.class)
 public class FhirResourceDaoR4SearchLastNIT extends BaseR4SearchLastN {
+	@BeforeEach
+	public void enableAdvancedHSearchIndexing() {
+		myDaoConfig.setLastNEnabled(true);
+	}
+
 	@AfterEach
 	public void reset() {
 		SearchBuilder.setMaxPageSize50ForTest(false);

@@ -15,6 +15,7 @@ public class FhirResourceDaoR4SearchLastNUsingExtendedHSearchIndexAsyncIT extend

 	@BeforeEach
 	public void enableAdvancedHSearchIndexing() {
+		myDaoConfig.setLastNEnabled(true);
 		myDaoConfig.setAdvancedHSearchIndexing(true);
 	}

@@ -9,6 +9,8 @@ import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.extension.ExtendWith;
 import org.mockito.Mock;
 import org.mockito.Mockito;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.test.context.junit.jupiter.SpringExtension;

@@ -22,6 +24,7 @@ import java.util.List;
 */
 @ExtendWith(SpringExtension.class)
 public class FhirResourceDaoR4SearchLastNUsingExtendedHSearchIndexIT extends FhirResourceDaoR4SearchLastNIT {
+	private static final Logger ourLog = LoggerFactory.getLogger(FhirResourceDaoR4SearchLastNUsingExtendedHSearchIndexIT.class);

 	@Autowired
 	private TestHSearchEventDispatcher myHSearchEventDispatcher;

@@ -32,8 +35,11 @@ public class FhirResourceDaoR4SearchLastNUsingExtendedHSearchIndexIT extends Fhi

 	@BeforeEach
 	public void enableAdvancedHSearchIndexing() {
+		myDaoConfig.setLastNEnabled(true);
 		myDaoConfig.setAdvancedHSearchIndexing(true);
 		myHSearchEventDispatcher.register(mySearchEventListener);
+		ourLog.info("enableAdvancedHSearchIndexing finished. lastn {} advancedHSearchIndexing {}", myDaoConfig.isLastNEnabled(), myDaoConfig.isAdvancedHSearchIndexing());
+
 	}

 	@AfterEach

@@ -10,6 +10,7 @@ import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
 import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
 import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc;
 import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportJobSchedulingHelper;
+import ca.uhn.fhir.jpa.config.TestR4ConfigWithElasticHSearch;
 import ca.uhn.fhir.jpa.dao.IHSearchEventListener;
 import ca.uhn.fhir.jpa.dao.TestDaoSearch;
 import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;

@@ -26,8 +27,6 @@ import ca.uhn.fhir.jpa.sp.ISearchParamPresenceSvc;
 import ca.uhn.fhir.jpa.term.api.ITermCodeSystemStorageSvc;
 import ca.uhn.fhir.jpa.term.api.ITermReadSvcR4;
 import ca.uhn.fhir.jpa.test.BaseJpaTest;
-import ca.uhn.fhir.jpa.test.config.TestHSearchAddInConfig;
-import ca.uhn.fhir.jpa.test.config.TestR4Config;
 import ca.uhn.fhir.jpa.test.util.TestHSearchEventDispatcher;
 import ca.uhn.fhir.parser.DataFormatException;
 import ca.uhn.fhir.parser.IParser;

@@ -91,6 +90,7 @@ import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Qualifier;
 import org.springframework.test.annotation.DirtiesContext;
 import org.springframework.test.context.ContextConfiguration;
+import org.springframework.test.context.ContextHierarchy;
 import org.springframework.test.context.TestContext;
 import org.springframework.test.context.TestExecutionListeners;
 import org.springframework.test.context.junit.jupiter.SpringExtension;

@@ -136,11 +136,13 @@ import static org.mockito.Mockito.when;
 @ExtendWith(SpringExtension.class)
 @ExtendWith(MockitoExtension.class)
 @RequiresDocker
-@ContextConfiguration(classes = {
-	TestR4Config.class,
-	TestHSearchAddInConfig.Elasticsearch.class,
-	DaoTestDataBuilder.Config.class,
-	TestDaoSearch.Config.class
-})
+// wipmb hierarchy for context
+@ContextHierarchy({
+	@ContextConfiguration(classes = TestR4ConfigWithElasticHSearch.class),
+	@ContextConfiguration(classes = {
+		DaoTestDataBuilder.Config.class,
+		TestDaoSearch.Config.class
+	})
+})
 @DirtiesContext(classMode = DirtiesContext.ClassMode.AFTER_CLASS)
 @TestExecutionListeners(listeners = {

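
The @ContextHierarchy rework has the same motivation as TestR4ConfigWithElasticHSearch itself: Spring's test framework caches each level of a context hierarchy separately, so the expensive Elasticsearch-backed parent context can be shared even between test classes whose small child contexts differ. A hypothetical sketch of two such classes (names invented):

import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.ContextHierarchy;

// Both classes share the cached parent context (and its Elasticsearch
// container); only the lightweight child contexts differ between them.
@ContextHierarchy({
	@ContextConfiguration(classes = TestR4ConfigWithElasticHSearch.class),
	@ContextConfiguration(classes = TestDaoSearch.Config.class)
})
class SearchVariantAIT {
}

@ContextHierarchy({
	@ContextConfiguration(classes = TestR4ConfigWithElasticHSearch.class),
	@ContextConfiguration(classes = DaoTestDataBuilder.Config.class)
})
class SearchVariantBIT {
}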
@@ -7,6 +7,7 @@ import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoValueSet;
 import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
 import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc;
 import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportJobSchedulingHelper;
+import ca.uhn.fhir.jpa.config.TestR4ConfigWithElasticHSearch;
 import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
 import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
 import ca.uhn.fhir.jpa.entity.TermConcept;

@@ -14,8 +15,6 @@ import ca.uhn.fhir.jpa.model.entity.ResourceTable;
 import ca.uhn.fhir.jpa.search.reindex.IResourceReindexingSvc;
 import ca.uhn.fhir.jpa.term.api.ITermCodeSystemStorageSvc;
 import ca.uhn.fhir.jpa.test.BaseJpaTest;
-import ca.uhn.fhir.jpa.test.config.TestHSearchAddInConfig;
-import ca.uhn.fhir.jpa.test.config.TestR4Config;
 import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
 import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
 import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;

@@ -51,7 +50,7 @@ import static org.mockito.Mockito.when;

 @ExtendWith(SpringExtension.class)
 @RequiresDocker
-@ContextConfiguration(classes = {TestR4Config.class, TestHSearchAddInConfig.Elasticsearch.class})
+@ContextConfiguration(classes = TestR4ConfigWithElasticHSearch.class)
 public class FhirResourceDaoR4TerminologyElasticsearchIT extends BaseJpaTest {

 	public static final String URL_MY_CODE_SYSTEM = "http://example.com/my_code_system";

@@ -5,12 +5,11 @@ import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
 import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
 import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc;
 import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportJobSchedulingHelper;
+import ca.uhn.fhir.jpa.config.TestR4ConfigWithElasticHSearch;
 import ca.uhn.fhir.jpa.dao.TestDaoSearch;
 import ca.uhn.fhir.jpa.model.entity.ResourceTable;
 import ca.uhn.fhir.jpa.search.reindex.IResourceReindexingSvc;
 import ca.uhn.fhir.jpa.test.BaseJpaTest;
-import ca.uhn.fhir.jpa.test.config.TestHSearchAddInConfig;
-import ca.uhn.fhir.jpa.test.config.TestR4Config;
 import ca.uhn.fhir.model.api.IQueryParameterType;
 import ca.uhn.fhir.rest.param.ParamPrefixEnum;
 import ca.uhn.fhir.rest.param.QuantityParam;

@@ -39,6 +38,7 @@ import org.junit.jupiter.api.extension.ExtendWith;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Qualifier;
 import org.springframework.test.context.ContextConfiguration;
+import org.springframework.test.context.ContextHierarchy;
 import org.springframework.test.context.junit.jupiter.SpringExtension;
 import org.springframework.transaction.PlatformTransactionManager;

@@ -58,11 +58,12 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank;
 */
 @ExtendWith(SpringExtension.class)
 @RequiresDocker
-@ContextConfiguration(classes = {
-	TestR4Config.class,
-	TestHSearchAddInConfig.Elasticsearch.class,
-	DaoTestDataBuilder.Config.class,
-	TestDaoSearch.Config.class
-})
+@ContextHierarchy({
+	@ContextConfiguration(classes = TestR4ConfigWithElasticHSearch.class),
+	@ContextConfiguration(classes = {
+		DaoTestDataBuilder.Config.class,
+		TestDaoSearch.Config.class
+	})
+})
 @Disabled
 public class HSearchSandboxTest extends BaseJpaTest {

@@ -3,14 +3,13 @@ package ca.uhn.fhir.jpa.dao.r4;
 import ca.uhn.fhir.context.FhirContext;
 import ca.uhn.fhir.jpa.api.config.DaoConfig;
 import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
+import ca.uhn.fhir.jpa.config.TestR4ConfigWithElasticHSearch;
 import ca.uhn.fhir.jpa.dao.ObservationLastNIndexPersistSvc;
 import ca.uhn.fhir.jpa.model.entity.ResourceTable;
 import ca.uhn.fhir.jpa.search.lastn.ElasticsearchSvcImpl;
 import ca.uhn.fhir.jpa.search.lastn.json.CodeJson;
 import ca.uhn.fhir.jpa.search.lastn.json.ObservationJson;
 import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
-import ca.uhn.fhir.jpa.test.config.TestHSearchAddInConfig;
-import ca.uhn.fhir.jpa.test.config.TestR4Config;
 import ca.uhn.fhir.parser.IParser;
 import ca.uhn.fhir.rest.param.ReferenceAndListParam;
 import ca.uhn.fhir.rest.param.ReferenceOrListParam;

@@ -63,7 +62,7 @@ import static org.junit.jupiter.api.Assertions.assertTrue;

 @ExtendWith(SpringExtension.class)
 @RequiresDocker
-@ContextConfiguration(classes = {TestR4Config.class, TestHSearchAddInConfig.Elasticsearch.class})
+@ContextConfiguration(classes = TestR4ConfigWithElasticHSearch.class)
 @TestMethodOrder(MethodOrderer.OrderAnnotation.class)
 public class PersistObservationIndexedSearchParamLastNR4IT {

@@ -105,50 +104,50 @@ public class PersistObservationIndexedSearchParamLastNR4IT {
 		myDaoConfig.setLastNEnabled(new DaoConfig().isLastNEnabled());
 	}

-	@Order(3)
+	@Order(0)
 	@Test
-	public void testIndexObservationSingle() throws IOException {
-		indexSingleObservation();
+	public void testDeleteObservation() throws IOException {
+		indexMultipleObservations();
 		SearchParameterMap searchParameterMap = new SearchParameterMap();
-		searchParameterMap.setLastNMax(10);
-		List<ObservationJson> persistedObservationEntities = elasticsearchSvc.executeLastNWithAllFieldsForTest(searchParameterMap, myFhirCtx);
-		assertEquals(1, persistedObservationEntities.size());
-		ObservationJson persistedObservationEntity = persistedObservationEntities.get(0);
-		assertEquals(SINGLE_SUBJECT_ID, persistedObservationEntity.getSubject());
-		assertEquals(SINGLE_OBSERVATION_PID, persistedObservationEntity.getIdentifier());
-		assertEquals(SINGLE_EFFECTIVEDTM, persistedObservationEntity.getEffectiveDtm());
-
-		String observationCodeNormalizedId = persistedObservationEntity.getCode_concept_id();
-
-		// List<CodeJson> persistedObservationCodes = elasticsearchSvc.queryAllIndexedObservationCodesForTest();
-		// assertEquals(1, persistedObservationCodes.size());
-
-		// Check that we can retrieve code by hash value.
-		String codeSystemHash = persistedObservationEntity.getCode_coding_code_system_hash();
-		CodeJson persistedObservationCode = elasticsearchSvc.getObservationCodeDocument(codeSystemHash, null);
-		assertNotNull(persistedObservationCode);
-		assertEquals(observationCodeNormalizedId, persistedObservationCode.getCodeableConceptId());
-		assertEquals(SINGLE_OBSERVATION_CODE_TEXT, persistedObservationCode.getCodeableConceptText());
-
-		// Also confirm that we can retrieve code by text value.
-		persistedObservationCode = elasticsearchSvc.getObservationCodeDocument(null, SINGLE_OBSERVATION_CODE_TEXT);
-		assertNotNull(persistedObservationCode);
+		searchParameterMap.setLastNMax(100);
+		List<ObservationJson> observationDocuments = elasticsearchSvc.executeLastNWithAllFieldsForTest(searchParameterMap, myFhirCtx);
+		assertEquals(100, observationDocuments.size());
+		// Check that fifth observation for fifth patient has been indexed.
+		ObservationJson observation = elasticsearchSvc.getObservationDocument("55");
+		assertNotNull(observation);

 		searchParameterMap = new SearchParameterMap();
-		ReferenceParam subjectParam = new ReferenceParam("Patient", "", SINGLE_SUBJECT_ID);
-		searchParameterMap.add(Observation.SP_SUBJECT, new ReferenceAndListParam().addAnd(new ReferenceOrListParam().addOr(subjectParam)));
-		TokenParam categoryParam = new TokenParam(CATEGORYFIRSTCODINGSYSTEM, FIRSTCATEGORYFIRSTCODINGCODE);
-		searchParameterMap.add(Observation.SP_CATEGORY, new TokenAndListParam().addAnd(new TokenOrListParam().addOr(categoryParam)));
-		TokenParam codeParam = new TokenParam(CODEFIRSTCODINGSYSTEM, CODEFIRSTCODINGCODE);
-		searchParameterMap.add(Observation.SP_CODE, new TokenAndListParam().addAnd(new TokenOrListParam().addOr(codeParam)));
-		searchParameterMap.setLastNMax(3);
-
-		List<String> observationIdsOnly = elasticsearchSvc.executeLastN(searchParameterMap, myFhirCtx, 100);
-
-		assertEquals(1, observationIdsOnly.size());
-		assertEquals(SINGLE_OBSERVATION_PID, observationIdsOnly.get(0));
+		searchParameterMap.add(Observation.SP_SUBJECT, multiSubjectParams);
+		searchParameterMap.setLastNMax(10);
+		List<String> observationIdsOnly = elasticsearchSvc.executeLastN(searchParameterMap, myFhirCtx, 200);
+		assertEquals(100, observationIdsOnly.size());
+		assertTrue(observationIdsOnly.contains("55"));
+
+		// Delete fifth observation for fifth patient.
+		ResourceTable entity = new ResourceTable();
+		entity.setId(55L);
+		entity.setResourceType("Observation");
+		entity.setVersion(0L);
+
+		testObservationPersist.deleteObservationIndex(entity);
+		elasticsearchSvc.refreshIndex(ElasticsearchSvcImpl.OBSERVATION_INDEX);
+
+		// Confirm that observation was deleted.
+		searchParameterMap = new SearchParameterMap();
+		searchParameterMap.setLastNMax(100);
+		observationDocuments = elasticsearchSvc.executeLastNWithAllFieldsForTest(searchParameterMap, myFhirCtx);
+		assertEquals(99, observationDocuments.size());
+		observation = elasticsearchSvc.getObservationDocument("55");
+		assertNull(observation);
+
+		observationIdsOnly = elasticsearchSvc.executeLastN(searchParameterMap, myFhirCtx, 200);
+		assertEquals(99, observationIdsOnly.size());
+		assertTrue(!observationIdsOnly.contains("55"));
 	}


	private void indexSingleObservation() throws IOException {

		Observation myObservation = new Observation();

@@ -206,6 +205,63 @@ public class PersistObservationIndexedSearchParamLastNR4IT {
 		return codeableConceptField;
 	}

+	@Order(1)
+	@Test
+	public void testSampleBundleInTransaction() throws IOException {
+		FhirContext myFhirCtx = FhirContext.forR4Cached();
+
+		PathMatchingResourcePatternResolver provider = new PathMatchingResourcePatternResolver();
+		final Resource[] bundleResources = provider.getResources("lastntestbundle.json");
+		assertEquals(1, bundleResources.length);
+
+		AtomicInteger index = new AtomicInteger();
+
+		Arrays.stream(bundleResources).forEach(
+			resource -> {
+				index.incrementAndGet();
+
+				InputStream resIs = null;
+				String nextBundleString;
+				try {
+					resIs = resource.getInputStream();
+					nextBundleString = IOUtils.toString(resIs, Charsets.UTF_8);
+				} catch (IOException e) {
+					return;
+				} finally {
+					try {
+						if (resIs != null) {
+							resIs.close();
+						}
+					} catch (final IOException ioe) {
+						// ignore
+					}
+				}
+
+				IParser parser = myFhirCtx.newJsonParser();
+				Bundle bundle = parser.parseResource(Bundle.class, nextBundleString);
+
+				myDao.transaction(null, bundle);
+
+			}
+		);
+
+		elasticsearchSvc.refreshIndex("*");
+
+		SearchParameterMap searchParameterMap = new SearchParameterMap();
+
+		// execute Observation ID search - Composite Aggregation
+		searchParameterMap.setLastNMax(1);
+		List<String> observationIdsOnly = elasticsearchSvc.executeLastN(searchParameterMap, myFhirCtx, 200);
+
+		assertEquals(20, observationIdsOnly.size());
+
+		searchParameterMap.setLastNMax(3);
+		observationIdsOnly = elasticsearchSvc.executeLastN(searchParameterMap, myFhirCtx, 200);
+
+		assertEquals(38, observationIdsOnly.size());
+
+	}
+
 	@Order(2)
 	@Test
 	public void testIndexObservationMultiple() throws IOException {

@@ -309,46 +365,48 @@ public class PersistObservationIndexedSearchParamLastNR4IT {

 	}

-	@Order(0)
+	@Order(3)
 	@Test
-	public void testDeleteObservation() throws IOException {
-		indexMultipleObservations();
+	public void testIndexObservationSingle() throws IOException {
+		indexSingleObservation();
 		SearchParameterMap searchParameterMap = new SearchParameterMap();
-		searchParameterMap.setLastNMax(100);
-		List<ObservationJson> observationDocuments = elasticsearchSvc.executeLastNWithAllFieldsForTest(searchParameterMap, myFhirCtx);
-		assertEquals(100, observationDocuments.size());
-		// Check that fifth observation for fifth patient has been indexed.
-		ObservationJson observation = elasticsearchSvc.getObservationDocument("55");
-		assertNotNull(observation);
-
-		searchParameterMap = new SearchParameterMap();
-		searchParameterMap.add(Observation.SP_SUBJECT, multiSubjectParams);
-		searchParameterMap.setLastNMax(10);
-		List<String> observationIdsOnly = elasticsearchSvc.executeLastN(searchParameterMap, myFhirCtx, 200);
-		assertEquals(100, observationIdsOnly.size());
-		assertTrue(observationIdsOnly.contains("55"));
-
-		// Delete fifth observation for fifth patient.
-		ResourceTable entity = new ResourceTable();
-		entity.setId(55L);
-		entity.setResourceType("Observation");
-		entity.setVersion(0L);
-
-		testObservationPersist.deleteObservationIndex(entity);
-		elasticsearchSvc.refreshIndex(ElasticsearchSvcImpl.OBSERVATION_INDEX);
-
-		// Confirm that observation was deleted.
-		searchParameterMap = new SearchParameterMap();
-		searchParameterMap.setLastNMax(100);
-		observationDocuments = elasticsearchSvc.executeLastNWithAllFieldsForTest(searchParameterMap, myFhirCtx);
-		assertEquals(99, observationDocuments.size());
-		observation = elasticsearchSvc.getObservationDocument("55");
-		assertNull(observation);
-
-		observationIdsOnly = elasticsearchSvc.executeLastN(searchParameterMap, myFhirCtx, 200);
-		assertEquals(99, observationIdsOnly.size());
-		assertTrue(!observationIdsOnly.contains("55"));
+		searchParameterMap.setLastNMax(10);
+		List<ObservationJson> persistedObservationEntities = elasticsearchSvc.executeLastNWithAllFieldsForTest(searchParameterMap, myFhirCtx);
+		assertEquals(1, persistedObservationEntities.size());
+		ObservationJson persistedObservationEntity = persistedObservationEntities.get(0);
+		assertEquals(SINGLE_SUBJECT_ID, persistedObservationEntity.getSubject());
+		assertEquals(SINGLE_OBSERVATION_PID, persistedObservationEntity.getIdentifier());
+		assertEquals(SINGLE_EFFECTIVEDTM, persistedObservationEntity.getEffectiveDtm());
+
+		String observationCodeNormalizedId = persistedObservationEntity.getCode_concept_id();
+
+		// List<CodeJson> persistedObservationCodes = elasticsearchSvc.queryAllIndexedObservationCodesForTest();
+		// assertEquals(1, persistedObservationCodes.size());
+
+		// Check that we can retrieve code by hash value.
+		String codeSystemHash = persistedObservationEntity.getCode_coding_code_system_hash();
+		CodeJson persistedObservationCode = elasticsearchSvc.getObservationCodeDocument(codeSystemHash, null);
+		assertNotNull(persistedObservationCode);
+		assertEquals(observationCodeNormalizedId, persistedObservationCode.getCodeableConceptId());
+		assertEquals(SINGLE_OBSERVATION_CODE_TEXT, persistedObservationCode.getCodeableConceptText());
+
+		// Also confirm that we can retrieve code by text value.
+		persistedObservationCode = elasticsearchSvc.getObservationCodeDocument(null, SINGLE_OBSERVATION_CODE_TEXT);
+		assertNotNull(persistedObservationCode);
+
+		searchParameterMap = new SearchParameterMap();
+		ReferenceParam subjectParam = new ReferenceParam("Patient", "", SINGLE_SUBJECT_ID);
+		searchParameterMap.add(Observation.SP_SUBJECT, new ReferenceAndListParam().addAnd(new ReferenceOrListParam().addOr(subjectParam)));
+		TokenParam categoryParam = new TokenParam(CATEGORYFIRSTCODINGSYSTEM, FIRSTCATEGORYFIRSTCODINGCODE);
+		searchParameterMap.add(Observation.SP_CATEGORY, new TokenAndListParam().addAnd(new TokenOrListParam().addOr(categoryParam)));
+		TokenParam codeParam = new TokenParam(CODEFIRSTCODINGSYSTEM, CODEFIRSTCODINGCODE);
+		searchParameterMap.add(Observation.SP_CODE, new TokenAndListParam().addAnd(new TokenOrListParam().addOr(codeParam)));
+		searchParameterMap.setLastNMax(3);
+
+		List<String> observationIdsOnly = elasticsearchSvc.executeLastN(searchParameterMap, myFhirCtx, 100);
+
+		assertEquals(1, observationIdsOnly.size());
+		assertEquals(SINGLE_OBSERVATION_PID, observationIdsOnly.get(0));
 	}

 	@Order(4)

@@ -412,61 +470,5 @@ public class PersistObservationIndexedSearchParamLastNR4IT {

}

@Order(1)
@Test
public void testSampleBundleInTransaction() throws IOException {
FhirContext myFhirCtx = FhirContext.forR4Cached();

PathMatchingResourcePatternResolver provider = new PathMatchingResourcePatternResolver();
final Resource[] bundleResources;
bundleResources = provider.getResources("lastntestbundle.json");

AtomicInteger index = new AtomicInteger();

Arrays.stream(bundleResources).forEach(
resource -> {
index.incrementAndGet();

InputStream resIs = null;
String nextBundleString;
try {
resIs = resource.getInputStream();
nextBundleString = IOUtils.toString(resIs, Charsets.UTF_8);
} catch (IOException e) {
return;
} finally {
try {
if (resIs != null) {
resIs.close();
}
} catch (final IOException ioe) {
// ignore
}
}

IParser parser = myFhirCtx.newJsonParser();
Bundle bundle = parser.parseResource(Bundle.class, nextBundleString);

myDao.transaction(null, bundle);

}
);

elasticsearchSvc.refreshIndex(ElasticsearchSvcImpl.OBSERVATION_INDEX);

SearchParameterMap searchParameterMap = new SearchParameterMap();

// execute Observation ID search - Composite Aggregation
searchParameterMap.setLastNMax(1);
List<String> observationIdsOnly = elasticsearchSvc.executeLastN(searchParameterMap, myFhirCtx, 200);

assertEquals(20, observationIdsOnly.size());

searchParameterMap.setLastNMax(3);
observationIdsOnly = elasticsearchSvc.executeLastN(searchParameterMap, myFhirCtx, 200);

assertEquals(38, observationIdsOnly.size());

}

}
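One readability note on testSampleBundleInTransaction above: the explicit null-check/finally dance around the input stream predates try-with-resources. An essentially equivalent, more compact form would be the following sketch (not part of this commit; note that an IOException from the implicit close() lands in the same catch block instead of being silently swallowed):

String nextBundleString;
try (InputStream resIs = resource.getInputStream()) {
	// drains the bundle JSON; Charsets.UTF_8 matches the original code
	nextBundleString = IOUtils.toString(resIs, Charsets.UTF_8);
} catch (IOException e) {
	// same skip-this-resource behaviour as the original catch
	return;
}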
@@ -1,8 +1,8 @@
package ca.uhn.fhir.jpa.provider.r4;

import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.config.TestR4ConfigWithElasticHSearch;
import ca.uhn.fhir.jpa.provider.BaseJpaResourceProvider;
import ca.uhn.fhir.jpa.test.config.TestHSearchAddInConfig;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.test.utilities.docker.RequiresDocker;
import org.apache.commons.io.IOUtils;

@@ -50,7 +50,7 @@ import static org.junit.jupiter.api.Assertions.assertTrue;

@ExtendWith(SpringExtension.class)
@RequiresDocker
@ContextConfiguration(classes = TestHSearchAddInConfig.Elasticsearch.class)
@ContextConfiguration(classes = TestR4ConfigWithElasticHSearch.class)
public class ResourceProviderR4ElasticTest extends BaseResourceProviderR4Test {
private static final Logger ourLog = LoggerFactory.getLogger(ResourceProviderR4ElasticTest.class);

@@ -6,12 +6,12 @@ import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc;
import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportJobSchedulingHelper;
import ca.uhn.fhir.jpa.config.TestR4ConfigWithElasticHSearch;
import ca.uhn.fhir.jpa.dao.TestDaoSearch;
import ca.uhn.fhir.jpa.model.entity.ModelConfig;
import ca.uhn.fhir.jpa.search.reindex.IResourceReindexingSvc;
import ca.uhn.fhir.jpa.sp.ISearchParamPresenceSvc;
import ca.uhn.fhir.jpa.test.BaseJpaTest;
import ca.uhn.fhir.jpa.test.config.TestHSearchAddInConfig;
import ca.uhn.fhir.jpa.test.config.TestR4Config;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
import ca.uhn.fhir.storage.test.DaoTestDataBuilder;

@@ -26,12 +26,14 @@ import org.hl7.fhir.instance.model.api.IBaseCoding;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.Coding;
import org.hl7.fhir.r4.model.Observation;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.ContextHierarchy;
import org.springframework.test.context.junit.jupiter.SpringExtension;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.support.TransactionTemplate;

@@ -49,10 +51,12 @@ import static org.hamcrest.Matchers.not;

@ExtendWith(SpringExtension.class)
@RequiresDocker
@ContextConfiguration(classes = {
TestR4Config.class,
TestHSearchAddInConfig.Elasticsearch.class,
DaoTestDataBuilder.Config.class
@ContextHierarchy({
@ContextConfiguration(classes = TestR4ConfigWithElasticHSearch.class),
@ContextConfiguration(classes = {
DaoTestDataBuilder.Config.class,
TestDaoSearch.Config.class
})
})
public class TokenAutocompleteElasticsearchIT extends BaseJpaTest {
public static final Coding erythrocyte_by_volume = new Coding("http://loinc.org", "789-8", "Erythrocytes [#/volume] in Blood by Automated count");

@@ -96,6 +100,12 @@ public class TokenAutocompleteElasticsearchIT extends BaseJpaTest {
myDaoConfig.setAdvancedHSearchIndexing(true);
}

@AfterEach
void resetConfig() {
DaoConfig defaultConfig = new DaoConfig();
myDaoConfig.setAdvancedHSearchIndexing(defaultConfig.isAdvancedHSearchIndexing());
}

@Override
protected FhirContext getFhirContext() {
return myFhirCtx;

@@ -1,21 +0,0 @@
package ca.uhn.fhir.jpa.search.elastic;

import org.testcontainers.elasticsearch.ElasticsearchContainer;

import java.time.Duration;

import static java.time.temporal.ChronoUnit.SECONDS;

public class TestElasticsearchContainerHelper {


public static final String ELASTICSEARCH_VERSION = "7.16.3";
public static final String ELASTICSEARCH_IMAGE = "docker.elastic.co/elasticsearch/elasticsearch:" + ELASTICSEARCH_VERSION;

public static ElasticsearchContainer getEmbeddedElasticSearch() {

return new ElasticsearchContainer(ELASTICSEARCH_IMAGE)
.withStartupTimeout(Duration.of(300, SECONDS));
}

}
@@ -2,10 +2,10 @@ package ca.uhn.fhir.jpa.search.lastn;

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.search.elastic.TestElasticsearchContainerHelper;
import ca.uhn.fhir.jpa.search.lastn.json.CodeJson;
import ca.uhn.fhir.jpa.search.lastn.json.ObservationJson;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.test.config.TestElasticsearchContainerHelper;
import ca.uhn.fhir.rest.param.DateAndListParam;
import ca.uhn.fhir.rest.param.DateOrListParam;
import ca.uhn.fhir.rest.param.DateParam;

@@ -3,10 +3,10 @@ package ca.uhn.fhir.jpa.search.lastn;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.util.CodeSystemHash;
import ca.uhn.fhir.jpa.search.elastic.TestElasticsearchContainerHelper;
import ca.uhn.fhir.jpa.search.lastn.json.CodeJson;
import ca.uhn.fhir.jpa.search.lastn.json.ObservationJson;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.test.config.TestElasticsearchContainerHelper;
import ca.uhn.fhir.model.dstu2.resource.Observation;
import ca.uhn.fhir.rest.param.DateParam;
import ca.uhn.fhir.rest.param.ParamPrefixEnum;

@@ -8,6 +8,7 @@ import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoValueSet;
import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc;
import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportJobSchedulingHelper;
import ca.uhn.fhir.jpa.config.TestR4ConfigWithElasticHSearch;
import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
import ca.uhn.fhir.jpa.entity.TermConcept;

@@ -19,8 +20,6 @@ import ca.uhn.fhir.jpa.term.api.ITermDeferredStorageSvc;
import ca.uhn.fhir.jpa.term.api.ITermReadSvcR4;
import ca.uhn.fhir.jpa.term.custom.CustomTerminologySet;
import ca.uhn.fhir.jpa.test.BaseJpaTest;
import ca.uhn.fhir.jpa.test.config.TestHSearchAddInConfig;
import ca.uhn.fhir.jpa.test.config.TestR4Config;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;

@@ -56,7 +55,7 @@ import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

@ExtendWith(SpringExtension.class)
@ContextConfiguration(classes = {TestR4Config.class, TestHSearchAddInConfig.Elasticsearch.class})
@ContextConfiguration(classes = TestR4ConfigWithElasticHSearch.class)
@RequiresDocker
public class ValueSetExpansionR4ElasticsearchIT extends BaseJpaTest {

@@ -1,7 +1,6 @@
package ca.uhn.fhir.jpa.term;

import ca.uhn.fhir.jpa.test.config.TestHSearchAddInConfig;
import ca.uhn.fhir.jpa.test.config.TestR4Config;
import ca.uhn.fhir.jpa.config.TestR4ConfigWithElasticHSearch;
import org.junit.jupiter.api.extension.ExtendWith;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit.jupiter.SpringExtension;

@@ -10,7 +9,7 @@ import org.springframework.test.context.junit.jupiter.SpringExtension;
* This class runs all parent class tests using Elasticsearch configuration
*/
@ExtendWith(SpringExtension.class)
@ContextConfiguration(classes = {TestR4Config.class, TestHSearchAddInConfig.Elasticsearch.class})
@ContextConfiguration(classes = TestR4ConfigWithElasticHSearch.class)
public class ValueSetHSearchExpansionR4ElasticIT extends AbstractValueSetHSearchExpansionR4Test {

}
@@ -35,6 +35,10 @@ public class TestElasticsearchContainerHelper {
public static ElasticsearchContainer getEmbeddedElasticSearch() {

return new ElasticsearchContainer(ELASTICSEARCH_IMAGE)
// the default is 4GB which is too much for our little tests
.withEnv("ES_JAVA_OPTS", "-Xms512m -Xmx512m")
// turn off security warnings
.withEnv("xpack.security.enabled", "false")
.withStartupTimeout(Duration.of(300, SECONDS));
}

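This hunk is the substance of the commit: the shared test container now runs with a 512MB heap via ES_JAVA_OPTS instead of the Elasticsearch default. A minimal standalone smoke test of the capped container (a sketch, assuming Docker is available and the helper above is on the test classpath; the class name is illustrative):

import org.testcontainers.elasticsearch.ElasticsearchContainer;

import ca.uhn.fhir.jpa.test.config.TestElasticsearchContainerHelper;

public class ContainerMemorySmokeTest {
	public static void main(String[] args) {
		// the helper now caps the ES heap at 512MB via ES_JAVA_OPTS
		try (ElasticsearchContainer container = TestElasticsearchContainerHelper.getEmbeddedElasticSearch()) {
			container.start();
			// mapped host:port of the REST endpoint, usable from any HTTP client
			System.out.println("Elasticsearch up at " + container.getHttpHostAddress());
		} // close() stops and removes the container
	}
}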
@@ -37,8 +37,10 @@ import org.hibernate.search.mapper.orm.cfg.HibernateOrmMapperSettings;
import org.hibernate.search.mapper.orm.schema.management.SchemaManagementStrategyName;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.context.annotation.Primary;
import org.testcontainers.elasticsearch.ElasticsearchContainer;

@@ -177,7 +179,11 @@ public class TestHSearchAddInConfig {
* Make sure you add {@link RequiresDocker} annotation to any uses.
*/
@Configuration
@Import(PooledElasticsearchContainerConfig.class)
public static class Elasticsearch {
@Autowired
ElasticsearchContainer myElasticsearchContainer;

@Bean
@Primary // override the default
IHSearchConfigurer hibernateSearchConfigurer(ElasticsearchContainer theContainer) {

@@ -208,18 +214,6 @@ public class TestHSearchAddInConfig {
return new TestHSearchEventDispatcher();
}

@Bean
public ElasticsearchContainer elasticContainer() {
ElasticsearchContainer embeddedElasticSearch = TestElasticsearchContainerHelper.getEmbeddedElasticSearch();
embeddedElasticSearch.start();
return embeddedElasticSearch;
}

@PreDestroy
public void stop() {
elasticContainer().stop();
}

@Bean
public PartitionSettings partitionSettings() {
return new PartitionSettings();

@@ -227,8 +221,8 @@ public class TestHSearchAddInConfig {

@Bean()
public ElasticsearchSvcImpl myElasticsearchSvc() {
int elasticsearchPort = elasticContainer().getMappedPort(9200);
String host = elasticContainer().getHost();
int elasticsearchPort = myElasticsearchContainer.getMappedPort(9200);
String host = myElasticsearchContainer.getHost();
return new ElasticsearchSvcImpl("http", host + ":" + elasticsearchPort, null, null);
}

@@ -236,6 +230,16 @@ public class TestHSearchAddInConfig {
public void stopEsClient() throws IOException {
myElasticsearchSvc().close();
}
}

@Configuration
public static class PooledElasticsearchContainerConfig {
@Bean
public ElasticsearchContainer elasticContainer() {
ElasticsearchContainer embeddedElasticSearch = TestElasticsearchContainerHelper.getEmbeddedElasticSearch();
embeddedElasticSearch.start();
return embeddedElasticSearch;
}

}
}
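The container bean moves out of Elasticsearch into the new PooledElasticsearchContainerConfig, with Elasticsearch pulling it back in via @Import and autowiring the started container instead of calling the bean method directly. A hypothetical consumer now looks roughly like the sketch below (configuration trimmed for illustration). Because the Spring TestContext framework caches application contexts by their configuration classes, test classes that declare the same classes should share one started 512MB container rather than each booting their own:

import static org.junit.jupiter.api.Assertions.assertTrue;

import ca.uhn.fhir.jpa.test.config.TestHSearchAddInConfig;
import ca.uhn.fhir.test.utilities.docker.RequiresDocker;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit.jupiter.SpringExtension;
import org.testcontainers.elasticsearch.ElasticsearchContainer;

@ExtendWith(SpringExtension.class)
@RequiresDocker
@ContextConfiguration(classes = TestHSearchAddInConfig.Elasticsearch.class)
class PooledContainerWiringIT {

	// supplied by PooledElasticsearchContainerConfig via @Import, already started
	@Autowired
	ElasticsearchContainer myElasticsearchContainer;

	@Test
	void containerIsUpAndShared() {
		assertTrue(myElasticsearchContainer.isRunning());
	}
}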
@@ -49,13 +49,14 @@
<appender-ref ref="STDOUT" />
</logger>


<logger name="org.elasticsearch.client" additivity="true" level="trace"/>
<!--
<logger name="ca.uhn.fhir.jpa.model.search" additivity="false" level="debug"/>
<logger name="org.elasticsearch.client" additivity="true" level="trace"/>
<logger name="org.hibernate.search.elasticsearch.request" additivity="false" level="trace"/>
<logger name="org.hibernate.search" level="TRACE"/>
<logger name="org.hibernate.search.query" level="TRACE"/>
<logger name="org.hibernate.search.elasticsearch.request" level="TRACE"/>
<logger name="org.hibernate.search" additivity="false" level="TRACE"/>
<logger name="org.hibernate.search.query" additivity="false" level="TRACE"/>
<logger name="org.hibernate.search.elasticsearch.request" additivity="false" level="TRACE"/>
-->
<!-- See https://docs.jboss.org/hibernate/stable/search/reference/en-US/html_single/#backend-lucene-io-writer-infostream for lucene logging
<logger name="org.hibernate.search.backend.lucene.infostream" level="TRACE"/> -->
@@ -8,9 +8,9 @@ import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum;
import ca.uhn.fhir.jpa.bulk.imprt.model.JobFileRowProcessingModeEnum;
import ca.uhn.fhir.jpa.dao.data.IBulkImportJobDao;
import ca.uhn.fhir.jpa.dao.data.IBulkImportJobFileDao;
import ca.uhn.fhir.jpa.test.BaseJpaR4Test;
import ca.uhn.fhir.jpa.entity.BulkImportJobEntity;
import ca.uhn.fhir.jpa.entity.BulkImportJobFileEntity;
import ca.uhn.fhir.jpa.test.BaseJpaR4Test;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import com.google.common.collect.Lists;

@@ -99,7 +99,7 @@ public class BulkDataImportSvcImplTest extends BaseJpaR4Test {
try {
mySvc.addFilesToJob("ABCDEFG", Lists.newArrayList(file3));
} catch (InvalidRequestException e) {
assertEquals("Unknown job ID: ABCDEFG", e.getMessage());
assertEquals("Unknown bijob id: ABCDEFG", e.getMessage());
}
}

@@ -119,7 +119,7 @@ public class BulkDataImportSvcImplTest extends BaseJpaR4Test {
try {
mySvc.addFilesToJob("ABCDEFG", Lists.newArrayList(file3));
} catch (InvalidRequestException e) {
assertEquals(Msg.code(1769) + "Job ABCDEFG has status RUNNING and can not be added to", e.getMessage());
assertEquals(Msg.code(1769) + "bijob id ABCDEFG has status RUNNING and can not be added to", e.getMessage());
}
}

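The assertions above only run when the expected InvalidRequestException is actually thrown. Assuming no fail() call is hiding outside the hunk context, the JUnit 5 assertThrows form is a tighter equivalent that also fails the test when no exception occurs (a sketch, not part of this commit):

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;

// ... inside the test method:
InvalidRequestException e = assertThrows(InvalidRequestException.class,
	() -> mySvc.addFilesToJob("ABCDEFG", Lists.newArrayList(file3)));
assertEquals("Unknown bijob id: ABCDEFG", e.getMessage());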
@@ -35,6 +35,7 @@ import org.hl7.fhir.r4.model.Practitioner;
import org.hl7.fhir.r4.model.Reference;
import org.hl7.fhir.r4.model.SearchParameter;
import org.hl7.fhir.r4.model.ServiceRequest;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test;
import org.springframework.transaction.TransactionStatus;

@@ -65,6 +66,10 @@ public class FhirResourceDaoR4ComboUniqueParamTest extends BaseComboParamsR4Test

private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirResourceDaoR4ComboUniqueParamTest.class);

@AfterEach
public void purgeUniqueIndexes() {
myResourceIndexedCompositeStringUniqueDao.deleteAll();
}

private void createUniqueBirthdateAndGenderSps() {
SearchParameter sp = new SearchParameter();

@@ -243,16 +243,16 @@ public class FhirResourceDaoR4SearchFtTest extends BaseJpaR4Test {

param = new StringAndListParam();
param.addAnd(new StringOrListParam().addOr(new StringParam("obsvalue1")));
actual = toUnqualifiedVersionlessIdValues(myPatientDao.patientInstanceEverything(request, ptId1, null, null, null, null, param, null, null, mockSrd()));
actual = toUnqualifiedVersionlessIdValues(myPatientDao.patientInstanceEverything(request, ptId1, null, null, null, null, param, null, null, null, mockSrd()));
assertThat(actual, containsInAnyOrder(toValues(ptId1, obsId1, devId1)));

param = new StringAndListParam();
param.addAnd(new StringOrListParam().addOr(new StringParam("obstext1")));
actual = toUnqualifiedVersionlessIdValues(myPatientDao.patientInstanceEverything(request, ptId1, null, null, null, null, null, param, null, mockSrd()));
actual = toUnqualifiedVersionlessIdValues(myPatientDao.patientInstanceEverything(request, ptId1, null, null, null, null, null, param, null, null, mockSrd()));
assertThat(actual, containsInAnyOrder(toValues(ptId1, obsId1, devId1)));

request = mock(HttpServletRequest.class);
actual = toUnqualifiedVersionlessIdValues(myPatientDao.patientInstanceEverything(request, ptId1, null, null, null, null, null, null, null, mockSrd()));
actual = toUnqualifiedVersionlessIdValues(myPatientDao.patientInstanceEverything(request, ptId1, null, null, null, null, null, null, null, null, mockSrd()));
assertThat(actual, containsInAnyOrder(toValues(ptId1, obsId1, obsId2, devId1)));

/*

@@ -268,7 +268,7 @@ public class FhirResourceDaoR4SearchFtTest extends BaseJpaR4Test {

param = new StringAndListParam();
param.addAnd(new StringOrListParam().addOr(new StringParam("obsvalue1")));
actual = toUnqualifiedVersionlessIdValues(myPatientDao.patientInstanceEverything(request, ptId1, null, null, null, null, param, null, null, mockSrd()));
actual = toUnqualifiedVersionlessIdValues(myPatientDao.patientInstanceEverything(request, ptId1, null, null, null, null, param, null, null, null, mockSrd()));
assertThat(actual, containsInAnyOrder(toValues(ptId1, obsId1, obsId4, devId1)));

/*

@@ -284,7 +284,7 @@ public class FhirResourceDaoR4SearchFtTest extends BaseJpaR4Test {

param = new StringAndListParam();
param.addAnd(new StringOrListParam().addOr(new StringParam("obsvalue1")));
actual = toUnqualifiedVersionlessIdValues(myPatientDao.patientInstanceEverything(request, ptId1, null, null, null, null, param, null, null, mockSrd()));
actual = toUnqualifiedVersionlessIdValues(myPatientDao.patientInstanceEverything(request, ptId1, null, null, null, null, param, null, null, null, mockSrd()));
assertThat(actual, containsInAnyOrder(toValues(ptId1, obsId4)));

}

@@ -335,11 +335,11 @@ public class FhirResourceDaoR4SearchFtTest extends BaseJpaR4Test {

param = new StringAndListParam();
param.addAnd(new StringOrListParam().addOr(new StringParam("obsvalue1")));
actual = toUnqualifiedVersionlessIdValues(myPatientDao.patientTypeEverything(request, null, null, null, null, param, null, null, mockSrd(), null));
actual = toUnqualifiedVersionlessIdValues(myPatientDao.patientTypeEverything(request, null, null, null, null, param, null, null, null, mockSrd(), null));
assertThat(actual, containsInAnyOrder(toValues(ptId1, obsId1, devId1)));

request = mock(HttpServletRequest.class);
actual = toUnqualifiedVersionlessIdValues(myPatientDao.patientTypeEverything(request, null, null, null, null, null, null, null, mockSrd(), null));
actual = toUnqualifiedVersionlessIdValues(myPatientDao.patientTypeEverything(request, null, null, null, null, null, null, null, null, mockSrd(), null));
assertThat(actual, containsInAnyOrder(toValues(ptId1, obsId1, obsId2, devId1, ptId2, obsId3)));

/*

@@ -355,7 +355,7 @@ public class FhirResourceDaoR4SearchFtTest extends BaseJpaR4Test {

param = new StringAndListParam();
param.addAnd(new StringOrListParam().addOr(new StringParam("obsvalue1")));
actual = toUnqualifiedVersionlessIdValues(myPatientDao.patientTypeEverything(request, null, null, null, null, param, null, null, mockSrd(), null));
actual = toUnqualifiedVersionlessIdValues(myPatientDao.patientTypeEverything(request, null, null, null, null, param, null, null, null, mockSrd(), null));
assertThat(actual, containsInAnyOrder(toValues(ptId1, ptId2, obsId1, obsId4, devId1)));

/*

@@ -371,7 +371,7 @@ public class FhirResourceDaoR4SearchFtTest extends BaseJpaR4Test {

param = new StringAndListParam();
param.addAnd(new StringOrListParam().addOr(new StringParam("obsvalue1")));
actual = toUnqualifiedVersionlessIdValues(myPatientDao.patientTypeEverything(request, null, null, null, null, param, null, null, mockSrd(), null));
actual = toUnqualifiedVersionlessIdValues(myPatientDao.patientTypeEverything(request, null, null, null, null, param, null, null, null, mockSrd(), null));
assertThat(actual, containsInAnyOrder(toValues(ptId1, obsId4)));

}
@@ -1,5 +1,25 @@
package ca.uhn.fhir.batch2.jobs.importpull;

/*-
* #%L
* hapi-fhir-storage-batch2-jobs
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/

import ca.uhn.fhir.batch2.api.IJobParametersValidator;
import ca.uhn.fhir.batch2.importpull.models.Batch2BulkImportPullJobParameters;
import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc;

@@ -1,5 +1,25 @@
package ca.uhn.fhir.batch2.jobs.importpull;

/*-
* #%L
* hapi-fhir-storage-batch2-jobs
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/

import ca.uhn.fhir.batch2.importpull.models.Batch2BulkImportPullJobParameters;
import ca.uhn.fhir.batch2.importpull.models.BulkImportFilePartitionResult;
import ca.uhn.fhir.batch2.importpull.models.BulkImportRecord;

@@ -1,5 +1,25 @@
package ca.uhn.fhir.batch2.jobs.importpull;

/*-
* #%L
* hapi-fhir-storage-batch2-jobs
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/

import ca.uhn.fhir.batch2.api.IFirstJobStepWorker;
import ca.uhn.fhir.batch2.api.IJobDataSink;
import ca.uhn.fhir.batch2.api.JobExecutionFailedException;

@@ -1,5 +1,25 @@
package ca.uhn.fhir.batch2.jobs.importpull;

/*-
* #%L
* hapi-fhir-storage-batch2-jobs
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/

import ca.uhn.fhir.batch2.api.IJobDataSink;
import ca.uhn.fhir.batch2.api.IJobStepWorker;
import ca.uhn.fhir.batch2.api.JobExecutionFailedException;

@@ -1,5 +1,25 @@
package ca.uhn.fhir.batch2.jobs.importpull;

/*-
* #%L
* hapi-fhir-storage-batch2-jobs
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/

import ca.uhn.fhir.batch2.api.IJobDataSink;
import ca.uhn.fhir.batch2.api.ILastJobStepWorker;
import ca.uhn.fhir.batch2.api.JobExecutionFailedException;

@@ -62,6 +62,10 @@ abstract class BaseDataSink<PT extends IModelJson, IT extends IModelJson, OT ext
return myJobWorkCursor.isFirstStep && getWorkChunkCount() == 0;
}

public boolean hasExactlyOneChunk() {
return getWorkChunkCount() == 1;
}

public JobDefinitionStep<PT,IT,OT> getTargetStep() {
return myJobWorkCursor.currentStep;
}

@@ -101,7 +101,7 @@ public class JobStepExecutor<PT extends IModelJson, IT extends IModelJson, OT ex
jobInstance.setJobDefinition(myDefinition);
JobInstanceProgressCalculator calculator = new JobInstanceProgressCalculator(myJobPersistence, jobInstance, new JobChunkProgressAccumulator());
calculator.calculateAndStoreInstanceProgress();
} else {
} else if (theDataSink.hasExactlyOneChunk()) {
JobWorkNotification workNotification = new JobWorkNotification(jobInstance, myCursor.nextStep.getStepId(), ((JobDataSink<PT,IT,OT>) theDataSink).getOnlyChunkId());
myBatchJobSender.sendWorkChannelMessage(workNotification);
}

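Read together with the BaseDataSink hunk above, this turns the unconditional else branch into a guard: the fast-track notification for the next step, which asks the sink for its only chunk ID, is now sent only when exactly one work chunk was produced, so a step that produced zero chunks no longer triggers it. A trivial standalone illustration of the branch behaviour (names are illustrative, not the real batch2 classes):

public class ChunkDispatchSketch {
	public static void main(String[] args) {
		for (int chunkCount : new int[]{0, 1, 2}) {
			// mirrors BaseDataSink.hasExactlyOneChunk(): getWorkChunkCount() == 1
			boolean sendNextStepNotification = chunkCount == 1;
			System.out.println(chunkCount + " chunk(s) -> notify next step: " + sendNextStepNotification);
		}
	}
}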
@@ -1,5 +1,25 @@
package ca.uhn.fhir.batch2.importpull.models;

/*-
* #%L
* HAPI FHIR Storage api
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/

import ca.uhn.fhir.model.api.IModelJson;
import com.fasterxml.jackson.annotation.JsonProperty;

@@ -1,5 +1,25 @@
package ca.uhn.fhir.batch2.importpull.models;

/*-
* #%L
* HAPI FHIR Storage api
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/

import ca.uhn.fhir.jpa.bulk.imprt.model.JobFileRowProcessingModeEnum;
import ca.uhn.fhir.model.api.IModelJson;
import com.fasterxml.jackson.annotation.JsonProperty;

@@ -1,5 +1,25 @@
package ca.uhn.fhir.batch2.importpull.models;

/*-
* #%L
* HAPI FHIR Storage api
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/

import ca.uhn.fhir.jpa.bulk.imprt.model.JobFileRowProcessingModeEnum;
import ca.uhn.fhir.model.api.IModelJson;
import com.fasterxml.jackson.annotation.JsonProperty;

@@ -35,14 +35,6 @@ import javax.servlet.http.HttpServletRequest;

public interface IFhirResourceDaoPatient<T extends IBaseResource> extends IFhirResourceDao<T> {

default IBundleProvider patientInstanceEverything(HttpServletRequest theServletRequest, IIdType theId, IPrimitiveType<Integer> theCount, IPrimitiveType<Integer> theOffset, DateRangeParam theLastUpdate, SortSpec theSort, StringAndListParam theContent, StringAndListParam theNarrative, StringAndListParam theFilter, RequestDetails theRequestDetails){
return patientInstanceEverything(theServletRequest, theId, theCount, theOffset, theLastUpdate, theSort, theContent, theNarrative, theFilter, new StringAndListParam(), theRequestDetails);
}

default IBundleProvider patientTypeEverything(HttpServletRequest theServletRequest, IPrimitiveType<Integer> theCount, IPrimitiveType<Integer> theOffset, DateRangeParam theLastUpdated, SortSpec theSortSpec, StringAndListParam theContent, StringAndListParam theNarrative, StringAndListParam theFilter, RequestDetails theRequestDetails, TokenOrListParam theId){
return patientTypeEverything(theServletRequest, theCount, theOffset, theLastUpdated, theSortSpec, theContent, theNarrative, theFilter, new StringAndListParam(), theRequestDetails, theId);
}

IBundleProvider patientInstanceEverything(HttpServletRequest theServletRequest, IIdType theId, IPrimitiveType<Integer> theCount, IPrimitiveType<Integer> theOffset, DateRangeParam theLastUpdate, SortSpec theSort, StringAndListParam theContent, StringAndListParam theNarrative, StringAndListParam theFilter, StringAndListParam theTypes, RequestDetails theRequestDetails);

IBundleProvider patientTypeEverything(HttpServletRequest theServletRequest, IPrimitiveType<Integer> theCount, IPrimitiveType<Integer> theOffset, DateRangeParam theLastUpdated, SortSpec theSortSpec, StringAndListParam theContent, StringAndListParam theNarrative, StringAndListParam theFilter, StringAndListParam theTypes, RequestDetails theRequestDetails, TokenOrListParam theId);

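With the default overloads gone, every caller of the $everything DAO methods supplies the new StringAndListParam argument (named theTypes in the new signatures, sitting just before RequestDetails); the test updates earlier in this diff simply pass null. A hypothetical caller sketch, borrowing myPatientDao, request, ptId1 and mockSrd() from those tests, with the "Observation" filter value purely illustrative:

StringAndListParam theTypes = new StringAndListParam();
theTypes.addAnd(new StringOrListParam().addOr(new StringParam("Observation")));

IBundleProvider everything = myPatientDao.patientInstanceEverything(
	request, ptId1,
	null, null,        // theCount, theOffset
	null, null,        // theLastUpdate, theSort
	null, null, null,  // theContent, theNarrative, theFilter
	theTypes,          // the new parameter; null preserves the old behaviour
	mockSrd());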
@@ -1,5 +1,25 @@
package ca.uhn.fhir.jpa.bulk.imprt.model;

/*-
* #%L
* HAPI FHIR Storage api
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/

import javax.annotation.Nullable;

public class ActivateJobResult {

pom.xml
@@ -2791,9 +2791,7 @@
<module>hapi-fhir-docs</module>
<module>hapi-fhir-test-utilities</module>
<module>hapi-fhir-jpaserver-test-utilities</module>
<!-- TODO KHS temporarily disable these tests because they are blocking ci
<module>hapi-fhir-jpaserver-elastic-test-utilities</module>
-->
<module>hapi-tinder-plugin</module>
<module>hapi-tinder-test</module>
<module>hapi-fhir-client</module>