Remove some dead code

This commit is contained in:
Tadgh 2021-02-24 15:57:55 -05:00
parent 8e45a84927
commit 3937d2ef01
4 changed files with 11 additions and 22 deletions

View File

@@ -1,8 +0,0 @@
# Bulk Export
## Introduction
## Operations
## Examples

View File

@@ -59,7 +59,8 @@ public class PidToIBaseResourceProcessor implements ItemProcessor<List<ResourceP
@Override
public List<IBaseResource> process(List<ResourcePersistentId> theResourcePersistentId) {
String collect = theResourcePersistentId.stream().map(pid -> pid.getId().toString()).collect(Collectors.joining(","));
ourLog.info("Processing pids {}" + collect);
ourLog.debug("Processing PIDs: {}" + collect);
IFhirResourceDao<?> dao = myDaoRegistry.getResourceDao(myResourceType);
Class<? extends IBaseResource> resourceTypeClass = myContext.getResourceDefinition(myResourceType).getImplementingClass();
@@ -67,7 +68,7 @@ public class PidToIBaseResourceProcessor implements ItemProcessor<List<ResourceP
List<IBaseResource> outgoing = new ArrayList<>();
sb.loadResourcesByPid(theResourcePersistentId, Collections.emptyList(), outgoing, false, null);
ourLog.warn("Loaded resources: {}", outgoing.stream().map(t->t.getIdElement().getValue()).collect(Collectors.joining(", ")));
ourLog.debug("Loaded resources: {}", outgoing.stream().map(t->t.getIdElement().getValue()).collect(Collectors.joining(", ")));
return outgoing;

View File

@@ -41,7 +41,6 @@ import static org.slf4j.LoggerFactory.getLogger;
* This class will prevent a job from running if the UUID does not exist or is invalid.
*/
public class BulkExportJobParameterValidator implements JobParametersValidator {
private static final Logger ourLog = getLogger(BulkExportJobParameterValidator.class);
@Autowired
private IBulkExportJobDao myBulkExportJobDao;

View File

@@ -35,7 +35,6 @@ import ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import ca.uhn.fhir.rest.param.DateRangeParam;
import ca.uhn.fhir.util.UrlUtil;
@@ -55,14 +54,14 @@ import java.util.Optional;
public class BulkItemReader implements ItemReader<List<ResourcePersistentId>> {
private static final Logger ourLog = Logs.getBatchTroubleshootingLog();
Iterator<ResourcePersistentId> myPidIterator;
@Value("#{jobParameters['readChunkSize']}")
private Long READ_CHUNK_SIZE;
@Value("#{jobExecutionContext['jobUUID']}")
private String myJobUUID;
@Value("#{stepExecutionContext['resourceType']}")
private String myResourceType;
@Value("#{jobParameters['groupId']}")
private String myGroupId;
@Value("#{jobParameters['" + BulkExportJobConfig.READ_CHUNK_PARAMETER + "']}")
private Long myReadChunkSize;
@Value("#{jobParameters['"+ BulkExportJobConfig.JOB_UUID_PARAMETER+"']}")
private String myJobUUID;
@Autowired
private IBulkExportJobDao myBulkExportJobDao;
@@ -94,9 +93,7 @@ public class BulkItemReader implements ItemReader<List<ResourcePersistentId>> {
ISearchBuilder sb = mySearchBuilderFactory.newSearchBuilder(dao, myResourceType, nextTypeClass);
SearchParameterMap map = createSearchParameterMapFromTypeFilter(jobEntity, def);
if (myGroupId != null) {
}
if (jobEntity.getSince() != null) {
map.setLastUpdated(new DateRangeParam(jobEntity.getSince(), null));
}
@@ -131,7 +128,7 @@ public class BulkItemReader implements ItemReader<List<ResourcePersistentId>> {
}
int count = 0;
List<ResourcePersistentId> outgoing = new ArrayList<>();
while (myPidIterator.hasNext() && count < READ_CHUNK_SIZE) {
while (myPidIterator.hasNext() && count < myReadChunkSize) {
outgoing.add(myPidIterator.next());
count += 1;
}