Merge branch 'rel_6_4' into nd-4123-bulk-export-stuck-in-finalize-state
commit 1280367593
@@ -186,6 +186,7 @@ public abstract class BaseApp {
		commands.add(new ExportConceptMapToCsvCommand());
		commands.add(new ImportCsvToConceptMapCommand());
		commands.add(new HapiFlywayMigrateDatabaseCommand());
		commands.add(new HapiClearMigrationLockCommand());
		commands.add(new CreatePackageCommand());
		commands.add(new BulkImportCommand());
		commands.add(new ReindexTerminologyCommand());
@@ -0,0 +1,92 @@
package ca.uhn.fhir.cli;

/*-
 * #%L
 * HAPI FHIR - Command Line Client - API
 * %%
 * Copyright (C) 2014 - 2023 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.HapiMigrator;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;

import java.util.Arrays;
import java.util.stream.Collectors;

/**
 * Base command for clearing a stale database migration lock.
 */
public abstract class BaseClearMigrationLockCommand extends BaseCommand {

	public static final String CLEAR_LOCK = "clear-migration-lock";
	private String myMigrationTableName;

	@Override
	public String getCommandDescription() {
		return "This command clears a database migration lock";
	}

	@Override
	public String getCommandName() {
		return CLEAR_LOCK;
	}

	@Override
	public Options getOptions() {
		Options retVal = new Options();
		addRequiredOption(retVal, "u", "url", "URL", "The JDBC database URL");
		addRequiredOption(retVal, "n", "username", "Username", "The JDBC database username");
		addRequiredOption(retVal, "p", "password", "Password", "The JDBC database password");
		addRequiredOption(retVal, "d", "driver", "Driver", "The database driver to use (Options are " + driverOptions() + ")");
		addRequiredOption(retVal, "l", "lock-uuid", "Lock UUID", "The UUID value of the lock held in the database.");
		return retVal;
	}

	private String driverOptions() {
		return Arrays.stream(DriverTypeEnum.values()).map(Enum::name).collect(Collectors.joining(", "));
	}

	@Override
	public void run(CommandLine theCommandLine) throws ParseException {

		String url = theCommandLine.getOptionValue("u");
		String username = theCommandLine.getOptionValue("n");
		String password = theCommandLine.getOptionValue("p");
		String lockUUID = theCommandLine.getOptionValue("l");
		DriverTypeEnum driverType;
		String driverTypeString = theCommandLine.getOptionValue("d");
		try {
			driverType = DriverTypeEnum.valueOf(driverTypeString);
		} catch (Exception e) {
			throw new ParseException(Msg.code(2774) + "Invalid driver type \"" + driverTypeString + "\". Valid values are: " + driverOptions());
		}

		DriverTypeEnum.ConnectionProperties connectionProperties = driverType.newConnectionProperties(url, username, password);
		HapiMigrator migrator = new HapiMigrator(myMigrationTableName, connectionProperties.getDataSource(), driverType);
		migrator.clearMigrationLockWithUUID(lockUUID);
	}

	protected void setMigrationTableName(String theMigrationTableName) {
		myMigrationTableName = theMigrationTableName;
	}
}
@@ -0,0 +1,13 @@
package ca.uhn.fhir.cli;

import ca.uhn.fhir.jpa.migrate.SchemaMigrator;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.ParseException;

public class HapiClearMigrationLockCommand extends BaseClearMigrationLockCommand {
	@Override
	public void run(CommandLine theCommandLine) throws ParseException {
		setMigrationTableName(SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME);
		super.run(theCommandLine);
	}
}
@@ -1,31 +0,0 @@
package ca.uhn.fhir.cli;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;

import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.MatcherAssert.assertThat;

public class BaseAppTest {

	private final PrintStream standardOut = System.out;
	private final ByteArrayOutputStream outputStreamCaptor = new ByteArrayOutputStream();

	@BeforeEach
	public void setUp() {
		System.setOut(new PrintStream(outputStreamCaptor));
	}

	@AfterEach
	public void tearDown() {
		System.setOut(standardOut);
	}

	@Test
	public void testHelpOption() {
		App.main(new String[]{"help", "create-package"});
		assertThat(outputStreamCaptor.toString().trim(), outputStreamCaptor.toString().trim(), containsString("Usage"));
	}
}
@@ -0,0 +1,32 @@
package ca.uhn.fhir.cli;

import org.apache.commons.io.output.TeeOutputStream;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;

import java.io.ByteArrayOutputStream;
import java.io.PrintStream;

/**
 * This class splits the output stream to both STDOUT and a capturing byte array output stream, which can later be inspected.
 */
public class ConsoleOutputCapturingBaseTest {

	protected final ByteArrayOutputStream outputStreamCaptor = new ByteArrayOutputStream();
	protected final TeeOutputStream myTeeOutputStream = new TeeOutputStream(System.out, outputStreamCaptor);

	@BeforeEach
	public void setUp() {
		System.setOut(new PrintStream(myTeeOutputStream));
	}

	@AfterEach
	public void tearDown() {
		outputStreamCaptor.reset();
		System.setOut(System.out);
	}

	protected String getConsoleOutput() {
		return outputStreamCaptor.toString().trim();
	}

}
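The base class above tees everything written to STDOUT into a byte array, so tests can assert on console output while the text still appears in the build log. A minimal usage sketch (the test class below is illustrative and not part of this change; it mirrors the HelpOptionTest added later in this diff):

```java
package ca.uhn.fhir.cli;

import org.junit.jupiter.api.Test;

import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.MatcherAssert.assertThat;

// Hypothetical example test; any CLI invocation that prints to STDOUT works the same way.
public class ExampleConsoleOutputTest extends ConsoleOutputCapturingBaseTest {

	@Test
	public void testHelpPrintsUsage() {
		App.main(new String[]{"help", "create-package"});
		// getConsoleOutput() returns the trimmed captured STDOUT text.
		assertThat(getConsoleOutput(), containsString("Usage"));
	}
}
```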
@@ -0,0 +1,311 @@
package ca.uhn.fhir.cli;

import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.dao.HapiMigrationDao;
import ca.uhn.fhir.jpa.migrate.entity.HapiMigrationEntity;
import ca.uhn.fhir.system.HapiSystemProperties;
import com.google.common.base.Charsets;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.core.support.AbstractLobCreatingPreparedStatementCallback;
import org.springframework.jdbc.support.lob.DefaultLobHandler;
import org.springframework.jdbc.support.lob.LobCreator;

import javax.annotation.Nonnull;
import java.io.File;
import java.io.IOException;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.sql.Types;
import java.util.*;

import static ca.uhn.fhir.jpa.migrate.HapiMigrationLock.LOCK_PID;
import static ca.uhn.fhir.jpa.migrate.HapiMigrationStorageSvc.LOCK_TYPE;
import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.jupiter.api.Assertions.*;
import static org.slf4j.LoggerFactory.getLogger;

public class HapiClearMigrationLockCommandTest extends ConsoleOutputCapturingBaseTest {

	private static final Logger ourLog = getLogger(HapiClearMigrationLockCommandTest.class);

	public static final String DB_DIRECTORY = "target/h2_test";

	static {
		HapiSystemProperties.enableTestMode();
	}

	@Test
	public void testClearNonExistingLockIncorrectLock() throws IOException {
		ConnectionData connectionData = createSchemaAndMigrate("test_migrate_clear_incorrect_lock");
		HapiMigrationDao dao = new HapiMigrationDao(connectionData.connectionProperties.getDataSource(), DriverTypeEnum.H2_EMBEDDED, "FLY_HFJ_MIGRATION");
		String correctLockUUID = UUID.randomUUID().toString();
		String incorrectLockUUID = UUID.randomUUID().toString();
		createAndSaveLockRow(correctLockUUID, dao);

		String[] args = new String[]{
			BaseClearMigrationLockCommand.CLEAR_LOCK,
			"-d", "H2_EMBEDDED",
			"-u", connectionData.url,
			"-n", "",
			"-p", "",
			"-l", incorrectLockUUID
		};

		int beforeClearMigrationCount = dao.findAll().size();
		try {
			App.main(args);
			fail();
		} catch (CommandFailureException e) {
			assertThat(e.getMessage(), containsString("HAPI-2152: Internal error: on unlocking, a competing lock was found"));
		}
	}

	@Test
	public void testClearNonExistingLockNoLocks() throws IOException {
		ConnectionData connectionData = createSchemaAndMigrate("test_migrate_clear_nonexisting_lock");
		HapiMigrationDao dao = new HapiMigrationDao(connectionData.connectionProperties.getDataSource(), DriverTypeEnum.H2_EMBEDDED, "FLY_HFJ_MIGRATION");
		String lockUUID = UUID.randomUUID().toString();

		String[] args = new String[]{
			BaseClearMigrationLockCommand.CLEAR_LOCK,
			"-d", "H2_EMBEDDED",
			"-u", connectionData.url,
			"-n", "",
			"-p", "",
			"-l", lockUUID
		};

		int beforeClearMigrationCount = dao.findAll().size();
		App.main(args);
		int afterClearMigrationCount = dao.findAll().size();
		int removedRows = beforeClearMigrationCount - afterClearMigrationCount;
		assertEquals(0, removedRows);
		assertThat(getConsoleOutput(), containsString("Did not successfully remove lock entry. [uuid="+ lockUUID +"]"));
	}

	@Test
	public void testMigrateAndClearExistingLock() throws IOException, SQLException {
		ConnectionData connectionData = createSchemaAndMigrate("test_migrate_clear_existing_lock");
		HapiMigrationDao dao = new HapiMigrationDao(connectionData.connectionProperties.getDataSource(), DriverTypeEnum.H2_EMBEDDED, "FLY_HFJ_MIGRATION");
		String lockUUID = UUID.randomUUID().toString();
		createAndSaveLockRow(lockUUID, dao);

		String[] args = new String[]{
			BaseClearMigrationLockCommand.CLEAR_LOCK,
			"-d", "H2_EMBEDDED",
			"-u", connectionData.url,
			"-n", "",
			"-p", "",
			"-l", lockUUID
		};
		int beforeClearMigrationCount = dao.findAll().size();
		App.main(args);
		int afterClearMigrationCount = dao.findAll().size();
		int removedRows = beforeClearMigrationCount - afterClearMigrationCount;

		assertEquals(1, removedRows);
		assertThat(getConsoleOutput(), containsString("Successfully removed lock entry. [uuid="+ lockUUID +"]"));
	}

	private record ConnectionData(DriverTypeEnum.ConnectionProperties connectionProperties, String url) {}

	public ConnectionData createSchemaAndMigrate(String theDbName) throws IOException {

		File location = getLocation(theDbName);

		String url = "jdbc:h2:" + location.getAbsolutePath();
		DriverTypeEnum.ConnectionProperties connectionProperties = DriverTypeEnum.H2_EMBEDDED.newConnectionProperties(url, "", "");
		String initSql = "/persistence_create_h2_340.sql";
		executeSqlStatements(connectionProperties, initSql);

		seedDatabase340(connectionProperties);

		ourLog.info("**********************************************");
		ourLog.info("Done Setup, Starting Migration...");
		ourLog.info("**********************************************");

		String[] args = new String[]{
			BaseFlywayMigrateDatabaseCommand.MIGRATE_DATABASE,
			"-d", "H2_EMBEDDED",
			"-u", url,
			"-n", "",
			"-p", "",
			"-r"
		};
		App.main(args);
		return new ConnectionData(connectionProperties, url);
	}

	private static void createAndSaveLockRow(String theLockUUID, HapiMigrationDao theDao) {
		HapiMigrationEntity me = new HapiMigrationEntity();
		me.setPid(LOCK_PID);
		me.setChecksum(100);
		me.setDescription(theLockUUID);
		me.setSuccess(true);
		me.setExecutionTime(20);
		me.setInstalledBy("gary");
		me.setInstalledOn(new Date());
		me.setVersion("2023.1");
		me.setType(LOCK_TYPE);
		theDao.save(me);
	}

	@Nonnull
	private File getLocation(String theDatabaseName) throws IOException {
		File directory = new File(DB_DIRECTORY);
		if (directory.exists()) {
			FileUtils.deleteDirectory(directory);
		}

		return new File(DB_DIRECTORY + "/" + theDatabaseName);
	}

	private void seedDatabase340(DriverTypeEnum.ConnectionProperties theConnectionProperties) {
		theConnectionProperties.getTxTemplate().execute(t -> {
			JdbcTemplate jdbcTemplate = theConnectionProperties.newJdbcTemplate();

			jdbcTemplate.execute(
				"insert into HFJ_RESOURCE (RES_DELETED_AT, RES_VERSION, FORCED_ID_PID, HAS_TAGS, RES_PUBLISHED, RES_UPDATED, SP_HAS_LINKS, HASH_SHA256, SP_INDEX_STATUS, RES_LANGUAGE, SP_CMPSTR_UNIQ_PRESENT, SP_COORDS_PRESENT, SP_DATE_PRESENT, SP_NUMBER_PRESENT, SP_QUANTITY_PRESENT, SP_STRING_PRESENT, SP_TOKEN_PRESENT, SP_URI_PRESENT, RES_PROFILE, RES_TYPE, RES_VER, RES_ID) values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
				new AbstractLobCreatingPreparedStatementCallback(new DefaultLobHandler()) {
					@Override
					protected void setValues(PreparedStatement thePs, LobCreator theLobCreator) throws SQLException {
						thePs.setNull(1, Types.TIMESTAMP);
						thePs.setString(2, "R4");
						thePs.setNull(3, Types.BIGINT);
						thePs.setBoolean(4, false);
						thePs.setTimestamp(5, new Timestamp(System.currentTimeMillis()));
						thePs.setTimestamp(6, new Timestamp(System.currentTimeMillis()));
						thePs.setBoolean(7, false);
						thePs.setNull(8, Types.VARCHAR);
						thePs.setLong(9, 1L);
						thePs.setNull(10, Types.VARCHAR);
						thePs.setBoolean(11, false);
						thePs.setBoolean(12, false);
						thePs.setBoolean(13, false);
						thePs.setBoolean(14, false);
						thePs.setBoolean(15, false);
						thePs.setBoolean(16, false);
						thePs.setBoolean(17, false);
						thePs.setBoolean(18, false);
						thePs.setNull(19, Types.VARCHAR);
						thePs.setString(20, "Patient");
						thePs.setLong(21, 1L);
						thePs.setLong(22, 1L);
					}
				}
			);

			jdbcTemplate.execute(
				"insert into HFJ_RES_VER (RES_DELETED_AT, RES_VERSION, FORCED_ID_PID, HAS_TAGS, RES_PUBLISHED, RES_UPDATED, RES_ENCODING, RES_TEXT, RES_ID, RES_TYPE, RES_VER, PID) values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
				new AbstractLobCreatingPreparedStatementCallback(new DefaultLobHandler()) {
					@Override
					protected void setValues(PreparedStatement thePs, LobCreator theLobCreator) throws SQLException {
						thePs.setNull(1, Types.TIMESTAMP);
						thePs.setString(2, "R4");
						thePs.setNull(3, Types.BIGINT);
						thePs.setBoolean(4, false);
						thePs.setTimestamp(5, new Timestamp(System.currentTimeMillis()));
						thePs.setTimestamp(6, new Timestamp(System.currentTimeMillis()));
						thePs.setString(7, "JSON");
						theLobCreator.setBlobAsBytes(thePs, 8, "{\"resourceType\":\"Patient\"}".getBytes(Charsets.US_ASCII));
						thePs.setLong(9, 1L);
						thePs.setString(10, "Patient");
						thePs.setLong(11, 1L);
						thePs.setLong(12, 1L);
					}
				}
			);

			jdbcTemplate.execute(
				"insert into HFJ_SPIDX_STRING (SP_MISSING, SP_NAME, RES_ID, RES_TYPE, SP_UPDATED, SP_VALUE_EXACT, SP_VALUE_NORMALIZED, SP_ID) values (?, ?, ?, ?, ?, ?, ?, ?)",
				new AbstractLobCreatingPreparedStatementCallback(new DefaultLobHandler()) {
					@Override
					protected void setValues(PreparedStatement thePs, LobCreator theLobCreator) throws SQLException {
						thePs.setBoolean(1, false);
						thePs.setString(2, "given");
						thePs.setLong(3, 1L); // res-id
						thePs.setString(4, "Patient");
						thePs.setTimestamp(5, new Timestamp(System.currentTimeMillis()));
						thePs.setString(6, "ROBERT");
						thePs.setString(7, "Robert");
						thePs.setLong(8, 1L);
					}
				}
			);

			jdbcTemplate.execute(
				"insert into HFJ_SPIDX_TOKEN (SP_MISSING, SP_NAME, RES_ID, RES_TYPE, SP_UPDATED, SP_SYSTEM, SP_VALUE, SP_ID) values (?, ?, ?, ?, ?, ?, ?, ?)",
				new AbstractLobCreatingPreparedStatementCallback(new DefaultLobHandler()) {
					@Override
					protected void setValues(PreparedStatement thePs, LobCreator theLobCreator) throws SQLException {
						thePs.setBoolean(1, false);
						thePs.setString(2, "identifier");
						thePs.setLong(3, 1L); // res-id
						thePs.setString(4, "Patient");
						thePs.setTimestamp(5, new Timestamp(System.currentTimeMillis()));
						thePs.setString(6, "http://foo");
						thePs.setString(7, "12345678");
						thePs.setLong(8, 1L);
					}
				}
			);

			jdbcTemplate.execute(
				"insert into HFJ_SPIDX_DATE (SP_MISSING, SP_NAME, RES_ID, RES_TYPE, SP_UPDATED, SP_VALUE_HIGH, SP_VALUE_LOW, SP_ID) values (?, ?, ?, ?, ?, ?, ?, ?)",
				new AbstractLobCreatingPreparedStatementCallback(new DefaultLobHandler()) {
					@Override
					protected void setValues(PreparedStatement thePs, LobCreator theLobCreator) throws SQLException {
						thePs.setBoolean(1, false);
						thePs.setString(2, "birthdate");
						thePs.setLong(3, 1L); // res-id
						thePs.setString(4, "Patient");
						thePs.setTimestamp(5, new Timestamp(System.currentTimeMillis()));
						thePs.setTimestamp(6, new Timestamp(1000000000L)); // value high
						thePs.setTimestamp(7, new Timestamp(1000000000L)); // value low
						thePs.setLong(8, 1L);
					}
				}
			);

			return null;
		});

	}

	private void executeSqlStatements(DriverTypeEnum.ConnectionProperties theConnectionProperties, String theInitSql) throws IOException {
		String script = IOUtils.toString(HapiClearMigrationLockCommandTest.class.getResourceAsStream(theInitSql), Charsets.UTF_8);
		List<String> scriptStatements = new ArrayList<>(Arrays.asList(script.split("\n")));
		for (int i = 0; i < scriptStatements.size(); i++) {
			String nextStatement = scriptStatements.get(i);
			if (isBlank(nextStatement)) {
				scriptStatements.remove(i);
				i--;
				continue;
			}

			nextStatement = nextStatement.trim();
			while (nextStatement.endsWith(";")) {
				nextStatement = nextStatement.substring(0, nextStatement.length() - 1);
			}
			scriptStatements.set(i, nextStatement);
		}

		theConnectionProperties.getTxTemplate().execute(t -> {
			for (String next : scriptStatements) {
				theConnectionProperties.newJdbcTemplate().execute(next);
			}
			return null;
		});

	}

}
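In the test helper above, `createAndSaveLockRow` writes the lock record in the same shape the migrator itself uses: a row with the reserved lock PID whose description carries the lock UUID. A condensed sketch of the essential fields (this reading of the row layout is inferred from the test expectations, not stated explicitly in this diff):

```java
// Sketch only: the fields that matter for the lock semantics exercised by the tests above.
HapiMigrationEntity lockRow = new HapiMigrationEntity();
lockRow.setPid(LOCK_PID);            // fixed PID reserved for the single migration-lock row
lockRow.setType(LOCK_TYPE);          // marks the row as a lock rather than a completed migration
lockRow.setDescription(theLockUUID); // the UUID that clear-migration-lock must be given to remove the row
```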
@@ -0,0 +1,13 @@
package ca.uhn.fhir.cli;
import org.junit.jupiter.api.Test;

import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.MatcherAssert.assertThat;

public class HelpOptionTest extends ConsoleOutputCapturingBaseTest {
	@Test
	public void testHelpOption() {
		App.main(new String[]{"help", "create-package"});
		assertThat(outputStreamCaptor.toString().trim(), outputStreamCaptor.toString().trim(), containsString("Usage"));
	}
}
@@ -0,0 +1,5 @@
---
type: fix
issue: 4500
jira: SMILE-6001
title: "Scheduled bulk export job and binary purging was not working with relational databases. This has now been fixed with a reimplementation for batch 2."
@@ -0,0 +1,5 @@
---
type: fix
issue: 4508
title: "Deleting CodeSystem resources by URL then expunging would fail to
  expunge and a foreign key error would be thrown. This has been fixed."
@@ -0,0 +1,4 @@
---
type: fix
issue: 4516
title: "A new command has been added to the HAPI-FHIR CLI called `clear-migration-lock`. This can be used to fix a database state which can occur if a migration is interrupted before completing."
@@ -101,6 +101,21 @@ The `migrate-database` command may be used to Migrate a database schema when upg

See [Upgrading HAPI FHIR JPA](/docs/server_jpa/upgrading.html) for information on how to use this command.

# Clear Migration Lock

The `clear-migration-lock` command should be used if an upgrade to HAPI-FHIR failed during a migration. The migration system creates a lock row when it begins. If the migration is cancelled before it finishes, the system will be left in an inconsistent state. In order to resume the migration, the lock row must be removed. From your migration logs, you will see a line which looks like the following:

```text
Migration Lock Row added. [uuid=05931c87-c2a4-49d6-8d82-d8ce09fdd8ef]
```

In order to clear this migration lock, you can run:

```bash
clear-migration-lock --lock-uuid 05931c87-c2a4-49d6-8d82-d8ce09fdd8ef
```
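Note that `clear-migration-lock` defines the same connection options as `migrate-database`: per `BaseClearMigrationLockCommand` in this change, the driver, JDBC URL, username, and password are required alongside the lock UUID. A hedged sketch of the equivalent programmatic invocation, mirroring the argument list used in `HapiClearMigrationLockCommandTest` (the JDBC URL and credentials are placeholders; the UUID is the one from the example log line above):

```java
// Illustrative only: not part of this change.
App.main(new String[]{
	"clear-migration-lock",
	"-d", "H2_EMBEDDED",
	"-u", "jdbc:h2:./target/h2_test/my_database",
	"-n", "",
	"-p", "",
	"-l", "05931c87-c2a4-49d6-8d82-d8ce09fdd8ef"
});
```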
# Reindex Terminology

The `reindex-terminology` command may be used to recreate freetext indexes for terminology resources.
@@ -136,6 +136,12 @@ public class JpaJobPersistenceImpl implements IJobPersistence {
		return entity.getId();
	}

	@Override
	@Transactional(propagation = Propagation.REQUIRES_NEW)
	public List<JobInstance> fetchInstances(String theJobDefinitionId, Set<StatusEnum> theStatuses, Date theCutoff, Pageable thePageable) {
		return toInstanceList(myJobInstanceRepository.findInstancesByJobIdAndStatusAndExpiry(theJobDefinitionId, theStatuses, theCutoff, thePageable));
	}

	@Override
	@Transactional(propagation = Propagation.REQUIRES_NEW)
	public List<JobInstance> fetchInstancesByJobDefinitionIdAndStatus(String theJobDefinitionId, Set<StatusEnum> theRequestedStatuses, int thePageSize, int thePageIndex) {
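The new `fetchInstances` overload above is what the reworked bulk-export purge logic later in this diff uses to find ended bulk export jobs older than the configured retention cutoff. A minimal sketch of the call, assuming an injected `IJobPersistence` named `jobPersistence` (the two-hour cutoff is only an example; the real value comes from `DaoConfig`):

```java
// Sketch only: mirrors the call made from BulkDataExportJobSchedulingHelperImpl below.
Date cutoff = Date.from(LocalDateTime.now().minusHours(2).atZone(ZoneId.systemDefault()).toInstant());
List<JobInstance> endedBulkExportJobs = jobPersistence.fetchInstances(
	Batch2JobDefinitionConstants.BULK_EXPORT,
	StatusEnum.getEndedStatuses(),
	cutoff,
	PageRequest.of(0, 50));
```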
@@ -20,38 +20,47 @@ package ca.uhn.fhir.jpa.bulk.export.svc;
 * #L%
 */

import ca.uhn.fhir.batch2.api.IJobPersistence;
import ca.uhn.fhir.batch2.jobs.export.BulkExportAppCtx;
import ca.uhn.fhir.batch2.jobs.export.models.BulkExportBinaryFileId;
import ca.uhn.fhir.batch2.model.JobInstance;
import ca.uhn.fhir.batch2.model.StatusEnum;
import ca.uhn.fhir.batch2.model.WorkChunk;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.api.model.ExpungeOptions;
import ca.uhn.fhir.jpa.api.model.BulkExportJobResults;
import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportJobSchedulingHelper;
import ca.uhn.fhir.jpa.dao.data.IBulkExportCollectionDao;
import ca.uhn.fhir.jpa.dao.data.IBulkExportCollectionFileDao;
import ca.uhn.fhir.jpa.dao.data.IBulkExportJobDao;
import ca.uhn.fhir.jpa.entity.BulkExportCollectionEntity;
import ca.uhn.fhir.jpa.entity.BulkExportCollectionFileEntity;
import ca.uhn.fhir.jpa.entity.BulkExportJobEntity;
import ca.uhn.fhir.jpa.model.sched.HapiJob;
import ca.uhn.fhir.jpa.model.sched.IHasScheduledJobs;
import ca.uhn.fhir.jpa.model.sched.ISchedulerService;
import ca.uhn.fhir.jpa.model.sched.ScheduledJobDefinition;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import ca.uhn.fhir.util.Batch2JobDefinitionConstants;
import ca.uhn.fhir.util.JsonUtil;
import com.google.common.annotations.VisibleForTesting;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.hl7.fhir.instance.model.api.IBaseBinary;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.Binary;
import org.quartz.JobExecutionContext;
import org.slf4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Slice;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.transaction.support.TransactionTemplate;

import javax.annotation.Nonnull;
import javax.annotation.PostConstruct;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Optional;

import static org.slf4j.LoggerFactory.getLogger;
@@ -59,26 +68,24 @@ import static org.slf4j.LoggerFactory.getLogger;
public class BulkDataExportJobSchedulingHelperImpl implements IBulkDataExportJobSchedulingHelper, IHasScheduledJobs {
	private static final Logger ourLog = getLogger(BulkDataExportJobSchedulingHelperImpl.class);

	@Autowired
	private DaoRegistry myDaoRegistry;
	private final DaoRegistry myDaoRegistry;

	@Autowired
	private IBulkExportCollectionDao myBulkExportCollectionDao;

	@Autowired
	private IBulkExportCollectionFileDao myBulkExportCollectionFileDao;

	@Autowired
	private PlatformTransactionManager myTxManager;
	private final PlatformTransactionManager myTxManager;
	private TransactionTemplate myTxTemplate;

	@Autowired
	private IBulkExportJobDao myBulkExportJobDao;
	private final DaoConfig myDaoConfig;
	private final BulkExportHelperService myBulkExportHelperSvc;

	@Autowired
	private DaoConfig myDaoConfig;
	@Autowired
	private BulkExportHelperService myBulkExportHelperSvc;
	private final IJobPersistence myJpaJobPersistence;

	public BulkDataExportJobSchedulingHelperImpl(DaoRegistry theDaoRegistry, PlatformTransactionManager theTxManager, DaoConfig theDaoConfig, BulkExportHelperService theBulkExportHelperSvc, IJobPersistence theJpaJobPersistence, TransactionTemplate theTxTemplate) {
		myDaoRegistry = theDaoRegistry;
		myTxManager = theTxManager;
		myDaoConfig = theDaoConfig;
		myBulkExportHelperSvc = theBulkExportHelperSvc;
		myJpaJobPersistence = theJpaJobPersistence;
		myTxTemplate = theTxTemplate;
	}

	@PostConstruct
	public void start() {
@@ -97,15 +104,10 @@ public class BulkDataExportJobSchedulingHelperImpl implements IBulkDataExportJob
	@Override
	@Transactional(propagation = Propagation.NEVER)
	public synchronized void cancelAndPurgeAllJobs() {
		myTxTemplate.execute(t -> {
			ourLog.info("Deleting all files");
			myBulkExportCollectionFileDao.deleteAllFiles();
			ourLog.info("Deleting all collections");
			myBulkExportCollectionDao.deleteAllFiles();
			ourLog.info("Deleting all jobs");
			myBulkExportJobDao.deleteAllFiles();
			return null;
		});
		// This is called by unit test code that also calls ExpungeEverythingService,
		// which explicitly deletes both Batch2WorkChunkEntity and Batch2JobInstanceEntity, as well as ResourceTable, in
		// which Binary's are stored
		// Long story short, this method no longer needs to do anything
	}

	/**
@@ -116,51 +118,111 @@ public class BulkDataExportJobSchedulingHelperImpl implements IBulkDataExportJob
	@Override
	public void purgeExpiredFiles() {
		if (!myDaoConfig.isEnableTaskBulkExportJobExecution()) {
			ourLog.debug("bulk export disabled: doing nothing");
			return;
		}

		Optional<BulkExportJobEntity> jobToDelete = myTxTemplate.execute(t -> {
			Pageable page = PageRequest.of(0, 1);
			Slice<BulkExportJobEntity> submittedJobs = myBulkExportJobDao.findNotRunningByExpiry(page, new Date());
			if (submittedJobs.isEmpty()) {
				return Optional.empty();
			}
			return Optional.of(submittedJobs.getContent().get(0));
		});
		final List<JobInstance> jobInstancesToDelete = myTxTemplate.execute(t ->
			myJpaJobPersistence.fetchInstances(Batch2JobDefinitionConstants.BULK_EXPORT,
				StatusEnum.getEndedStatuses(),
				computeCutoffFromConfig(),
				PageRequest.of(0, 50))
		);

		if (jobToDelete.isPresent()) {
			ourLog.info("Deleting bulk export job: {}", jobToDelete.get());
		if (jobInstancesToDelete == null || jobInstancesToDelete.isEmpty()) {
			ourLog.debug("No batch 2 bulk export jobs found! Nothing to do!");
			ourLog.info("Finished bulk export job deletion with nothing to do");
			return;
		}

		for (JobInstance jobInstance : jobInstancesToDelete) {
			ourLog.info("Deleting batch 2 bulk export job: {}", jobInstance);

			myTxTemplate.execute(t -> {
				BulkExportJobEntity job = myBulkExportJobDao.getOne(jobToDelete.get().getId());
				for (BulkExportCollectionEntity nextCollection : job.getCollections()) {
					for (BulkExportCollectionFileEntity nextFile : nextCollection.getFiles()) {
				final Optional<JobInstance> optJobInstanceForInstanceId = myJpaJobPersistence.fetchInstance(jobInstance.getInstanceId());

						ourLog.info("Purging bulk data file: {}", nextFile.getResourceId());
						IIdType id = myBulkExportHelperSvc.toId(nextFile.getResourceId());
						getBinaryDao().delete(id, new SystemRequestDetails());
						getBinaryDao().forceExpungeInExistingTransaction(id, new ExpungeOptions().setExpungeDeletedResources(true).setExpungeOldVersions(true), new SystemRequestDetails());
						myBulkExportCollectionFileDao.deleteByPid(nextFile.getId());

					}

					myBulkExportCollectionDao.deleteByPid(nextCollection.getId());
				if (optJobInstanceForInstanceId.isEmpty()) {
					ourLog.error("Can't find job instance for ID: {} despite having retrieved it in the first step", jobInstance.getInstanceId());
					return null;
				}

				ourLog.debug("*** About to delete job with ID {}", job.getId());
				myBulkExportJobDao.deleteByPid(job.getId());
				final JobInstance jobInstanceForInstanceId = optJobInstanceForInstanceId.get();
				ourLog.info("Deleting bulk export job: {}", jobInstanceForInstanceId);

				// We need to keep these for investigation but we also need a process to manually delete these jobs once we're done investigating
				if (StatusEnum.FAILED == jobInstanceForInstanceId.getStatus()) {
					ourLog.info("skipping because the status is FAILED for ID: {}" + jobInstanceForInstanceId.getInstanceId());
					return null;
				}

				purgeBinariesIfNeeded(jobInstanceForInstanceId, jobInstanceForInstanceId.getReport());

				final String batch2BulkExportJobInstanceId = jobInstanceForInstanceId.getInstanceId();
				ourLog.debug("*** About to delete batch 2 bulk export job with ID {}", batch2BulkExportJobInstanceId);

				myJpaJobPersistence.deleteInstanceAndChunks(batch2BulkExportJobInstanceId);

				ourLog.info("Finished deleting bulk export job: {}", jobInstance.getInstanceId());

				return null;
			});

			ourLog.info("Finished deleting bulk export job: {}", jobToDelete.get());
			ourLog.info("Finished deleting bulk export jobs");
		}
	}

	private void purgeBinariesIfNeeded(JobInstance theJobInstanceForInstanceId, String theJobInstanceReportString) {
		final Optional<BulkExportJobResults> optBulkExportJobResults = getBulkExportJobResults(theJobInstanceReportString);

		if (optBulkExportJobResults.isPresent()) {
			final BulkExportJobResults bulkExportJobResults = optBulkExportJobResults.get();
			ourLog.debug("job: {} resource type to binary ID: {}", theJobInstanceForInstanceId.getInstanceId(), bulkExportJobResults.getResourceTypeToBinaryIds());

			final Map<String, List<String>> resourceTypeToBinaryIds = bulkExportJobResults.getResourceTypeToBinaryIds();
			for (String resourceType : resourceTypeToBinaryIds.keySet()) {
				final List<String> binaryIds = resourceTypeToBinaryIds.get(resourceType);
				for (String binaryId : binaryIds) {
					ourLog.info("Purging batch 2 bulk export binary: {}", binaryId);
					IIdType id = myBulkExportHelperSvc.toId(binaryId);
					getBinaryDao().delete(id, new SystemRequestDetails());
				}
			}
		} // else we can't know what the binary IDs are, so delete this job and move on
	}

	@SuppressWarnings("unchecked")
	private IFhirResourceDao<IBaseBinary> getBinaryDao() {
		return myDaoRegistry.getResourceDao("Binary");
		return myDaoRegistry.getResourceDao(Binary.class.getSimpleName());
	}

	@Nonnull
	private Optional<BulkExportJobResults> getBulkExportJobResults(String theJobInstanceReportString) {
		if (StringUtils.isBlank(theJobInstanceReportString)) {
			ourLog.error(String.format("Cannot parse job report string because it's null or blank: %s", theJobInstanceReportString));
			return Optional.empty();
		}

		try {
			return Optional.of(JsonUtil.deserialize(theJobInstanceReportString, BulkExportJobResults.class));
		} catch (Exception theException) {
			ourLog.error(String.format("Cannot parse job report string: %s", theJobInstanceReportString), theException);
			return Optional.empty();
		}
	}

	@Nonnull
	private Date computeCutoffFromConfig() {
		final int bulkExportFileRetentionPeriodHours = myDaoConfig.getBulkExportFileRetentionPeriodHours();

		final LocalDateTime cutoffLocalDateTime = LocalDateTime.now()
			.minusHours(bulkExportFileRetentionPeriodHours);

		return Date.from(cutoffLocalDateTime
			.atZone(ZoneId.systemDefault())
			.toInstant());
	}


	public static class PurgeExpiredFilesJob implements HapiJob {
		@Autowired
		private IBulkDataExportJobSchedulingHelper myTarget;
@@ -1,41 +0,0 @@
package ca.uhn.fhir.jpa.bulk.export.svc;

/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2023 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import ca.uhn.fhir.jpa.dao.data.IBulkExportCollectionFileDao;
import ca.uhn.fhir.jpa.entity.BulkExportCollectionFileEntity;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import org.springframework.transaction.annotation.Transactional;

@Service
public class BulkExportCollectionFileDaoSvc {

	@Autowired
	private IBulkExportCollectionFileDao myBulkExportCollectionFileDao;

	@Transactional
	public void save(BulkExportCollectionFileEntity theBulkExportCollectionEntity) {
		myBulkExportCollectionFileDao.saveAndFlush(theBulkExportCollectionEntity);
	}

}
@@ -1,5 +1,6 @@
package ca.uhn.fhir.jpa.config;

import ca.uhn.fhir.batch2.api.IJobPersistence;
import ca.uhn.fhir.batch2.jobs.expunge.DeleteExpungeJobSubmitterImpl;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.FhirVersionEnum;
@@ -16,6 +17,7 @@ import ca.uhn.fhir.jpa.binary.provider.BinaryAccessProvider;
import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportJobSchedulingHelper;
import ca.uhn.fhir.jpa.bulk.export.provider.BulkDataExportProvider;
import ca.uhn.fhir.jpa.bulk.export.svc.BulkDataExportJobSchedulingHelperImpl;
import ca.uhn.fhir.jpa.bulk.export.svc.BulkExportHelperService;
import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc;
import ca.uhn.fhir.jpa.bulk.imprt.svc.BulkDataImportSvcImpl;
import ca.uhn.fhir.jpa.cache.IResourceVersionSvc;
@@ -161,6 +163,7 @@ import org.springframework.data.jpa.repository.config.EnableJpaRepositories;
import org.springframework.scheduling.TaskScheduler;
import org.springframework.scheduling.concurrent.ConcurrentTaskScheduler;
import org.springframework.scheduling.concurrent.ScheduledExecutorFactoryBean;
import org.springframework.transaction.PlatformTransactionManager;

import javax.annotation.Nullable;
import java.io.IOException;
@@ -451,8 +454,8 @@ public class JpaConfig {
	}

	@Bean
	public IBulkDataExportJobSchedulingHelper bulkDataExportJobSchedulingHelper() {
		return new BulkDataExportJobSchedulingHelperImpl();
	public IBulkDataExportJobSchedulingHelper bulkDataExportJobSchedulingHelper(DaoRegistry theDaoRegistry, PlatformTransactionManager theTxManager, DaoConfig theDaoConfig, BulkExportHelperService theBulkExportHelperSvc, IJobPersistence theJpaJobPersistence) {
		return new BulkDataExportJobSchedulingHelperImpl(theDaoRegistry, theTxManager, theDaoConfig, theBulkExportHelperSvc, theJpaJobPersistence, null);
	}

	@Bean
@@ -681,6 +681,8 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B

		// Perform delete

		preDelete(resourceToDelete, entity, theRequest);

		updateEntityForDelete(theRequest, transactionDetails, entity);
		resourceToDelete.setId(entity.getIdDt());
@@ -28,6 +28,7 @@ import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;

import java.util.Date;
import java.util.List;
import java.util.Set;
@@ -60,6 +61,14 @@ public interface IBatch2JobInstanceRepository extends JpaRepository<Batch2JobIns
		Pageable thePageable
	);

	@Query("SELECT b from Batch2JobInstanceEntity b WHERE b.myDefinitionId = :defId AND b.myStatus IN( :stats ) AND b.myEndTime < :cutoff")
	List<Batch2JobInstanceEntity> findInstancesByJobIdAndStatusAndExpiry(
		@Param("defId") String theDefinitionId,
		@Param("stats") Set<StatusEnum> theStatus,
		@Param("cutoff") Date theCutoff,
		Pageable thePageable
	);

	@Query("SELECT e FROM Batch2JobInstanceEntity e WHERE e.myDefinitionId = :jobDefinitionId AND e.myStatus IN :statuses")
	List<Batch2JobInstanceEntity> fetchInstancesByJobDefinitionIdAndStatus(@Param("jobDefinitionId") String theJobDefinitionId, @Param("statuses") Set<StatusEnum> theIncompleteStatuses, Pageable thePageRequest);
@@ -1,39 +0,0 @@
package ca.uhn.fhir.jpa.dao.data;

import ca.uhn.fhir.jpa.entity.BulkExportCollectionEntity;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;

/*
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2023 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

@Deprecated
public interface IBulkExportCollectionDao extends JpaRepository<BulkExportCollectionEntity, Long>, IHapiFhirJpaRepository {

	@Modifying
	@Query("DELETE FROM BulkExportCollectionEntity t")
	void deleteAllFiles();

	@Modifying
	@Query("DELETE FROM BulkExportCollectionEntity t WHERE t.myId = :pid")
	void deleteByPid(@Param("pid") Long theId);
}
@@ -1,39 +0,0 @@
package ca.uhn.fhir.jpa.dao.data;

import ca.uhn.fhir.jpa.entity.BulkExportCollectionFileEntity;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;

/*
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2023 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

public interface IBulkExportCollectionFileDao extends JpaRepository<BulkExportCollectionFileEntity, Long>, IHapiFhirJpaRepository {

	@Modifying
	@Query("DELETE FROM BulkExportCollectionFileEntity t")
	void deleteAllFiles();

	@Modifying
	@Query("DELETE FROM BulkExportCollectionFileEntity t WHERE t.myId = :pid")
	void deleteByPid(@Param("pid") Long theId);

}
@@ -34,25 +34,6 @@ import java.util.Optional;

public interface IBulkExportJobDao extends JpaRepository<BulkExportJobEntity, Long>, IHapiFhirJpaRepository {

	@Query("SELECT j FROM BulkExportJobEntity j WHERE j.myJobId = :jobid")
	Optional<BulkExportJobEntity> findByJobId(@Param("jobid") String theUuid);

	@Query("SELECT j FROM BulkExportJobEntity j WHERE j.myStatus = :status")
	Slice<BulkExportJobEntity> findByStatus(Pageable thePage, @Param("status") BulkExportJobStatusEnum theSubmitted);

	@Query("SELECT j FROM BulkExportJobEntity j WHERE j.myExpiry < :cutoff")
	Slice<BulkExportJobEntity> findByExpiry(Pageable thePage, @Param("cutoff") Date theCutoff);

	@Query("SELECT j FROM BulkExportJobEntity j WHERE j.myExpiry IS NOT NULL and j.myExpiry < :cutoff AND j.myStatus <> 'BUILDING'")
	Slice<BulkExportJobEntity> findNotRunningByExpiry(Pageable thePage, @Param("cutoff") Date theCutoff);

	@Query("SELECT j FROM BulkExportJobEntity j WHERE j.myRequest = :request AND j.myCreated > :createdAfter AND j.myStatus <> :status ORDER BY j.myCreated DESC")
	Slice<BulkExportJobEntity> findExistingJob(Pageable thePage, @Param("request") String theRequest, @Param("createdAfter") Date theCreatedAfter, @Param("status") BulkExportJobStatusEnum theNotStatus);

	@Modifying
	@Query("DELETE FROM BulkExportJobEntity t")
	void deleteAllFiles();

	@Modifying
	@Query("DELETE FROM BulkExportJobEntity t WHERE t.myId = :pid")
	void deleteByPid(@Param("pid") Long theId);
@@ -0,0 +1,382 @@
package ca.uhn.fhir.jpa.bulk.export.svc;

import ca.uhn.fhir.batch2.api.IJobPersistence;
import ca.uhn.fhir.batch2.model.JobInstance;
import ca.uhn.fhir.batch2.model.StatusEnum;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.api.model.BulkExportJobResults;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import ca.uhn.fhir.util.Batch2JobDefinitionConstants;
import ca.uhn.fhir.util.JsonUtil;
import org.apache.commons.lang3.time.DateUtils;
import org.hl7.fhir.instance.model.api.IBaseBinary;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.Binary;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import org.mockito.stubbing.Answer;
import org.mockito.stubbing.OngoingStubbing;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.util.Pair;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.support.TransactionCallback;
import org.springframework.transaction.support.TransactionTemplate;

import javax.annotation.Nonnull;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.stream.IntStream;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

@ExtendWith(MockitoExtension.class)
public class BulkDataExportJobSchedulingHelperImplTest {
	@Mock
	private DaoConfig myDaoConfig;

	@Mock
	private PlatformTransactionManager myTxManager;

	@Mock
	private TransactionTemplate myTxTemplate;

	@Mock
	private IJobPersistence myJpaJobPersistence;

	@Mock
	private BulkExportHelperService myBulkExportHelperSvc;

	@Mock
	private DaoRegistry myDaoRegistry;

	@Mock
	private IFhirResourceDao<IBaseBinary> myBinaryDao;

	@Captor
	private ArgumentCaptor<Date> myCutoffCaptor;

	private BulkDataExportJobSchedulingHelperImpl myBulkDataExportJobSchedulingHelper;
	private final FhirContext myFhirContext = FhirContext.forR4Cached();

	@Test
	public void testPurgeExpiredFilesDisabledDoesNothing() {
		setupTestDisabled();

		myBulkDataExportJobSchedulingHelper.purgeExpiredFiles();

		verify(myJpaJobPersistence, never()).fetchInstance(anyString());
		verify(myBulkExportHelperSvc, never()).toId(anyString());
		verify(myJpaJobPersistence, never()).deleteInstanceAndChunks(anyString());
	}

	@Test
	public void purgeExpiredFilesNothingToDeleteOneHourRetention() {
		final int expectedRetentionHours = 1;

		setupTestEnabled(expectedRetentionHours, List.of());

		myBulkDataExportJobSchedulingHelper.purgeExpiredFiles();

		verify(myJpaJobPersistence, never()).fetchInstance(anyString());
		verify(myBulkExportHelperSvc, never()).toId(anyString());
		verify(myBinaryDao, never()).delete(any(IIdType.class), any(SystemRequestDetails.class));
		verify(myJpaJobPersistence, never()).deleteInstanceAndChunks(anyString());

		final Date cutoffDate = myCutoffCaptor.getValue();
		assertEquals(DateUtils.truncate(computeDateFromConfig(expectedRetentionHours), Calendar.SECOND), DateUtils.truncate(cutoffDate, Calendar.SECOND));
	}

	@Test
	public void purgeExpiredFilesSingleJobSingleBinaryOneHourRetention_NULL_reportString() {
		final int expectedRetentionHours = 1;
		final int numBinariesPerJob = 1;
		final List<JobInstance> jobInstances = getJobInstances(numBinariesPerJob, StatusEnum.COMPLETED);

		jobInstances.get(0).setReport(null);

		setupTestEnabledNoBinaries(expectedRetentionHours, jobInstances);

		myBulkDataExportJobSchedulingHelper.purgeExpiredFiles();

		for (JobInstance jobInstance : jobInstances) {
			verify(myJpaJobPersistence).fetchInstance(jobInstance.getInstanceId());
			verify(myBulkExportHelperSvc, never()).toId(anyString());
			verify(myBinaryDao, never()).delete(any(IIdType.class), any(SystemRequestDetails.class));
			verify(myJpaJobPersistence).deleteInstanceAndChunks(jobInstance.getInstanceId());
		}

		final Date cutoffDate = myCutoffCaptor.getValue();
		assertEquals(DateUtils.truncate(computeDateFromConfig(expectedRetentionHours), Calendar.MINUTE), DateUtils.truncate(cutoffDate, Calendar.MINUTE));
	}

	@Test
	public void purgeExpiredFilesSingleJobSingleBinaryOneHourRetention_BAD_reportString() {
		final int expectedRetentionHours = 1;
		final int numBinariesPerJob = 1;
		final List<JobInstance> jobInstances = getJobInstances(numBinariesPerJob, StatusEnum.COMPLETED);

		jobInstances.get(0).setReport("{garbage}");

		setupTestEnabledNoBinaries(expectedRetentionHours, jobInstances);

		myBulkDataExportJobSchedulingHelper.purgeExpiredFiles();

		for (JobInstance jobInstance : jobInstances) {
			verify(myJpaJobPersistence).fetchInstance(jobInstance.getInstanceId());
			verify(myBulkExportHelperSvc, never()).toId(anyString());
			verify(myBinaryDao, never()).delete(any(IIdType.class), any(SystemRequestDetails.class));
			verify(myJpaJobPersistence).deleteInstanceAndChunks(jobInstance.getInstanceId());
		}

		final Date cutoffDate = myCutoffCaptor.getValue();
		assertEquals(DateUtils.truncate(computeDateFromConfig(expectedRetentionHours), Calendar.MINUTE), DateUtils.truncate(cutoffDate, Calendar.MINUTE));
	}

	@Test
	public void purgeExpiredFilesSingleJobSingleBinaryOneHourRetention() {
		final int expectedRetentionHours = 1;
		final int numBinariesPerJob = 1;
		final List<JobInstance> jobInstances = getJobInstances(numBinariesPerJob, StatusEnum.COMPLETED);

		setupTestEnabled(expectedRetentionHours, jobInstances);

		myBulkDataExportJobSchedulingHelper.purgeExpiredFiles();

		for (JobInstance jobInstance : jobInstances) {
			verify(myJpaJobPersistence).fetchInstance(jobInstance.getInstanceId());
			for (int index = 0; index < numBinariesPerJob; index++) {
				verify(myBulkExportHelperSvc).toId(jobInstance.getInstanceId() + "-binary-" + index);
				verify(myBinaryDao).delete(eq(toId(jobInstance.getInstanceId() + "-binary-" + index)), any(SystemRequestDetails.class));
			}
			verify(myJpaJobPersistence).deleteInstanceAndChunks(jobInstance.getInstanceId());
		}

		final Date cutoffDate = myCutoffCaptor.getValue();
		assertEquals(DateUtils.truncate(computeDateFromConfig(expectedRetentionHours), Calendar.MINUTE), DateUtils.truncate(cutoffDate, Calendar.MINUTE));
	}

	@Test
	public void purgeExpiredFilesSingleJobSingleBinaryOneHourRetentionStatusFailed() {
		final int expectedRetentionHours = 1;
		final int numBinariesPerJob = 1;
		final List<JobInstance> jobInstances = getJobInstances(numBinariesPerJob, StatusEnum.COMPLETED);

		setupTestEnabled(expectedRetentionHours, jobInstances);

		myBulkDataExportJobSchedulingHelper.purgeExpiredFiles();

		for (JobInstance jobInstance : jobInstances) {
			verify(myJpaJobPersistence).fetchInstance(jobInstance.getInstanceId());
			for (int index = 0; index < numBinariesPerJob; index++) {
				verify(myBulkExportHelperSvc).toId(jobInstance.getInstanceId() + "-binary-" + index);
				verify(myBinaryDao).delete(eq(toId(jobInstance.getInstanceId() + "-binary-" + index)), any(SystemRequestDetails.class));
			}
			verify(myJpaJobPersistence).deleteInstanceAndChunks(jobInstance.getInstanceId());
		}

		final Date cutoffDate = myCutoffCaptor.getValue();
		assertEquals(DateUtils.truncate(computeDateFromConfig(expectedRetentionHours), Calendar.MINUTE), DateUtils.truncate(cutoffDate, Calendar.MINUTE));
	}

	@Test
	public void purgeExpiredFilesSingleJobSingleBinaryTwoHourRetention() {
		final int expectedRetentionHours = 2;
		final int numBinariesPerJob = 1;
		final List<JobInstance> jobInstances = getJobInstances(numBinariesPerJob, StatusEnum.COMPLETED);

		setupTestEnabled(expectedRetentionHours, jobInstances);

		myBulkDataExportJobSchedulingHelper.purgeExpiredFiles();

		for (JobInstance jobInstance : jobInstances) {
			verify(myJpaJobPersistence).fetchInstance(jobInstance.getInstanceId());
			for (int index = 0; index < numBinariesPerJob; index++) {
				verify(myBulkExportHelperSvc).toId(jobInstance.getInstanceId() + "-binary-" + index);
				verify(myBinaryDao).delete(eq(toId(jobInstance.getInstanceId() + "-binary-" + index)), any(SystemRequestDetails.class));
			}
			verify(myJpaJobPersistence).deleteInstanceAndChunks(jobInstance.getInstanceId());
		}

		final Date cutoffDate = myCutoffCaptor.getValue();

		assertEquals(DateUtils.truncate(computeDateFromConfig(expectedRetentionHours), Calendar.MINUTE), DateUtils.truncate(cutoffDate, Calendar.MINUTE));
	}

	@Test
	public void purgeExpiredFilesMultipleJobsMultipleBinariesTwoHourRetention() {
		final int expectedRetentionHours = 2;
		final int numBinariesPerJob = 3;
		final List<JobInstance> jobInstances = getJobInstances( numBinariesPerJob, StatusEnum.COMPLETED, StatusEnum.COMPLETED, StatusEnum.COMPLETED);

		setupTestEnabled(expectedRetentionHours, jobInstances);

		myBulkDataExportJobSchedulingHelper.purgeExpiredFiles();

		for (JobInstance jobInstance : jobInstances) {
			verify(myJpaJobPersistence).fetchInstance(jobInstance.getInstanceId());
			for (int index = 0; index < numBinariesPerJob; index++) {
				verify(myBulkExportHelperSvc).toId(jobInstance.getInstanceId() + "-binary-" + index);
				verify(myBinaryDao).delete(eq(toId(jobInstance.getInstanceId() + "-binary-" + index)), any(SystemRequestDetails.class));
			}
			verify(myJpaJobPersistence).deleteInstanceAndChunks(jobInstance.getInstanceId());
		}

		final Date cutoffDate = myCutoffCaptor.getValue();

		assertEquals(DateUtils.truncate(computeDateFromConfig(expectedRetentionHours), Calendar.MINUTE), DateUtils.truncate(cutoffDate, Calendar.MINUTE));
	}

	@Test
	public void purgeExpiredFilesMultipleJobsMultipleBinariesTwoHourRetentionMixedStatuses() {
		final int expectedRetentionHours = 2;
		final int numBinariesPerJob = 3;
		final List<JobInstance> jobInstances = getJobInstances( numBinariesPerJob, StatusEnum.COMPLETED, StatusEnum.FAILED, StatusEnum.COMPLETED);

		setupTestEnabled(expectedRetentionHours, jobInstances);

		myBulkDataExportJobSchedulingHelper.purgeExpiredFiles();

		for (JobInstance jobInstance : jobInstances) {
			verify(myJpaJobPersistence).fetchInstance(jobInstance.getInstanceId());
			if (StatusEnum.FAILED != jobInstance.getStatus()) {
				for (int index = 0; index < numBinariesPerJob; index++) {
					verify(myBulkExportHelperSvc).toId(jobInstance.getInstanceId() + "-binary-" + index);
					verify(myBinaryDao).delete(eq(toId(jobInstance.getInstanceId() + "-binary-" + index)), any(SystemRequestDetails.class));
				}

				verify(myJpaJobPersistence).deleteInstanceAndChunks(jobInstance.getInstanceId());
			}
		}

		final Date cutoffDate = myCutoffCaptor.getValue();

		assertEquals(DateUtils.truncate(computeDateFromConfig(expectedRetentionHours), Calendar.MINUTE), DateUtils.truncate(cutoffDate, Calendar.MINUTE));
	}

	@Nonnull
	private static List<JobInstance> getJobInstances(int theNumBinaries, StatusEnum... theStatusEnums) {
		return IntStream.range(0, theStatusEnums.length)
			.mapToObj(index -> Pair.of(index, theStatusEnums[index]))
			.map(pair -> {
				final JobInstance jobInstance = new JobInstance();
				final StatusEnum status = pair.getSecond();
				final String instanceId = status.name() + pair.getFirst();
				jobInstance.setInstanceId(instanceId);
				jobInstance.setReport(serialize(getBulkExportJobResults(instanceId, theNumBinaries)));
				jobInstance.setStatus(status);
				return jobInstance;
			}).toList();
	}

	private static String serialize(BulkExportJobResults theBulkExportJobResults) {
		return JsonUtil.serialize(theBulkExportJobResults);
	}

	@Nonnull
	private static BulkExportJobResults getBulkExportJobResults(String theInstanceId, int theNumBinaries) {
		final BulkExportJobResults bulkExportJobResults = new BulkExportJobResults();
		bulkExportJobResults.setResourceTypeToBinaryIds(Map.of("Patient",
			IntStream.range(0, theNumBinaries)
				.mapToObj(theInt -> theInstanceId + "-binary-" + theInt)
				.toList()));
		return bulkExportJobResults;
	}

	@Nonnull
	private Date computeDateFromConfig(int theExpectedRetentionHours) {
		return Date.from(LocalDateTime.now()
			.minusHours(theExpectedRetentionHours)
			.atZone(ZoneId.systemDefault())
			.toInstant());
	}

	private void setupTestDisabled() {
		setupTest(false, -1, List.of(), false);
	}

	private void setupTestEnabled(int theRetentionHours, List<JobInstance> theJobInstances) {
|
||||
setupTest(true, theRetentionHours, theJobInstances, true);
|
||||
}
|
||||
|
||||
private void setupTestEnabledNoBinaries(int theRetentionHours, List<JobInstance> theJobInstances) {
|
||||
setupTest(true, theRetentionHours, theJobInstances, false);
|
||||
}
|
||||
|
||||
private void setupTest(boolean theIsEnabled, int theRetentionHours, List<JobInstance> theJobInstances, boolean theIsEnableBinaryMocks) {
|
||||
myBulkDataExportJobSchedulingHelper = new BulkDataExportJobSchedulingHelperImpl(myDaoRegistry, myTxManager, myDaoConfig, myBulkExportHelperSvc, myJpaJobPersistence, myTxTemplate);
|
||||
|
||||
when(myDaoConfig.isEnableTaskBulkExportJobExecution()).thenReturn(theIsEnabled);
|
||||
|
||||
if (!theIsEnabled) {
|
||||
return;
|
||||
}
|
||||
|
||||
|
||||
final Answer<List<JobInstance>> fetchInstancesAnswer = theInvocationOnMock -> {
|
||||
final TransactionCallback<List<JobInstance>> transactionCallback = theInvocationOnMock.getArgument(0);
|
||||
return transactionCallback.doInTransaction(null);
|
||||
};
|
||||
|
||||
final Answer<Void> purgeExpiredJobsAnswer = theInvocationOnMock -> {
|
||||
final TransactionCallback<Optional<JobInstance>> transactionCallback = theInvocationOnMock.getArgument(0);
|
||||
transactionCallback.doInTransaction(null);
|
||||
return null;
|
||||
};
|
||||
|
||||
when(myJpaJobPersistence.fetchInstances(eq(Batch2JobDefinitionConstants.BULK_EXPORT),
|
||||
eq(StatusEnum.getEndedStatuses()),
|
||||
myCutoffCaptor.capture(),
|
||||
any(PageRequest.class)))
|
||||
.thenReturn(theJobInstances);
|
||||
|
||||
when(myTxTemplate.execute(any()))
|
||||
.thenAnswer(fetchInstancesAnswer).thenAnswer(purgeExpiredJobsAnswer);
|
||||
|
||||
when(myDaoConfig.getBulkExportFileRetentionPeriodHours())
|
||||
.thenReturn(theRetentionHours);
|
||||
|
||||
if (theJobInstances.isEmpty()) {
|
||||
return;
|
||||
}
|
||||
|
||||
OngoingStubbing<Optional<JobInstance>> when = when(myJpaJobPersistence.fetchInstance(anyString()));
|
||||
|
||||
for (JobInstance jobInstance : theJobInstances) {
|
||||
when = when.thenReturn(Optional.of(jobInstance));
|
||||
}
|
||||
|
||||
if (!theIsEnableBinaryMocks) {
|
||||
return;
|
||||
}
|
||||
|
||||
when(myBulkExportHelperSvc.toId(anyString()))
|
||||
.thenAnswer(theInvocationOnMock -> toId(theInvocationOnMock.getArgument(0)));
|
||||
|
||||
when(myDaoRegistry.getResourceDao(Binary.class.getSimpleName())).thenReturn(myBinaryDao);
|
||||
}
|
||||
|
||||
private IIdType toId(String theResourceId) {
|
||||
final IIdType retVal = myFhirContext.getVersion().newIdType();
|
||||
retVal.setValue(theResourceId);
|
||||
return retVal;
|
||||
}
|
||||
}
|
|
@ -21,8 +21,11 @@ import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.PageRequest;

import javax.annotation.Nonnull;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashSet;

@ -133,6 +136,86 @@ public class JpaJobPersistenceImplTest extends BaseJpaR4Test {
		});
	}

	@Test
	public void testFetchInstanceWithStatusAndCutoff_statuses() {
		myCaptureQueriesListener.clear();

		final String completedId = storeJobInstanceAndUpdateWithEndTime(StatusEnum.COMPLETED, 1);
		final String failedId = storeJobInstanceAndUpdateWithEndTime(StatusEnum.FAILED, 1);
		final String erroredId = storeJobInstanceAndUpdateWithEndTime(StatusEnum.ERRORED, 1);
		final String cancelledId = storeJobInstanceAndUpdateWithEndTime(StatusEnum.CANCELLED, 1);
		storeJobInstanceAndUpdateWithEndTime(StatusEnum.QUEUED, 1);
		storeJobInstanceAndUpdateWithEndTime(StatusEnum.IN_PROGRESS, 1);
		storeJobInstanceAndUpdateWithEndTime(StatusEnum.FINALIZE, 1);

		final LocalDateTime cutoffLocalDateTime = LocalDateTime.now()
			.minusMinutes(0);
		final Date cutoffDate = Date.from(cutoffLocalDateTime
			.atZone(ZoneId.systemDefault())
			.toInstant());

		final List<JobInstance> jobInstancesByCutoff =
			mySvc.fetchInstances(JOB_DEFINITION_ID, StatusEnum.getEndedStatuses(), cutoffDate, PageRequest.of(0, 100));

		assertEquals(Set.of(completedId, failedId, erroredId, cancelledId),
			jobInstancesByCutoff.stream()
				.map(JobInstance::getInstanceId)
				.collect(Collectors.toUnmodifiableSet()));
	}

	@Test
	public void testFetchInstanceWithStatusAndCutoff_cutoffs() {
		myCaptureQueriesListener.clear();

		storeJobInstanceAndUpdateWithEndTime(StatusEnum.COMPLETED, 3);
		storeJobInstanceAndUpdateWithEndTime(StatusEnum.COMPLETED, 4);
		final String sevenMinutesAgoId = storeJobInstanceAndUpdateWithEndTime(StatusEnum.COMPLETED, 7);
		final String eightMinutesAgoId = storeJobInstanceAndUpdateWithEndTime(StatusEnum.COMPLETED, 8);

		final LocalDateTime cutoffLocalDateTime = LocalDateTime.now()
			.minusMinutes(6);

		final Date cutoffDate = Date.from(cutoffLocalDateTime
			.atZone(ZoneId.systemDefault())
			.toInstant());

		final List<JobInstance> jobInstancesByCutoff =
			mySvc.fetchInstances(JOB_DEFINITION_ID, StatusEnum.getEndedStatuses(), cutoffDate, PageRequest.of(0, 100));

		myCaptureQueriesListener.logSelectQueries();
		myCaptureQueriesListener.getSelectQueries().forEach(query -> ourLog.info("query: {}", query.getSql(true, true)));

		assertEquals(Set.of(sevenMinutesAgoId, eightMinutesAgoId),
			jobInstancesByCutoff.stream()
				.map(JobInstance::getInstanceId)
				.collect(Collectors.toUnmodifiableSet()));
	}

	@Test
	public void testFetchInstanceWithStatusAndCutoff_pages() {
		final String job1 = storeJobInstanceAndUpdateWithEndTime(StatusEnum.COMPLETED, 5);
		final String job2 = storeJobInstanceAndUpdateWithEndTime(StatusEnum.COMPLETED, 5);
		storeJobInstanceAndUpdateWithEndTime(StatusEnum.COMPLETED, 5);
		storeJobInstanceAndUpdateWithEndTime(StatusEnum.COMPLETED, 5);
		storeJobInstanceAndUpdateWithEndTime(StatusEnum.COMPLETED, 5);
		storeJobInstanceAndUpdateWithEndTime(StatusEnum.COMPLETED, 5);

		final LocalDateTime cutoffLocalDateTime = LocalDateTime.now()
			.minusMinutes(0);

		final Date cutoffDate = Date.from(cutoffLocalDateTime
			.atZone(ZoneId.systemDefault())
			.toInstant());

		final List<JobInstance> jobInstancesByCutoff =
			mySvc.fetchInstances(JOB_DEFINITION_ID, StatusEnum.getEndedStatuses(), cutoffDate, PageRequest.of(0, 2));

		assertEquals(Set.of(job1, job2),
			jobInstancesByCutoff.stream()
				.map(JobInstance::getInstanceId)
				.collect(Collectors.toUnmodifiableSet()));
	}

	/**
	 * Returns a set of statuses, and whether they should be successfully picked up and started by a consumer.
	 * @return
@ -548,4 +631,29 @@ public class JpaJobPersistenceImplTest extends BaseJpaR4Test {
		return instance;
	}

	@Nonnull
	private String storeJobInstanceAndUpdateWithEndTime(StatusEnum theStatus, int minutes) {
		final JobInstance jobInstance = new JobInstance();

		jobInstance.setJobDefinitionId(JOB_DEFINITION_ID);
		jobInstance.setStatus(theStatus);
		jobInstance.setJobDefinitionVersion(JOB_DEF_VER);
		jobInstance.setParameters(CHUNK_DATA);
		jobInstance.setReport("TEST");

		final String id = mySvc.storeNewInstance(jobInstance);

		jobInstance.setInstanceId(id);
		final LocalDateTime localDateTime = LocalDateTime.now()
			.minusMinutes(minutes);
		ourLog.info("localDateTime: {}", localDateTime);
		jobInstance.setEndTime(Date.from(localDateTime
			.atZone(ZoneId.systemDefault())
			.toInstant()));

		mySvc.updateInstance(jobInstance);

		return id;
	}
}

@ -9,6 +9,9 @@ import ca.uhn.fhir.jpa.dao.data.ISearchDao;
import ca.uhn.fhir.jpa.dao.data.ISearchResultDao;
import ca.uhn.fhir.jpa.dao.expunge.ExpungeService;
import ca.uhn.fhir.jpa.delete.ThreadSafeResourceDeleterSvc;
import ca.uhn.fhir.jpa.entity.TermCodeSystem;
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
import ca.uhn.fhir.jpa.entity.TermConcept;
import ca.uhn.fhir.jpa.interceptor.CascadingDeleteInterceptor;
import ca.uhn.fhir.jpa.model.entity.NormalizedQuantitySearchLevel;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;

@ -35,6 +38,7 @@ import org.apache.http.client.methods.HttpDelete;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.BooleanType;
import org.hl7.fhir.r4.model.CodeSystem;
import org.hl7.fhir.r4.model.CodeableConcept;
import org.hl7.fhir.r4.model.DateType;
import org.hl7.fhir.r4.model.DecimalType;

@ -46,6 +50,7 @@ import org.hl7.fhir.r4.model.Patient;
import org.hl7.fhir.r4.model.Quantity;
import org.hl7.fhir.r4.model.Reference;
import org.hl7.fhir.r4.model.SearchParameter;
import org.hl7.fhir.r4.model.StringType;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

@ -58,11 +63,13 @@ import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;

import static ca.uhn.fhir.batch2.jobs.termcodesystem.TermCodeSystemJobConfig.TERM_CODE_SYSTEM_DELETE_JOB_NAME;
import static org.awaitility.Awaitility.await;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.not;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;

@ -77,6 +84,9 @@ public class ExpungeR4Test extends BaseResourceProviderR4Test {
	private IIdType myOneVersionObservationId;
	private IIdType myTwoVersionObservationId;
	private IIdType myDeletedObservationId;
	private IIdType myOneVersionCodeSystemId;
	private IIdType myTwoVersionCodeSystemIdV1;
	private IIdType myTwoVersionCodeSystemIdV2;
	@Autowired
	private ISearchDao mySearchEntityDao;
	@Autowired

@ -200,6 +210,40 @@ public class ExpungeR4Test extends BaseResourceProviderR4Test {

	}

	public void createStandardCodeSystems() {
		CodeSystem codeSystem1 = new CodeSystem();
		codeSystem1.setUrl(URL_MY_CODE_SYSTEM);
		codeSystem1.setName("CS1-V1");
		codeSystem1.setVersion("1");
		codeSystem1.setContent(CodeSystem.CodeSystemContentMode.COMPLETE);
		codeSystem1
			.addConcept().setCode("C").setDisplay("Code C").addDesignation(
				new CodeSystem.ConceptDefinitionDesignationComponent().setLanguage("en").setValue("CodeCDesignation")).addProperty(
				new CodeSystem.ConceptPropertyComponent().setCode("CodeCProperty").setValue(new StringType("CodeCPropertyValue"))
			)
			.addConcept(new CodeSystem.ConceptDefinitionComponent().setCode("CA").setDisplay("Code CA")
				.addConcept(new CodeSystem.ConceptDefinitionComponent().setCode("CAA").setDisplay("Code CAA"))
			)
			.addConcept(new CodeSystem.ConceptDefinitionComponent().setCode("CB").setDisplay("Code CB"));
		codeSystem1
			.addConcept().setCode("D").setDisplay("Code D");
		myOneVersionCodeSystemId = myCodeSystemDao.create(codeSystem1).getId();

		CodeSystem cs2v1 = new CodeSystem();
		cs2v1.setUrl(URL_MY_CODE_SYSTEM_2);
		cs2v1.setVersion("1");
		cs2v1.setName("CS2-V1");
		cs2v1.addConcept().setCode("E").setDisplay("Code E");
		myTwoVersionCodeSystemIdV1 = myCodeSystemDao.create(cs2v1).getId();

		CodeSystem cs2v2 = new CodeSystem();
		cs2v2.setUrl(URL_MY_CODE_SYSTEM_2);
		cs2v2.setVersion("2");
		cs2v2.setName("CS2-V2");
		cs2v2.addConcept().setCode("F").setDisplay("Code F");
		myTwoVersionCodeSystemIdV2 = myCodeSystemDao.create(cs2v2).getId();
	}

	private IFhirResourceDao<?> getDao(IIdType theId) {
		IFhirResourceDao<?> dao;
		switch (theId.getResourceType()) {

@ -209,6 +253,9 @@ public class ExpungeR4Test extends BaseResourceProviderR4Test {
			case "Observation":
				dao = myObservationDao;
				break;
			case "CodeSystem":
				dao = myCodeSystemDao;
				break;
			default:
				fail("Restype: " + theId.getResourceType());
				dao = myPatientDao;

@ -809,6 +856,38 @@ public class ExpungeR4Test extends BaseResourceProviderR4Test {
		assertTrue(actualRemainingPatientHistoryRecords <= maximumRemainingPatientHistoryRecords);
	}

	@Test
	public void testDeleteCodeSystemByUrlThenExpunge() {
		createStandardCodeSystems();

		myCodeSystemDao.deleteByUrl("CodeSystem?url=" + URL_MY_CODE_SYSTEM, null);
		myTerminologyDeferredStorageSvc.saveDeferred();
		myBatch2JobHelper.awaitAllJobsOfJobDefinitionIdToComplete(TERM_CODE_SYSTEM_DELETE_JOB_NAME);
		myCodeSystemDao.expunge(new ExpungeOptions()
			.setExpungeDeletedResources(true)
			.setExpungeOldVersions(true), null);

		assertExpunged(myOneVersionCodeSystemId);
		assertStillThere(myTwoVersionCodeSystemIdV1);
		assertStillThere(myTwoVersionCodeSystemIdV2);
		runInTransaction(() -> {
			verifyOneVersionCodeSystemChildrenExpunged();
			verifyTwoVersionCodeSystemV1AndChildrenStillThere();
			verifyTwoVersionCodeSystemV2AndChildrenStillThere();
		});

		myCodeSystemDao.deleteByUrl("CodeSystem?url=" + URL_MY_CODE_SYSTEM_2, null);
		myTerminologyDeferredStorageSvc.saveDeferred();
		myBatch2JobHelper.awaitAllJobsOfJobDefinitionIdToComplete(TERM_CODE_SYSTEM_DELETE_JOB_NAME);
		myCodeSystemDao.expunge(new ExpungeOptions()
			.setExpungeDeletedResources(true)
			.setExpungeOldVersions(true), null);

		assertExpunged(myTwoVersionCodeSystemIdV1);
		assertExpunged(myTwoVersionCodeSystemIdV2);
		runInTransaction(this::verifyCodeSystemsAndChildrenExpunged);
	}

	private List<Patient> createPatientsWithForcedIds(int theNumPatients) {
		RequestDetails requestDetails = new SystemRequestDetails();
		List<Patient> createdPatients = new ArrayList<>();

@ -839,4 +918,60 @@ public class ExpungeR4Test extends BaseResourceProviderR4Test {
			myPatientDao.delete(patient.getIdElement(), requestDetails);
		}
	}

	private void verifyOneVersionCodeSystemChildrenExpunged() {
		List<TermCodeSystemVersion> myOneVersionCodeSystemVersions = myTermCodeSystemVersionDao.findByCodeSystemResourcePid(myOneVersionCodeSystemId.getIdPartAsLong());
		assertEquals(0, myOneVersionCodeSystemVersions.size());
		assertThat(myTermConceptDesignationDao.findAll(), empty());
		assertThat(myTermConceptPropertyDao.findAll(), empty());
		assertThat(myTermConceptParentChildLinkDao.findAll(), empty());
		List<TermConcept> existingCodeSystemConcepts = myTermConceptDao.findAll();
		for (TermConcept tc : existingCodeSystemConcepts) {
			if (tc.getCode().charAt(0) == 'C' || tc.getCode().charAt(0) == 'D') {
				fail();
			}
		}
	}

	private void verifyTwoVersionCodeSystemV1AndChildrenStillThere() {
		TermCodeSystem myTwoVersionCodeSystem = myTermCodeSystemDao.findByResourcePid(myTwoVersionCodeSystemIdV2.getIdPartAsLong());
		TermCodeSystemVersion myTwoVersionCodeSystemVersion1 = verifyTermCodeSystemVersionExistsWithDisplayName("CS2-V1");
		assertNotEquals(myTwoVersionCodeSystem.getCurrentVersion().getPid(), myTwoVersionCodeSystemVersion1.getPid());
		List<TermConcept> myTwoVersionCodeSystemVersion1Concepts = new ArrayList<>(myTwoVersionCodeSystemVersion1.getConcepts());
		assertEquals(1, myTwoVersionCodeSystemVersion1Concepts.size());
		TermConcept conceptE = myTwoVersionCodeSystemVersion1Concepts.get(0);
		assertEquals("E", conceptE.getCode());
	}

	private void verifyTwoVersionCodeSystemV2AndChildrenStillThere() {
		TermCodeSystem myTwoVersionCodeSystem = myTermCodeSystemDao.findByResourcePid(myTwoVersionCodeSystemIdV2.getIdPartAsLong());
		TermCodeSystemVersion myTwoVersionCodeSystemVersion2 = verifyTermCodeSystemVersionExistsWithDisplayName("CS2-V2");
		assertEquals(myTwoVersionCodeSystem.getCurrentVersion().getPid(), myTwoVersionCodeSystemVersion2.getPid());
		List<TermConcept> myTwoVersionCodeSystemVersion2Concepts = new ArrayList<>(myTwoVersionCodeSystemVersion2.getConcepts());
		assertEquals(1, myTwoVersionCodeSystemVersion2Concepts.size());
		TermConcept conceptF = myTwoVersionCodeSystemVersion2Concepts.get(0);
		assertEquals("F", conceptF.getCode());
	}

	private TermCodeSystemVersion verifyTermCodeSystemVersionExistsWithDisplayName(String theDisplayName) {
		List<TermCodeSystemVersion> myCodeSystemVersions = myTermCodeSystemVersionDao.findAll();
		for (TermCodeSystemVersion csv : myCodeSystemVersions) {
			if (csv.getCodeSystemDisplayName().equals(theDisplayName)) {
				return csv;
			}
		}
		fail();
		return null;
	}

	private void verifyCodeSystemsAndChildrenExpunged() {
		assertThat(myTermCodeSystemVersionDao.findAll(), empty());
		assertThat(myTermConceptDesignationDao.findAll(), empty());
		assertThat(myTermConceptPropertyDao.findAll(), empty());
		assertThat(myTermConceptParentChildLinkDao.findAll(), empty());
		assertThat(myTermConceptDao.findAll(), empty());
		assertThat(myResourceTableDao.findAll(), empty());
		assertThat(myResourceHistoryTableDao.findAll(), empty());
		assertThat(myForcedIdDao.findAll(), empty());
	}
}

@ -60,6 +60,7 @@ import ca.uhn.fhir.jpa.dao.data.ISearchIncludeDao;
import ca.uhn.fhir.jpa.dao.data.ISearchParamPresentDao;
import ca.uhn.fhir.jpa.dao.data.ISearchResultDao;
import ca.uhn.fhir.jpa.dao.data.ITagDefinitionDao;
import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemDao;
import ca.uhn.fhir.jpa.dao.data.ITermConceptDao;
import ca.uhn.fhir.jpa.dao.data.ITermConceptDesignationDao;
import ca.uhn.fhir.jpa.dao.data.ITermConceptMapDao;

@ -216,6 +217,7 @@ public abstract class BaseJpaR4Test extends BaseJpaTest implements ITestDataBuil
	public static final String MY_VALUE_SET = "my-value-set";
	public static final String URL_MY_VALUE_SET = "http://example.com/my_value_set";
	public static final String URL_MY_CODE_SYSTEM = "http://example.com/my_code_system";
	public static final String URL_MY_CODE_SYSTEM_2 = "http://example.com/my_code_system_2";

	@Autowired
	protected IPackageInstallerSvc myPackageInstallerSvc;

@ -293,6 +295,8 @@ public abstract class BaseJpaR4Test extends BaseJpaTest implements ITestDataBuil
	@Qualifier("myCodeSystemDaoR4")
	protected IFhirResourceDaoCodeSystem<CodeSystem> myCodeSystemDao;
	@Autowired
	protected ITermCodeSystemDao myTermCodeSystemDao;
	@Autowired
	protected ITermConceptParentChildLinkDao myTermConceptParentChildLinkDao;
	@Autowired
	@Qualifier("myCompartmentDefinitionDaoR4")

@ -34,7 +34,7 @@ import static org.apache.commons.lang3.StringUtils.isBlank;
 * The approach used in this class is borrowed from org.flywaydb.community.database.ignite.thin.IgniteThinDatabase
 */
public class HapiMigrationLock implements AutoCloseable {
	static final Integer LOCK_PID = -100;
	public static final Integer LOCK_PID = -100;
	private static final Logger ourLog = LoggerFactory.getLogger(HapiMigrationLock.class);
	public static final int SLEEP_MILLIS_BETWEEN_LOCK_RETRIES = 1000;
	public static final int DEFAULT_MAX_RETRY_ATTEMPTS = 50;

@ -111,7 +111,11 @@ public class HapiMigrationLock implements AutoCloseable {

	private boolean insertLockingRow() {
		try {
			return myMigrationStorageSvc.insertLockRecord(myLockDescription);
			boolean storedSuccessfully = myMigrationStorageSvc.insertLockRecord(myLockDescription);
			if (storedSuccessfully) {
				ourLog.info("Migration Lock Row added. [uuid={}]", myLockDescription);
			}
			return storedSuccessfully;
		} catch (Exception e) {
			ourLog.debug("Failed to insert lock record: {}", e.getMessage());
			return false;

@ -31,7 +31,7 @@ import java.util.Set;

public class HapiMigrationStorageSvc {
	public static final String UNKNOWN_VERSION = "unknown";
	private static final String LOCK_TYPE = "hapi-fhir-lock";
	public static final String LOCK_TYPE = "hapi-fhir-lock";

	private final HapiMigrationDao myHapiMigrationDao;

@ -100,6 +100,20 @@ public class HapiMigrator {
		return statementBuilder;
	}

	/**
	 * Helper method to clear a lock with a given UUID.
	 * @param theUUID the UUID of the lock record to clear
	 */
	public void clearMigrationLockWithUUID(String theUUID) {
		ourLog.info("Attempting to remove lock entry. [uuid={}]", theUUID);
		boolean success = myHapiMigrationStorageSvc.deleteLockRecord(theUUID);
		if (success) {
			ourLog.info("Successfully removed lock entry. [uuid={}]", theUUID);
		} else {
			ourLog.error("Did not successfully remove lock entry. [uuid={}]", theUUID);
		}
	}

	public MigrationResult migrate() {
		ourLog.info("Loaded {} migration tasks", myTaskList.size());
		MigrationResult retval = new MigrationResult();

@ -31,7 +31,6 @@ import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import org.hibernate.annotations.GenericGenerator;
import java.util.Date;

// Note: even though we are using javax.persistence annotations here, we are managing these records outside of JPA

@ -36,6 +36,8 @@ import org.springframework.context.annotation.Scope;
@Configuration
public class BulkExportAppCtx {

	public static final String WRITE_TO_BINARIES = "write-to-binaries";

	@Bean
	public JobDefinition bulkExportJobDefinition() {
		JobDefinition.Builder<IModelJson, VoidModel> builder = JobDefinition.newBuilder();

@ -63,7 +65,7 @@ public class BulkExportAppCtx {
			)
			// write binaries and save to db
			.addIntermediateStep(
				"write-to-binaries",
				WRITE_TO_BINARIES,
				"Writes the expanded resources to the binaries and saves",
				BulkExportBinaryFileId.class,
				writeBinaryStep()

@ -21,6 +21,7 @@ import org.hl7.fhir.r4.model.InstantType;
import org.hl7.fhir.r4.model.OperationOutcome;
import org.hl7.fhir.r4.model.Parameters;
import org.hl7.fhir.r4.model.StringType;
import org.hl7.fhir.r4.model.UriType;
import org.hl7.fhir.r4.model.UrlType;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.MethodOrderer;

@ -28,6 +29,8 @@ import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestMethodOrder;
import org.junit.jupiter.api.extension.ExtendWith;
import org.junit.jupiter.api.extension.RegisterExtension;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.Mock;

@ -72,11 +75,12 @@ public class BulkDataImportProviderTest {
		myProvider.setJobCoordinator(myJobCoordinator);
	}

	@Test
	public void testStart_Success() throws IOException {
	@ParameterizedTest
	@ValueSource(classes = {UrlType.class, UriType.class})
	public void testStart_Success(Class<?> type) throws IOException {
		// Setup

		Parameters input = createRequest();
		Parameters input = createRequest(type);
		ourLog.debug("Input: {}", myCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(input));

		String jobId = UUID.randomUUID().toString();

@ -168,11 +172,15 @@ public class BulkDataImportProviderTest {

	}

	@Nonnull Parameters createRequest() {
		return createRequest(UriType.class);
	}

	@Nonnull
	private Parameters createRequest() {
	private Parameters createRequest(Class<?> type) {
		Parameters input = new Parameters();
		input.addParameter(BulkDataImportProvider.PARAM_INPUT_FORMAT, new CodeType(Constants.CT_FHIR_NDJSON));
		input.addParameter(BulkDataImportProvider.PARAM_INPUT_SOURCE, new UrlType("http://foo"));
		input.addParameter(BulkDataImportProvider.PARAM_INPUT_SOURCE, type == UrlType.class ? new UrlType("http://foo") : new UriType("http://foo"));
		input.addParameter()
			.setName(BulkDataImportProvider.PARAM_STORAGE_DETAIL)
			.addPart(new Parameters.ParametersParameterComponent().setName(BulkDataImportProvider.PARAM_STORAGE_DETAIL_TYPE).setValue(new CodeType(BulkDataImportProvider.PARAM_STORAGE_DETAIL_TYPE_VAL_HTTPS)))

@ -181,11 +189,11 @@ public class BulkDataImportProviderTest {
		input.addParameter()
			.setName(BulkDataImportProvider.PARAM_INPUT)
			.addPart(new Parameters.ParametersParameterComponent().setName(BulkDataImportProvider.PARAM_INPUT_TYPE).setValue(new CodeType("Observation")))
			.addPart(new Parameters.ParametersParameterComponent().setName(BulkDataImportProvider.PARAM_INPUT_URL).setValue(new UrlType("http://example.com/Observation")));
			.addPart(new Parameters.ParametersParameterComponent().setName(BulkDataImportProvider.PARAM_INPUT_URL).setValue(type == UrlType.class ? new UrlType("http://example.com/Observation") : new UriType("http://example.com/Observation")));
		input.addParameter()
			.setName(BulkDataImportProvider.PARAM_INPUT)
			.addPart(new Parameters.ParametersParameterComponent().setName(BulkDataImportProvider.PARAM_INPUT_TYPE).setValue(new CodeType("Patient")))
			.addPart(new Parameters.ParametersParameterComponent().setName(BulkDataImportProvider.PARAM_INPUT_URL).setValue(new UrlType("http://example.com/Patient")));
			.addPart(new Parameters.ParametersParameterComponent().setName(BulkDataImportProvider.PARAM_INPUT_URL).setValue(type == UrlType.class ? new UrlType("http://example.com/Patient") : new UriType("http://example.com/Patient")));
		return input;
	}

@ -27,10 +27,13 @@ import ca.uhn.fhir.batch2.model.MarkWorkChunkAsErrorRequest;
import ca.uhn.fhir.batch2.model.StatusEnum;
import ca.uhn.fhir.batch2.model.WorkChunk;
import ca.uhn.fhir.batch2.models.JobInstanceFetchRequest;
import ca.uhn.fhir.i18n.Msg;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;

import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.Optional;

@ -74,6 +77,10 @@ public interface IJobPersistence {
	 */
	Optional<JobInstance> fetchInstance(String theInstanceId);

	default List<JobInstance> fetchInstances(String theJobDefinitionId, Set<StatusEnum> theStatuses, Date theCutoff, Pageable thePageable) {
		throw new UnsupportedOperationException(Msg.code(2271) + "Unsupported operation in this implementation");
	}

	/**
	 * Fetches any existing jobs matching provided request parameters
	 * @return

@ -29,7 +29,9 @@ import ca.uhn.fhir.batch2.model.StatusEnum;
import ca.uhn.fhir.batch2.model.WorkChunk;
import ca.uhn.fhir.batch2.models.JobInstanceFetchRequest;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;

import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.Optional;

@ -67,6 +69,11 @@ public class SynchronizedJobPersistenceWrapper implements IJobPersistence {
		return myWrap.fetchInstance(theInstanceId);
	}

	@Override
	public List<JobInstance> fetchInstances(String theJobDefinitionId, Set<StatusEnum> theStatuses, Date theCutoff, Pageable thePageable) {
		return myWrap.fetchInstances(theJobDefinitionId, theStatuses, theCutoff, thePageable);
	}

	@Override
	public synchronized List<JobInstance> fetchInstances(FetchJobInstancesRequest theRequest, int theStart, int theBatchSize) {
		return myWrap.fetchInstances(theRequest, theStart, theBatchSize);

@ -35,5 +35,6 @@ public interface IBulkDataExportJobSchedulingHelper {
	 * Stops all invoked jobs, and then purges them.
	 */
	@Transactional(propagation = Propagation.NEVER)
	@Deprecated
	void cancelAndPurgeAllJobs();
}

@ -367,6 +367,9 @@ public class JpaModelScannerAndVerifier {
		if (ourReservedWords.contains(theColumnName)) {
			throw new IllegalArgumentException(Msg.code(1631) + "Column name is a reserved word: " + theColumnName + " found on " + theElement);
		}
		if (theColumnName.startsWith("_")) {
			throw new IllegalArgumentException(Msg.code(2272) + "Column name " + theColumnName + " starts with an '_' (underscore). This is not permitted for Oracle field names. Found on " + theElement);
		}
	}

	private static int calculateIndexLength(String[] theColumnNames, Map<String, Integer> theColumnNameToLength, String theIndexName) {

pom.xml
@ -904,7 +904,7 @@
		<jaxb_runtime_version>3.0.0</jaxb_runtime_version>
		<jena_version>4.2.0</jena_version>
		<jersey_version>3.0.3</jersey_version>
		<jetty_version>10.0.12</jetty_version>
		<jetty_version>10.0.13</jetty_version>
		<jsr305_version>3.0.2</jsr305_version>
		<junit_version>5.9.1</junit_version>
		<flexmark_version>0.50.40</flexmark_version>

@ -1334,7 +1334,7 @@
			<dependency>
				<groupId>org.springdoc</groupId>
				<artifactId>springdoc-openapi-ui</artifactId>
				<version>1.5.13</version>
				<version>1.6.14</version>
			</dependency>
			<dependency>
				<groupId>net.sourceforge.htmlunit</groupId>

@ -1870,7 +1870,7 @@
			<dependency>
				<groupId>org.postgresql</groupId>
				<artifactId>postgresql</artifactId>
				<version>42.5.0</version>
				<version>42.5.1</version>
			</dependency>
			<dependency>
				<groupId>org.quartz-scheduler</groupId>