4516 create hapi fhir cli command to clear stale lock entries (#4517)

* Initial implementation

* better tests

* Add changelog and docs

* Forgotten files

* Code review comments

* Fix checkstyle
This commit is contained in:
Tadgh 2023-02-07 11:37:52 -08:00 committed by GitHub
parent d08995a5e6
commit ca21abf0f8
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
13 changed files with 502 additions and 35 deletions

View File

@ -186,6 +186,7 @@ public abstract class BaseApp {
commands.add(new ExportConceptMapToCsvCommand()); commands.add(new ExportConceptMapToCsvCommand());
commands.add(new ImportCsvToConceptMapCommand()); commands.add(new ImportCsvToConceptMapCommand());
commands.add(new HapiFlywayMigrateDatabaseCommand()); commands.add(new HapiFlywayMigrateDatabaseCommand());
commands.add(new HapiClearMigrationLockCommand());
commands.add(new CreatePackageCommand()); commands.add(new CreatePackageCommand());
commands.add(new BulkImportCommand()); commands.add(new BulkImportCommand());
commands.add(new ReindexTerminologyCommand()); commands.add(new ReindexTerminologyCommand());

View File

@ -0,0 +1,92 @@
package ca.uhn.fhir.cli;
/*-
* #%L
* HAPI FHIR - Command Line Client - API
* %%
* Copyright (C) 2014 - 2023 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.HapiMigrator;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import java.util.Arrays;
import java.util.stream.Collectors;
/**
 * Base command for the {@code clear-migration-lock} CLI operation. Clears a stale
 * migration lock row (left behind by an interrupted migration) from the database
 * migration table. Concrete subclasses supply the migration table name via
 * {@link #setMigrationTableName(String)} before delegating to {@link #run(CommandLine)}.
 */
public abstract class BaseClearMigrationLockCommand extends BaseCommand {

	public static final String CLEAR_LOCK = "clear-migration-lock";

	// Name of the migration table holding the lock row; set by the concrete subclass before run()
	private String myMigrationTableName;

	@Override
	public String getCommandDescription() {
		return "This command clears a database migration lock";
	}

	@Override
	public String getCommandName() {
		return CLEAR_LOCK;
	}

	@Override
	public Options getOptions() {
		Options retVal = new Options();
		addRequiredOption(retVal, "u", "url", "URL", "The JDBC database URL");
		addRequiredOption(retVal, "n", "username", "Username", "The JDBC database username");
		addRequiredOption(retVal, "p", "password", "Password", "The JDBC database password");
		addRequiredOption(retVal, "d", "driver", "Driver", "The database driver to use (Options are " + driverOptions() + ")");
		addRequiredOption(retVal, "l", "lock-uuid", "Lock UUID", "The UUID value of the lock held in the database.");
		return retVal;
	}

	/** Comma-separated list of all supported JDBC driver names, for help text and error messages. */
	private String driverOptions() {
		return Arrays.stream(DriverTypeEnum.values()).map(Enum::name).collect(Collectors.joining(", "));
	}

	/**
	 * Connects to the database described by the command-line options and attempts to
	 * clear the migration lock row matching the supplied UUID.
	 *
	 * @param theCommandLine the parsed command line (url, username, password, driver, lock-uuid)
	 * @throws ParseException if the supplied driver name is not a valid {@link DriverTypeEnum} value
	 */
	@Override
	public void run(CommandLine theCommandLine) throws ParseException {
		String url = theCommandLine.getOptionValue("u");
		String username = theCommandLine.getOptionValue("n");
		String password = theCommandLine.getOptionValue("p");
		String lockUUID = theCommandLine.getOptionValue("l");

		DriverTypeEnum driverType;
		String driverTypeString = theCommandLine.getOptionValue("d");
		try {
			driverType = DriverTypeEnum.valueOf(driverTypeString);
		} catch (IllegalArgumentException | NullPointerException e) {
			// Enum.valueOf throws IAE for unknown names and NPE for null input;
			// any other exception type would indicate a programming error and should propagate
			throw new ParseException(Msg.code(2774) + "Invalid driver type \"" + driverTypeString + "\". Valid values are: " + driverOptions());
		}

		DriverTypeEnum.ConnectionProperties connectionProperties = driverType.newConnectionProperties(url, username, password);
		HapiMigrator migrator = new HapiMigrator(myMigrationTableName, connectionProperties.getDataSource(), driverType);
		migrator.clearMigrationLockWithUUID(lockUUID);
	}

	/**
	 * Sets the name of the migration table that the lock row lives in.
	 *
	 * @param theMigrationTableName the migration table name, e.g. {@code FLY_HFJ_MIGRATION}
	 */
	protected void setMigrationTableName(String theMigrationTableName) {
		myMigrationTableName = theMigrationTableName;
	}
}

View File

@ -0,0 +1,13 @@
package ca.uhn.fhir.cli;
import ca.uhn.fhir.jpa.migrate.SchemaMigrator;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.ParseException;
/**
 * Concrete {@code clear-migration-lock} CLI command that targets the standard
 * HAPI FHIR migration table.
 */
public class HapiClearMigrationLockCommand extends BaseClearMigrationLockCommand {
@Override
public void run(CommandLine theCommandLine) throws ParseException {
// Point the base command at the standard HAPI FHIR migration table before delegating
setMigrationTableName(SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME);
super.run(theCommandLine);
}
}

View File

@ -1,31 +0,0 @@
package ca.uhn.fhir.cli;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.MatcherAssert.assertThat;
/**
 * Verifies that the CLI "help" command prints usage information. Captures
 * STDOUT into a byte array so the console output can be asserted on.
 */
public class BaseAppTest {

	// The real stdout, saved so it can be restored after each test
	private final PrintStream standardOut = System.out;
	private final ByteArrayOutputStream outputStreamCaptor = new ByteArrayOutputStream();

	@BeforeEach
	public void setUp() {
		// Redirect stdout into the capturing stream for the duration of the test
		System.setOut(new PrintStream(outputStreamCaptor));
	}

	@AfterEach
	public void tearDown() {
		System.setOut(standardOut);
	}

	@Test
	public void testHelpOption() {
		App.main(new String[]{"help", "create-package"});
		// Fix: the captured output was previously also passed as the assertThat "reason"
		// argument, which merely duplicated it in failure messages. A single actual-value
		// argument is the intended usage.
		assertThat(outputStreamCaptor.toString().trim(), containsString("Usage"));
	}
}

View File

@ -0,0 +1,32 @@
package ca.uhn.fhir.cli;
import org.apache.commons.io.output.TeeOutputStream;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
/**
* This class splits output stream to both STDOUT, and a capturing byte array output stream, which can later be inspected.
*/
/**
 * Test base class that splits the output stream to both STDOUT and a capturing
 * byte array output stream, which can later be inspected via {@link #getConsoleOutput()}.
 */
public class ConsoleOutputCapturingBaseTest {

	protected final ByteArrayOutputStream outputStreamCaptor = new ByteArrayOutputStream();
	protected final TeeOutputStream myTeeOutputStream = new TeeOutputStream(System.out, outputStreamCaptor);

	// The original System.out, saved at construction time so tearDown() can restore it.
	// Fix: the previous implementation called System.setOut(System.out), which is a
	// no-op (System.out is already the tee'd stream at that point) and left the tee
	// installed after the test finished.
	private final PrintStream myOriginalOut = System.out;

	@BeforeEach
	public void setUp() {
		System.setOut(new PrintStream(myTeeOutputStream));
	}

	@AfterEach
	public void tearDown() {
		outputStreamCaptor.reset();
		System.setOut(myOriginalOut);
	}

	/** Returns everything written to STDOUT since the last reset, trimmed. */
	protected String getConsoleOutput() {
		return outputStreamCaptor.toString().trim();
	}
}

View File

@ -0,0 +1,311 @@
package ca.uhn.fhir.cli;
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.dao.HapiMigrationDao;
import ca.uhn.fhir.jpa.migrate.entity.HapiMigrationEntity;
import ca.uhn.fhir.system.HapiSystemProperties;
import com.google.common.base.Charsets;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.core.support.AbstractLobCreatingPreparedStatementCallback;
import org.springframework.jdbc.support.lob.DefaultLobHandler;
import org.springframework.jdbc.support.lob.LobCreator;
import javax.annotation.Nonnull;
import java.io.File;
import java.io.IOException;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.sql.Types;
import java.util.*;
import static ca.uhn.fhir.jpa.migrate.HapiMigrationLock.LOCK_PID;
import static ca.uhn.fhir.jpa.migrate.HapiMigrationStorageSvc.LOCK_TYPE;
import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.jupiter.api.Assertions.*;
import static org.slf4j.LoggerFactory.getLogger;
/**
 * Integration tests for the {@code clear-migration-lock} CLI command.
 * Each test builds a fresh embedded H2 database from a 3.4.0-era schema dump,
 * seeds it with representative rows, runs the standard migration, and then
 * exercises lock clearing against the FLY_HFJ_MIGRATION table.
 */
public class HapiClearMigrationLockCommandTest extends ConsoleOutputCapturingBaseTest {
private static final Logger ourLog = getLogger(HapiClearMigrationLockCommandTest.class);
// Directory under target/ that holds the per-test H2 database files
public static final String DB_DIRECTORY = "target/h2_test";
static {
// NOTE(review): presumably test mode causes CLI failures to surface as
// CommandFailureException rather than terminating the JVM — confirm against
// HapiSystemProperties; the first test below relies on catching that exception
HapiSystemProperties.enableTestMode();
}
// Attempting to clear a lock with a UUID that does not match the held lock must fail
@Test
public void testClearNonExistingLockIncorrectLock() throws IOException {
ConnectionData connectionData = createSchemaAndMigrate("test_migrate_clear_incorrect_lock");
HapiMigrationDao dao = new HapiMigrationDao(connectionData.connectionProperties.getDataSource(), DriverTypeEnum.H2_EMBEDDED, "FLY_HFJ_MIGRATION");
String correctLockUUID = UUID.randomUUID().toString();
String incorrectLockUUID = UUID.randomUUID().toString();
createAndSaveLockRow(correctLockUUID, dao);
String[] args = new String[]{
BaseClearMigrationLockCommand.CLEAR_LOCK,
"-d", "H2_EMBEDDED",
"-u", connectionData.url,
"-n", "",
"-p", "",
"-l", incorrectLockUUID
};
int beforeClearMigrationCount = dao.findAll().size();
try {
App.main(args);
fail();
} catch (CommandFailureException e) {
// A competing lock (different UUID) must be reported, not silently removed
assertThat(e.getMessage(), containsString("HAPI-2152: Internal error: on unlocking, a competing lock was found"));
}
}
// Clearing when no lock row exists at all should be a no-op that logs a failure message
@Test
public void testClearNonExistingLockNoLocks() throws IOException {
ConnectionData connectionData = createSchemaAndMigrate("test_migrate_clear_nonexisting_lock");
HapiMigrationDao dao = new HapiMigrationDao(connectionData.connectionProperties.getDataSource(), DriverTypeEnum.H2_EMBEDDED, "FLY_HFJ_MIGRATION");
String lockUUID = UUID.randomUUID().toString();
String[] args = new String[]{
BaseClearMigrationLockCommand.CLEAR_LOCK,
"-d", "H2_EMBEDDED",
"-u", connectionData.url,
"-n", "",
"-p", "",
"-l", lockUUID
};
int beforeClearMigrationCount = dao.findAll().size();
App.main(args);
int afterClearMigrationCount = dao.findAll().size();
int removedRows = beforeClearMigrationCount - afterClearMigrationCount;
// No rows removed, and the console output should announce the failed removal
assertEquals(0, removedRows);
assertThat(getConsoleOutput(), containsString("Did not successfully remove lock entry. [uuid="+ lockUUID +"]"));
}
// Happy path: a lock row with a matching UUID is removed exactly once
@Test
public void testMigrateAndClearExistingLock() throws IOException, SQLException {
ConnectionData connectionData = createSchemaAndMigrate("test_migrate_clear_existing_lock");
HapiMigrationDao dao = new HapiMigrationDao(connectionData.connectionProperties.getDataSource(), DriverTypeEnum.H2_EMBEDDED, "FLY_HFJ_MIGRATION");
String lockUUID = UUID.randomUUID().toString();
createAndSaveLockRow(lockUUID, dao);
String[] args = new String[]{
BaseClearMigrationLockCommand.CLEAR_LOCK,
"-d", "H2_EMBEDDED",
"-u", connectionData.url,
"-n", "",
"-p", "",
"-l", lockUUID
};
int beforeClearMigrationCount = dao.findAll().size();
App.main(args);
int afterClearMigrationCount = dao.findAll().size();
int removedRows = beforeClearMigrationCount - afterClearMigrationCount;
assertEquals(1, removedRows);
assertThat(getConsoleOutput(), containsString("Successfully removed lock entry. [uuid="+ lockUUID +"]"));
}
// Bundles the connection properties together with the JDBC URL used to create them
private record ConnectionData(DriverTypeEnum.ConnectionProperties connectionProperties, String url) {}
/**
 * Creates an H2 database at a fresh location, initializes it with the 3.4.0 schema,
 * seeds sample rows, then runs the standard CLI migration against it.
 *
 * @param theDbName name of the database file under {@link #DB_DIRECTORY}
 * @return the connection properties and JDBC URL of the migrated database
 */
public ConnectionData createSchemaAndMigrate(String theDbName) throws IOException {
File location = getLocation(theDbName);
String url = "jdbc:h2:" + location.getAbsolutePath();
DriverTypeEnum.ConnectionProperties connectionProperties = DriverTypeEnum.H2_EMBEDDED.newConnectionProperties(url, "", "");
String initSql = "/persistence_create_h2_340.sql";
executeSqlStatements(connectionProperties, initSql);
seedDatabase340(connectionProperties);
ourLog.info("**********************************************");
ourLog.info("Done Setup, Starting Migration...");
ourLog.info("**********************************************");
String[] args = new String[]{
BaseFlywayMigrateDatabaseCommand.MIGRATE_DATABASE,
"-d", "H2_EMBEDDED",
"-u", url,
"-n", "",
"-p", "",
"-r"
};
App.main(args);
return new ConnectionData(connectionProperties, url);
}
// Inserts a migration-table row shaped like the lock row the migrator creates:
// the well-known LOCK_PID and LOCK_TYPE, with the lock UUID in the description column
private static void createAndSaveLockRow(String theLockUUID, HapiMigrationDao theDao) {
HapiMigrationEntity me = new HapiMigrationEntity();
me.setPid(LOCK_PID);
me.setChecksum(100);
me.setDescription(theLockUUID);
me.setSuccess(true);
me.setExecutionTime(20);
me.setInstalledBy("gary");
me.setInstalledOn(new Date());
me.setVersion("2023.1");
me.setType(LOCK_TYPE);
theDao.save(me);
}
// Returns a fresh file location for the named database, wiping any previous run's files
@Nonnull
private File getLocation(String theDatabaseName) throws IOException {
File directory = new File(DB_DIRECTORY);
if (directory.exists()) {
FileUtils.deleteDirectory(directory);
}
return new File(DB_DIRECTORY + "/" + theDatabaseName);
}
// Seeds one Patient resource (HFJ_RESOURCE + HFJ_RES_VER) plus string/token/date
// search-index rows, using positional JDBC parameters matching the 3.4.0 column lists
private void seedDatabase340(DriverTypeEnum.ConnectionProperties theConnectionProperties) {
theConnectionProperties.getTxTemplate().execute(t -> {
JdbcTemplate jdbcTemplate = theConnectionProperties.newJdbcTemplate();
jdbcTemplate.execute(
"insert into HFJ_RESOURCE (RES_DELETED_AT, RES_VERSION, FORCED_ID_PID, HAS_TAGS, RES_PUBLISHED, RES_UPDATED, SP_HAS_LINKS, HASH_SHA256, SP_INDEX_STATUS, RES_LANGUAGE, SP_CMPSTR_UNIQ_PRESENT, SP_COORDS_PRESENT, SP_DATE_PRESENT, SP_NUMBER_PRESENT, SP_QUANTITY_PRESENT, SP_STRING_PRESENT, SP_TOKEN_PRESENT, SP_URI_PRESENT, RES_PROFILE, RES_TYPE, RES_VER, RES_ID) values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
new AbstractLobCreatingPreparedStatementCallback(new DefaultLobHandler()) {
@Override
protected void setValues(PreparedStatement thePs, LobCreator theLobCreator) throws SQLException {
thePs.setNull(1, Types.TIMESTAMP);
thePs.setString(2, "R4");
thePs.setNull(3, Types.BIGINT);
thePs.setBoolean(4, false);
thePs.setTimestamp(5, new Timestamp(System.currentTimeMillis()));
thePs.setTimestamp(6, new Timestamp(System.currentTimeMillis()));
thePs.setBoolean(7, false);
thePs.setNull(8, Types.VARCHAR);
thePs.setLong(9, 1L);
thePs.setNull(10, Types.VARCHAR);
thePs.setBoolean(11, false);
thePs.setBoolean(12, false);
thePs.setBoolean(13, false);
thePs.setBoolean(14, false);
thePs.setBoolean(15, false);
thePs.setBoolean(16, false);
thePs.setBoolean(17, false);
thePs.setBoolean(18, false);
thePs.setNull(19, Types.VARCHAR);
thePs.setString(20, "Patient");
thePs.setLong(21, 1L);
thePs.setLong(22, 1L);
}
}
);
jdbcTemplate.execute(
"insert into HFJ_RES_VER (RES_DELETED_AT, RES_VERSION, FORCED_ID_PID, HAS_TAGS, RES_PUBLISHED, RES_UPDATED, RES_ENCODING, RES_TEXT, RES_ID, RES_TYPE, RES_VER, PID) values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
new AbstractLobCreatingPreparedStatementCallback(new DefaultLobHandler()) {
@Override
protected void setValues(PreparedStatement thePs, LobCreator theLobCreator) throws SQLException {
thePs.setNull(1, Types.TIMESTAMP);
thePs.setString(2, "R4");
thePs.setNull(3, Types.BIGINT);
thePs.setBoolean(4, false);
thePs.setTimestamp(5, new Timestamp(System.currentTimeMillis()));
thePs.setTimestamp(6, new Timestamp(System.currentTimeMillis()));
thePs.setString(7, "JSON");
theLobCreator.setBlobAsBytes(thePs, 8, "{\"resourceType\":\"Patient\"}".getBytes(Charsets.US_ASCII));
thePs.setLong(9, 1L);
thePs.setString(10, "Patient");
thePs.setLong(11, 1L);
thePs.setLong(12, 1L);
}
}
);
jdbcTemplate.execute(
"insert into HFJ_SPIDX_STRING (SP_MISSING, SP_NAME, RES_ID, RES_TYPE, SP_UPDATED, SP_VALUE_EXACT, SP_VALUE_NORMALIZED, SP_ID) values (?, ?, ?, ?, ?, ?, ?, ?)",
new AbstractLobCreatingPreparedStatementCallback(new DefaultLobHandler()) {
@Override
protected void setValues(PreparedStatement thePs, LobCreator theLobCreator) throws SQLException {
thePs.setBoolean(1, false);
thePs.setString(2, "given");
thePs.setLong(3, 1L); // res-id
thePs.setString(4, "Patient");
thePs.setTimestamp(5, new Timestamp(System.currentTimeMillis()));
thePs.setString(6, "ROBERT");
thePs.setString(7, "Robert");
thePs.setLong(8, 1L);
}
}
);
jdbcTemplate.execute(
"insert into HFJ_SPIDX_TOKEN (SP_MISSING, SP_NAME, RES_ID, RES_TYPE, SP_UPDATED, SP_SYSTEM, SP_VALUE, SP_ID) values (?, ?, ?, ?, ?, ?, ?, ?)",
new AbstractLobCreatingPreparedStatementCallback(new DefaultLobHandler()) {
@Override
protected void setValues(PreparedStatement thePs, LobCreator theLobCreator) throws SQLException {
thePs.setBoolean(1, false);
thePs.setString(2, "identifier");
thePs.setLong(3, 1L); // res-id
thePs.setString(4, "Patient");
thePs.setTimestamp(5, new Timestamp(System.currentTimeMillis()));
thePs.setString(6, "http://foo");
thePs.setString(7, "12345678");
thePs.setLong(8, 1L);
}
}
);
jdbcTemplate.execute(
"insert into HFJ_SPIDX_DATE (SP_MISSING, SP_NAME, RES_ID, RES_TYPE, SP_UPDATED, SP_VALUE_HIGH, SP_VALUE_LOW, SP_ID) values (?, ?, ?, ?, ?, ?, ?, ?)",
new AbstractLobCreatingPreparedStatementCallback(new DefaultLobHandler()) {
@Override
protected void setValues(PreparedStatement thePs, LobCreator theLobCreator) throws SQLException {
thePs.setBoolean(1, false);
thePs.setString(2, "birthdate");
thePs.setLong(3, 1L); // res-id
thePs.setString(4, "Patient");
thePs.setTimestamp(5, new Timestamp(System.currentTimeMillis()));
thePs.setTimestamp(6, new Timestamp(1000000000L)); // value high
thePs.setTimestamp(7, new Timestamp(1000000000L)); // value low
thePs.setLong(8, 1L);
}
}
);
return null;
});
}
// Reads a classpath SQL script, splits it on newlines, drops blanks, strips trailing
// semicolons, and executes each statement inside a single transaction
private void executeSqlStatements(DriverTypeEnum.ConnectionProperties theConnectionProperties, String theInitSql) throws
IOException {
String script = IOUtils.toString(HapiClearMigrationLockCommandTest.class.getResourceAsStream(theInitSql), Charsets.UTF_8);
List<String> scriptStatements = new ArrayList<>(Arrays.asList(script.split("\n")));
for (int i = 0; i < scriptStatements.size(); i++) {
String nextStatement = scriptStatements.get(i);
if (isBlank(nextStatement)) {
scriptStatements.remove(i);
i--;
continue;
}
nextStatement = nextStatement.trim();
while (nextStatement.endsWith(";")) {
nextStatement = nextStatement.substring(0, nextStatement.length() - 1);
}
scriptStatements.set(i, nextStatement);
}
theConnectionProperties.getTxTemplate().execute(t -> {
for (String next : scriptStatements) {
theConnectionProperties.newJdbcTemplate().execute(next);
}
return null;
});
}
}

View File

@ -0,0 +1,13 @@
package ca.uhn.fhir.cli;
import org.junit.jupiter.api.Test;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.MatcherAssert.assertThat;
/**
 * Verifies that the CLI "help" command prints usage information.
 */
public class HelpOptionTest extends ConsoleOutputCapturingBaseTest {
	@Test
	public void testHelpOption() {
		App.main(new String[]{"help", "create-package"});
		// Fix: the captured output was previously also passed as the assertThat "reason"
		// argument, which merely duplicated it. Use the base class accessor and the
		// standard two-argument overload instead.
		assertThat(getConsoleOutput(), containsString("Usage"));
	}
}

View File

@ -0,0 +1,4 @@
---
type: fix
issue: 4516
title: "A new command has been added to the HAPI-FHIR CLI called `clear-migration-lock`. This can be used to fix a database state which can occur if a migration is interrupted before completing."

View File

@ -101,6 +101,21 @@ The `migrate-database` command may be used to Migrate a database schema when upg
See [Upgrading HAPI FHIR JPA](/docs/server_jpa/upgrading.html) for information on how to use this command.
# Clear Migration Lock
The `clear-migration-lock` command should be used if an upgrade to HAPI-FHIR failed during a migration. The migration system creates a lock row when it begins. If the migration is cancelled before it finishes, the system will be left in an inconsistent state. In order to resume the migration, the lock row must be removed. From your migration logs, you will see a line which looks like the following:
```text
Migration Lock Row added. [uuid=05931c87-c2a4-49d6-8d82-d8ce09fdd8ef]
```
In order to clear this migration lock, you can run:
```bash
clear-migration-lock --lock-uuid 05931c87-c2a4-49d6-8d82-d8ce09fdd8ef
```
# Reindex Terminology
The `reindex-terminology` command may be used to recreate freetext indexes for terminology resources.

View File

@ -34,7 +34,7 @@ import static org.apache.commons.lang3.StringUtils.isBlank;
* The approach used in this class is borrowed from org.flywaydb.community.database.ignite.thin.IgniteThinDatabase * The approach used in this class is borrowed from org.flywaydb.community.database.ignite.thin.IgniteThinDatabase
*/ */
public class HapiMigrationLock implements AutoCloseable { public class HapiMigrationLock implements AutoCloseable {
static final Integer LOCK_PID = -100; public static final Integer LOCK_PID = -100;
private static final Logger ourLog = LoggerFactory.getLogger(HapiMigrationLock.class); private static final Logger ourLog = LoggerFactory.getLogger(HapiMigrationLock.class);
public static final int SLEEP_MILLIS_BETWEEN_LOCK_RETRIES = 1000; public static final int SLEEP_MILLIS_BETWEEN_LOCK_RETRIES = 1000;
public static final int DEFAULT_MAX_RETRY_ATTEMPTS = 50; public static final int DEFAULT_MAX_RETRY_ATTEMPTS = 50;
@ -111,7 +111,11 @@ public class HapiMigrationLock implements AutoCloseable {
private boolean insertLockingRow() { private boolean insertLockingRow() {
try { try {
return myMigrationStorageSvc.insertLockRecord(myLockDescription); boolean storedSuccessfully = myMigrationStorageSvc.insertLockRecord(myLockDescription);
if (storedSuccessfully) {
ourLog.info("Migration Lock Row added. [uuid={}]", myLockDescription);
}
return storedSuccessfully;
} catch (Exception e) { } catch (Exception e) {
ourLog.debug("Failed to insert lock record: {}", e.getMessage()); ourLog.debug("Failed to insert lock record: {}", e.getMessage());
return false; return false;

View File

@ -31,7 +31,7 @@ import java.util.Set;
public class HapiMigrationStorageSvc { public class HapiMigrationStorageSvc {
public static final String UNKNOWN_VERSION = "unknown"; public static final String UNKNOWN_VERSION = "unknown";
private static final String LOCK_TYPE = "hapi-fhir-lock"; public static final String LOCK_TYPE = "hapi-fhir-lock";
private final HapiMigrationDao myHapiMigrationDao; private final HapiMigrationDao myHapiMigrationDao;

View File

@ -100,6 +100,20 @@ public class HapiMigrator {
return statementBuilder; return statementBuilder;
} }
/**
 * Helper method to clear a lock with a given UUID.
 *
 * @param theUUID the UUID of the lock entry to remove; logs an error (without
 *                throwing) if no matching lock entry was removed
 */
public void clearMigrationLockWithUUID(String theUUID) {
ourLog.info("Attempting to remove lock entry. [uuid={}]", theUUID);
boolean success = myHapiMigrationStorageSvc.deleteLockRecord(theUUID);
if (success) {
ourLog.info("Successfully removed lock entry. [uuid={}]", theUUID);
} else {
ourLog.error("Did not successfully remove lock entry. [uuid={}]", theUUID);
}
}
public MigrationResult migrate() { public MigrationResult migrate() {
ourLog.info("Loaded {} migration tasks", myTaskList.size()); ourLog.info("Loaded {} migration tasks", myTaskList.size());
MigrationResult retval = new MigrationResult(); MigrationResult retval = new MigrationResult();

View File

@ -31,7 +31,6 @@ import javax.persistence.Entity;
import javax.persistence.GeneratedValue; import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType; import javax.persistence.GenerationType;
import javax.persistence.Id; import javax.persistence.Id;
import org.hibernate.annotations.GenericGenerator;
import java.util.Date; import java.util.Date;
// Note even though we are using javax.persistence annotations here, we are managing these records outside of jpa // Note even though we are using javax.persistence annotations here, we are managing these records outside of jpa