Schema migrator returns status (#4937)
* Migrator should return status
* Cleanup
* Add changelog
* Delete unused
* Test fix
This commit is contained in:
parent
c98c4a4dca
commit
a04a49cb68
|
@ -0,0 +1,5 @@
|
|||
---
type: add
issue: 4937
title: "The SQL schema migrator now returns a status flag indicating whether the schema was initialized or not."
|
|
@ -1,63 +0,0 @@
|
|||
package ca.uhn.fhir.jpa.searchparam;
|
||||
|
||||
import ca.uhn.fhir.context.FhirContext;
|
||||
import ca.uhn.fhir.context.support.IValidationSupport;
|
||||
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
|
||||
import ca.uhn.fhir.jpa.model.entity.StorageSettings;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamString;
|
||||
import ca.uhn.fhir.jpa.searchparam.extractor.SearchParamExtractorDstu3;
|
||||
import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
|
||||
import ca.uhn.fhir.rest.server.util.ResourceSearchParams;
|
||||
import ca.uhn.fhir.util.StopWatch;
|
||||
import org.hl7.fhir.dstu3.model.Patient;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import java.util.Set;
|
||||
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.mockito.ArgumentMatchers.eq;
|
||||
import static org.mockito.Mockito.mock;
|
||||
import static org.mockito.Mockito.times;
|
||||
import static org.mockito.Mockito.verify;
|
||||
import static org.mockito.Mockito.when;
|
||||
|
||||
public class IndexStressTest {
|
||||
|
||||
private static final Logger ourLog = LoggerFactory.getLogger(IndexStressTest.class);
|
||||
|
||||
@Test
|
||||
public void testExtractSearchParams() {
|
||||
Patient p = new Patient();
|
||||
p.addName().setFamily("FOO").addGiven("BAR").addGiven("BAR");
|
||||
p.getMaritalStatus().setText("DDDDD");
|
||||
p.addAddress().addLine("A").addLine("B").addLine("C");
|
||||
|
||||
FhirContext ctx = FhirContext.forDstu3();
|
||||
IValidationSupport mockValidationSupport = mock(IValidationSupport.class);
|
||||
when(mockValidationSupport.getFhirContext()).thenReturn(ctx);
|
||||
ISearchParamRegistry searchParamRegistry = mock(ISearchParamRegistry.class);
|
||||
SearchParamExtractorDstu3 extractor = new SearchParamExtractorDstu3(new StorageSettings(), new PartitionSettings(), ctx, searchParamRegistry);
|
||||
extractor.start();
|
||||
|
||||
ResourceSearchParams resourceSearchParams = new ResourceSearchParams("Patient");
|
||||
ctx.getResourceDefinition("Patient")
|
||||
.getSearchParams()
|
||||
.forEach(t -> resourceSearchParams.put(t.getName(), t));
|
||||
when(searchParamRegistry.getActiveSearchParams(eq("Patient"))).thenReturn(resourceSearchParams);
|
||||
|
||||
Set<ResourceIndexedSearchParamString> params = extractor.extractSearchParamStrings(p);
|
||||
|
||||
StopWatch sw = new StopWatch();
|
||||
int loops = 100;
|
||||
for (int i = 0; i < loops; i++) {
|
||||
params = extractor.extractSearchParamStrings(p);
|
||||
}
|
||||
|
||||
ourLog.info("Indexed {} times in {}ms/time", loops, sw.getMillisPerOperation(loops));
|
||||
|
||||
assertEquals(9, params.size());
|
||||
verify(mockValidationSupport, times(1)).fetchAllStructureDefinitions();
|
||||
}
|
||||
}
|
|
@ -9,7 +9,10 @@ import ca.uhn.fhir.jpa.migrate.dao.HapiMigrationDao;
|
|||
import ca.uhn.fhir.jpa.migrate.tasks.HapiFhirJpaMigrationTasks;
|
||||
import ca.uhn.fhir.system.HapiSystemProperties;
|
||||
import ca.uhn.fhir.util.VersionEnum;
|
||||
import org.apache.commons.dbcp2.BasicDataSource;
|
||||
import org.h2.jdbcx.JdbcDataSource;
|
||||
import org.junit.jupiter.api.AfterEach;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.extension.RegisterExtension;
|
||||
import org.junit.jupiter.params.ParameterizedTest;
|
||||
import org.junit.jupiter.params.provider.ArgumentsSource;
|
||||
|
@ -17,11 +20,14 @@ import org.slf4j.Logger;
|
|||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import javax.sql.DataSource;
|
||||
import java.sql.SQLException;
|
||||
import java.util.Collections;
|
||||
import java.util.Properties;
|
||||
|
||||
import static ca.uhn.fhir.jpa.embedded.HapiEmbeddedDatabasesExtension.FIRST_TESTED_VERSION;
|
||||
import static ca.uhn.fhir.jpa.migrate.SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME;
|
||||
import static org.junit.jupiter.api.Assertions.assertFalse;
|
||||
import static org.junit.jupiter.api.Assertions.assertTrue;
|
||||
|
||||
|
||||
public class HapiSchemaMigrationTest {
|
||||
|
@ -61,10 +67,34 @@ public class HapiSchemaMigrationTest {
|
|||
int lastVersion = allVersions.length - 1;
|
||||
VersionEnum to = allVersions[lastVersion];
|
||||
|
||||
MigrationTaskList migrationTasks = new HapiFhirJpaMigrationTasks(Collections.EMPTY_SET).getTaskList(from, to);
|
||||
MigrationTaskList migrationTasks = new HapiFhirJpaMigrationTasks(Collections.emptySet()).getTaskList(from, to);
|
||||
SchemaMigrator schemaMigrator = new SchemaMigrator(TEST_SCHEMA_NAME, HAPI_FHIR_MIGRATION_TABLENAME, dataSource, new Properties(), migrationTasks, hapiMigrationStorageSvc);
|
||||
schemaMigrator.setDriverType(theDriverType);
|
||||
schemaMigrator.createMigrationTableIfRequired();
|
||||
schemaMigrator.migrate();
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testCreateMigrationTableIfRequired() throws SQLException {
|
||||
// Setup
|
||||
BasicDataSource dataSource = new BasicDataSource();
|
||||
dataSource.setUrl("jdbc:h2:mem:no-tables");
|
||||
dataSource.setUsername("SA");
|
||||
dataSource.setPassword("SA");
|
||||
dataSource.start();
|
||||
|
||||
MigrationTaskList migrationTasks = new HapiFhirJpaMigrationTasks(Collections.emptySet()).getTaskList(VersionEnum.V6_0_0, VersionEnum.V6_4_0);
|
||||
HapiMigrationDao hapiMigrationDao = new HapiMigrationDao(dataSource, DriverTypeEnum.H2_EMBEDDED, HAPI_FHIR_MIGRATION_TABLENAME);
|
||||
HapiMigrationStorageSvc hapiMigrationStorageSvc = new HapiMigrationStorageSvc(hapiMigrationDao);
|
||||
SchemaMigrator schemaMigrator = new SchemaMigrator(TEST_SCHEMA_NAME, HAPI_FHIR_MIGRATION_TABLENAME, dataSource, new Properties(), migrationTasks, hapiMigrationStorageSvc);
|
||||
schemaMigrator.setDriverType(DriverTypeEnum.H2_EMBEDDED);
|
||||
|
||||
// Test & Validate
|
||||
assertTrue(schemaMigrator.createMigrationTableIfRequired());
|
||||
assertFalse(schemaMigrator.createMigrationTableIfRequired());
|
||||
assertFalse(schemaMigrator.createMigrationTableIfRequired());
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -88,8 +88,8 @@ public class HapiMigrationStorageSvc {
|
|||
* Create the migration table if it does not already exist
|
||||
*/
|
||||
|
||||
public void createMigrationTableIfRequired() {
|
||||
myHapiMigrationDao.createMigrationTableIfRequired();
|
||||
public boolean createMigrationTableIfRequired() {
|
||||
return myHapiMigrationDao.createMigrationTableIfRequired();
|
||||
}
|
||||
|
||||
|
||||
|
|
|
@ -111,7 +111,7 @@ public class SchemaMigrator {
|
|||
myCallbacks = theCallbacks;
|
||||
}
|
||||
|
||||
public void createMigrationTableIfRequired() {
|
||||
myHapiMigrationStorageSvc.createMigrationTableIfRequired();
|
||||
public boolean createMigrationTableIfRequired() {
|
||||
return myHapiMigrationStorageSvc.createMigrationTableIfRequired();
|
||||
}
|
||||
}
|
||||
|
|
|
@ -104,9 +104,9 @@ public class HapiMigrationDao {
|
|||
return myJdbcTemplate.queryForObject(highestKeyQuery, Integer.class);
|
||||
}
|
||||
|
||||
public void createMigrationTableIfRequired() {
|
||||
public boolean createMigrationTableIfRequired() {
|
||||
if (migrationTableExists()) {
|
||||
return;
|
||||
return false;
|
||||
}
|
||||
ourLog.info("Creating table {}", myMigrationTablename);
|
||||
|
||||
|
@ -120,6 +120,8 @@ public class HapiMigrationDao {
|
|||
|
||||
HapiMigrationEntity entity = HapiMigrationEntity.tableCreatedRecord();
|
||||
myJdbcTemplate.update(myMigrationQueryBuilder.insertPreparedStatement(), entity.asPreparedStatementSetter());
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
private boolean migrationTableExists() {
|
||||
|
|
Loading…
Reference in New Issue