Mirror of https://github.com/hapifhir/hapi-fhir.git, synced 2025-03-25 01:18:37 +00:00
Merge branch 'jpa_migration'
commit 261f2c73ab
@@ -4,7 +4,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-fhir</artifactId>
-		<version>3.5.0-SNAPSHOT</version>
+		<version>3.5.0</version>
 		<relativePath>../../pom.xml</relativePath>
 	</parent>

@@ -10,7 +10,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-fhir</artifactId>
-		<version>3.5.0-SNAPSHOT</version>
+		<version>3.5.0</version>
 		<relativePath>../../pom.xml</relativePath>
 	</parent>

@@ -10,7 +10,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-fhir</artifactId>
-		<version>3.5.0-SNAPSHOT</version>
+		<version>3.5.0</version>
 		<relativePath>../../pom.xml</relativePath>
 	</parent>

@@ -10,7 +10,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-fhir</artifactId>
-		<version>3.5.0-SNAPSHOT</version>
+		<version>3.5.0</version>
 		<relativePath>../../pom.xml</relativePath>
 	</parent>

@@ -4,7 +4,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-fhir</artifactId>
-		<version>3.5.0-SNAPSHOT</version>
+		<version>3.5.0</version>
 		<relativePath>../../pom.xml</relativePath>
 	</parent>
 	<artifactId>hapi-fhir-standalone-overlay-example</artifactId>

@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-fhir</artifactId>
-		<version>3.5.0-SNAPSHOT</version>
+		<version>3.5.0</version>
 		<relativePath>../pom.xml</relativePath>
 	</parent>

@@ -4,7 +4,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-fhir</artifactId>
-		<version>3.5.0-SNAPSHOT</version>
+		<version>3.5.0</version>
 		<relativePath>../pom.xml</relativePath>
 	</parent>

@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>3.5.0-SNAPSHOT</version>
+		<version>3.5.0</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>

@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>3.5.0-SNAPSHOT</version>
+		<version>3.5.0</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>

@@ -63,6 +63,10 @@
 			<groupId>org.apache.commons</groupId>
 			<artifactId>commons-lang3</artifactId>
 		</dependency>
+		<dependency>
+			<groupId>org.apache.commons</groupId>
+			<artifactId>commons-text</artifactId>
+		</dependency>
 		<dependency>
 			<groupId>commons-codec</groupId>
 			<artifactId>commons-codec</artifactId>
@@ -178,7 +178,7 @@ class ModelScanner {
 	}

 	/**
-	 * There are two implementations of all of the annotations (e.g. {@link Child} and {@link org.hl7.fhir.instance.model.annotations.Child}) since the HL7.org ones will eventually replace the HAPI
+	 * There are two implementations of all of the annotations (e.g. {@link Child} since the HL7.org ones will eventually replace the HAPI
 	 * ones. Annotations can't extend each other or implement interfaces or anything like that, so rather than duplicate all of the annotation processing code this method just creates an interface
 	 * Proxy to simulate the HAPI annotations if the HL7.org ones are found instead.
 	 */
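The interface-Proxy technique this javadoc describes is worth seeing concretely. Below is a minimal, self-contained sketch of the idea, not HAPI's actual implementation: SourceChild and TargetChild are hypothetical stand-ins for the HL7.org and HAPI annotation types, and each attribute accessor on the simulated annotation delegates reflectively to the same-named accessor on the annotation that was actually found.

    import java.lang.annotation.Annotation;
    import java.lang.annotation.Retention;
    import java.lang.annotation.RetentionPolicy;
    import java.lang.reflect.Method;
    import java.lang.reflect.Proxy;

    public class AnnotationProxyDemo {

        @Retention(RetentionPolicy.RUNTIME)
        @interface SourceChild { // stand-in for the HL7.org annotation
            String name();
        }

        @Retention(RetentionPolicy.RUNTIME)
        @interface TargetChild { // stand-in for the HAPI annotation
            String name();
        }

        @SourceChild(name = "identifier")
        static class Example {
        }

        @SuppressWarnings("unchecked")
        static <T> T simulate(Class<T> theTargetType, Annotation theFoundAnnotation) {
            return (T) Proxy.newProxyInstance(theTargetType.getClassLoader(), new Class<?>[]{theTargetType},
                (theProxy, theMethod, theArgs) -> {
                    // Forward each attribute accessor to the same-named accessor on the annotation we actually found
                    Method source = theFoundAnnotation.annotationType().getMethod(theMethod.getName());
                    return source.invoke(theFoundAnnotation);
                });
        }

        public static void main(String[] args) {
            SourceChild found = Example.class.getAnnotation(SourceChild.class);
            TargetChild simulated = simulate(TargetChild.class, found);
            System.out.println(simulated.name()); // prints "identifier"
        }
    }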
@@ -482,9 +482,8 @@ class ModelScanner {
 	static Set<Class<? extends IBase>> scanVersionPropertyFile(Set<Class<? extends IBase>> theDatatypes, Map<String, Class<? extends IBaseResource>> theResourceTypes, FhirVersionEnum theVersion, Map<Class<? extends IBase>, BaseRuntimeElementDefinition<?>> theExistingElementDefinitions) {
 		Set<Class<? extends IBase>> retVal = new HashSet<Class<? extends IBase>>();

-		InputStream str = theVersion.getVersionImplementation().getFhirVersionPropertiesFile();
-		Properties prop = new Properties();
-		try {
+		try (InputStream str = theVersion.getVersionImplementation().getFhirVersionPropertiesFile()) {
+			Properties prop = new Properties();
 			prop.load(str);
 			for (Entry<Object, Object> nextEntry : prop.entrySet()) {
 				String nextKey = nextEntry.getKey().toString();

@@ -542,8 +541,6 @@ class ModelScanner {
 			}
 		} catch (IOException e) {
 			throw new ConfigurationException("Failed to load model property file from classpath: " + "/ca/uhn/fhir/model/dstu/model.properties");
-		} finally {
-			IOUtils.closeQuietly(str);
 		}

 		return retVal;
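This is the recurring cleanup in this commit: the stream moves into the try header so it is closed automatically, which lets the explicit finally { IOUtils.closeQuietly(...) } blocks disappear. A minimal sketch of the pattern, with an illustrative property-file path:

    import java.io.IOException;
    import java.io.InputStream;
    import java.util.Properties;

    public class TryWithResourcesDemo {

        public static Properties load(String theClasspathResource) {
            // The resource is closed automatically when the try block exits, even on exception
            try (InputStream is = TryWithResourcesDemo.class.getResourceAsStream(theClasspathResource)) {
                Properties p = new Properties();
                p.load(is); // note: load(null) throws NullPointerException if the resource is missing
                return p;
            } catch (IOException e) {
                throw new IllegalStateException("Failed to load " + theClasspathResource, e);
            }
        }
    }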
@@ -0,0 +1,8 @@
+package ca.uhn.fhir.util;
+
+public enum VersionEnum {
+
+	V3_4_0,
+	V3_5_0
+
+}
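The constant names encode releases as V<major>_<minor>_<patch>, so a HAPI version string maps onto a constant mechanically. A small sketch of that mapping (the VersionEnumTest added later in this commit performs the same transformation against VersionUtil.getVersion()):

    package ca.uhn.fhir.util;

    public class VersionEnumDemo {
        public static void main(String[] args) {
            String version = "3.5.0-SNAPSHOT"; // e.g. the value reported by VersionUtil.getVersion()
            // "3.5.0-SNAPSHOT" -> "V3_5_0": dots become underscores, the snapshot suffix is dropped
            String constantName = "V" + version.replace(".", "_").replace("-SNAPSHOT", "");
            System.out.println(VersionEnum.valueOf(constantName)); // prints V3_5_0
        }
    }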
@@ -23,8 +23,6 @@ package ca.uhn.fhir.util;
 import java.io.InputStream;
 import java.util.Properties;

-import org.apache.commons.io.IOUtils;
-
 /**
  * Used internally by HAPI to log the version of the HAPI FHIR framework
  * once, when the framework is first loaded by the classloader.

@@ -43,17 +41,13 @@ public class VersionUtil {
 	}

 	private static void initialize() {
-		InputStream is = null;
-		try {
-			is = VersionUtil.class.getResourceAsStream("/ca/uhn/fhir/hapi-version.properties");
+		try (InputStream is = VersionUtil.class.getResourceAsStream("/ca/uhn/fhir/hapi-version.properties")) {
 			Properties p = new Properties();
 			p.load(is);
 			ourVersion = p.getProperty("version");
 			ourLog.info("HAPI FHIR version is: " + ourVersion);
 		} catch (Exception e) {
 			ourLog.warn("Unable to determine HAPI version information", e);
-		} finally {
-			IOUtils.closeQuietly(is);
 		}
 	}
@@ -212,7 +212,6 @@ public class SchemaBaseValidator implements IValidatorModule {

 		InputStream baseIs = FhirValidator.class.getResourceAsStream(pathToBase);
 		if (baseIs == null) {
-			IOUtils.closeQuietly(baseIs);
 			throw new InternalErrorException("Schema file not found: " + pathToBase);
 		}
@@ -9,9 +9,9 @@ package ca.uhn.fhir.validation.schematron;
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- *
+ *
  * http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

@@ -30,12 +30,14 @@ import com.helger.commons.error.list.IErrorList;
 import com.helger.schematron.ISchematronResource;
 import com.helger.schematron.SchematronHelper;
 import com.helger.schematron.xslt.SchematronResourceSCH;
-import org.apache.commons.io.IOUtils;
 import org.hl7.fhir.instance.model.api.IBaseBundle;
 import org.hl7.fhir.instance.model.api.IBaseResource;
 import org.oclc.purl.dsdl.svrl.SchematronOutputType;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;

 import javax.xml.transform.stream.StreamSource;
+import java.io.IOException;
 import java.io.InputStream;
 import java.io.StringReader;
 import java.util.HashMap;

@@ -49,9 +51,13 @@ import java.util.Map;
  */
 public class SchematronBaseValidator implements IValidatorModule {

-	private Map<Class<? extends IBaseResource>, ISchematronResource> myClassToSchematron = new HashMap<Class<? extends IBaseResource>, ISchematronResource>();
+	private static final Logger ourLog = LoggerFactory.getLogger(SchematronBaseValidator.class);
+	private final Map<Class<? extends IBaseResource>, ISchematronResource> myClassToSchematron = new HashMap<>();
 	private FhirContext myCtx;

+	/**
+	 * Constructor
+	 */
 	public SchematronBaseValidator(FhirContext theContext) {
 		myCtx = theContext;
 	}

@@ -66,7 +72,7 @@ public class SchematronBaseValidator implements IValidatorModule {
 				validateResource(ValidationContext.subContext(theCtx, nextSubResource));
 			}
 		}

 		ISchematronResource sch = getSchematron(theCtx);
 		String resourceAsString;
 		if (theCtx.getResourceAsStringEncoding() == EncodingEnum.XML) {

@@ -127,15 +133,14 @@ public class SchematronBaseValidator implements IValidatorModule {
 		}

 		String pathToBase = myCtx.getVersion().getPathToSchemaDefinitions() + '/' + theCtx.getFhirContext().getResourceDefinition(theCtx.getResource()).getBaseDefinition().getName().toLowerCase()
-				+ ".sch";
-		InputStream baseIs = FhirValidator.class.getResourceAsStream(pathToBase);
-		try {
+			+ ".sch";
+		try (InputStream baseIs = FhirValidator.class.getResourceAsStream(pathToBase)) {
 			if (baseIs == null) {
 				throw new InternalErrorException("Failed to load schematron for resource '" + theCtx.getFhirContext().getResourceDefinition(theCtx.getResource()).getBaseDefinition().getName() + "'. "
-						+ SchemaBaseValidator.RESOURCES_JAR_NOTE);
+					+ SchemaBaseValidator.RESOURCES_JAR_NOTE);
 			}
-		} finally {
-			IOUtils.closeQuietly(baseIs);
+		} catch (IOException e) {
+			ourLog.error("Failed to close stream", e);
 		}

 		retVal = SchematronResourceSCH.fromClassPath(pathToBase);

@@ -143,5 +148,4 @@ public class SchematronBaseValidator implements IValidatorModule {
 		return retVal;
 	}
-	}

 }
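One detail the rewritten block above leans on: try-with-resources tolerates a null resource (close() is only invoked on a non-null one), so the null check and the InternalErrorException can live inside the try without any guard around the close. A standalone illustration:

    import java.io.InputStream;

    public class NullResourceDemo {
        public static void main(String[] args) throws Exception {
            // getResourceAsStream returns null for a missing resource; the implicit
            // close() at the end of the try is simply skipped in that case
            try (InputStream is = NullResourceDemo.class.getResourceAsStream("/does/not/exist")) {
                if (is == null) {
                    System.out.println("resource missing; no NullPointerException on exit");
                }
            }
        }
    }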
@@ -0,0 +1,28 @@
+package ca.uhn.fhir.util;
+
+import org.junit.Test;
+
+import java.util.Arrays;
+import java.util.List;
+import java.util.stream.Collectors;
+
+import static org.hamcrest.CoreMatchers.hasItem;
+import static org.junit.Assert.assertThat;
+
+public class VersionEnumTest {
+
+	@Test
+	public void testCurrentVersionExists() {
+		List<String> versions = Arrays.stream(VersionEnum.values())
+			.map(Enum::name)
+			.collect(Collectors.toList());
+
+		String version = VersionUtil.getVersion();
+		version = "V" + version.replace(".", "_");
+		version = version.replace("-SNAPSHOT", "");
+
+		assertThat(versions, hasItem(version));
+	}
+
+
+}
@@ -4,7 +4,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>3.5.0-SNAPSHOT</version>
+		<version>3.5.0</version>
 		<relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>

@@ -38,6 +38,11 @@
 			<artifactId>hapi-fhir-igpacks</artifactId>
 			<version>${project.version}</version>
 		</dependency>
+		<dependency>
+			<groupId>ca.uhn.hapi.fhir</groupId>
+			<artifactId>hapi-fhir-jpaserver-migrate</artifactId>
+			<version>${project.version}</version>
+		</dependency>

 		<dependency>
 			<groupId>org.apache.commons</groupId>

@@ -83,6 +88,27 @@
 			<artifactId>commons-cli</artifactId>
 		</dependency>

+		<dependency>
+			<groupId>org.apache.derby</groupId>
+			<artifactId>derby</artifactId>
+		</dependency>
+		<dependency>
+			<groupId>org.mariadb.jdbc</groupId>
+			<artifactId>mariadb-java-client</artifactId>
+		</dependency>
+		<dependency>
+			<groupId>mysql</groupId>
+			<artifactId>mysql-connector-java</artifactId>
+		</dependency>
+		<dependency>
+			<groupId>org.postgresql</groupId>
+			<artifactId>postgresql</artifactId>
+		</dependency>
+		<dependency>
+			<groupId>com.microsoft.sqlserver</groupId>
+			<artifactId>mssql-jdbc</artifactId>
+		</dependency>
+
 		<dependency>
 			<groupId>org.springframework</groupId>
 			<artifactId>spring-core</artifactId>
@@ -27,7 +27,7 @@ import ch.qos.logback.core.joran.spi.JoranException;
 import com.helger.commons.io.file.FileHelper;
 import org.apache.commons.cli.*;
 import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.lang3.text.WordUtils;
+import org.apache.commons.text.WordUtils;
 import org.fusesource.jansi.Ansi;
 import org.fusesource.jansi.AnsiConsole;
 import org.slf4j.LoggerFactory;

@@ -77,11 +77,6 @@ public abstract class BaseApp {
 	}

 	private void logCommandUsageNoHeader(BaseCommand theCommand) {
-		System.out.println("Usage:");
-		System.out.println(" " + provideCommandName() + " " + theCommand.getCommandName() + " [options]");
-		System.out.println();
-		System.out.println("Options:");
-
 		// This is passed in from the launch script
 		String columnsString = System.getProperty("columns");
 		int columns;

@@ -93,11 +88,34 @@ public abstract class BaseApp {
 			columns = 80;
 		}

+		// Usage
+		System.out.println("Usage:");
+		System.out.println(" " + provideCommandName() + " " + theCommand.getCommandName() + " [options]");
+		System.out.println();
+
+		// Description
+		String wrapped = WordUtils.wrap(theCommand.getCommandDescription(), columns);
+		System.out.println(wrapped);
+		System.out.println();
+
+		// Usage Notes
+		List<String> usageNotes = theCommand.provideUsageNotes();
+		for (String next : usageNotes) {
+			wrapped = WordUtils.wrap(next, columns);
+			System.out.println(wrapped);
+			System.out.println();
+		}
+
+		// Options
+		System.out.println("Options:");
 		HelpFormatter fmt = new HelpFormatter();
 		PrintWriter pw = new PrintWriter(System.out);
 		fmt.printOptions(pw, columns, theCommand.getOptions(), 2, 2);
 		pw.flush();
 		pw.close();
+
+		// That's it!
 		System.out.println();
 	}

 	private void logUsage() {

@@ -139,6 +157,7 @@ public abstract class BaseApp {
 		commands.add(new IgPackUploader());
 		commands.add(new ExportConceptMapToCsvCommand());
 		commands.add(new ImportCsvToConceptMapCommand());
+		commands.add(new HapiMigrateDatabaseCommand());
 		return commands;
 	}
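The usage printer now takes WordUtils from commons-text (the dependency added to the POMs above); the copy in org.apache.commons.lang3.text is deprecated. A standalone look at the wrap call the new help output relies on:

    import org.apache.commons.text.WordUtils;

    public class WrapDemo {
        public static void main(String[] args) {
            String description = "This command migrates a HAPI FHIR JPA database from one version of HAPI FHIR to a newer version";
            // Wrap at 40 columns; line breaks are inserted at word boundaries
            System.out.println(WordUtils.wrap(description, 40));
        }
    }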
@@ -9,9 +9,9 @@ package ca.uhn.fhir.cli;
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
- *
+ *
 * http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

@@ -36,7 +36,6 @@ import org.apache.http.client.methods.CloseableHttpResponse;
 import org.apache.http.client.methods.HttpGet;
 import org.apache.http.impl.client.CloseableHttpClient;
 import org.apache.http.impl.client.HttpClientBuilder;
-import org.fusesource.jansi.Ansi;
 import org.hl7.fhir.instance.model.api.IBaseBundle;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

@@ -52,6 +51,7 @@ import static org.apache.commons.lang3.StringUtils.*;
 import static org.fusesource.jansi.Ansi.ansi;

 public abstract class BaseCommand implements Comparable<BaseCommand> {
+	public static final String PROMPT = "PROMPT";
 	protected static final String BASE_URL_PARAM = "t";
 	protected static final String BASE_URL_PARAM_LONGOPT = "target";
 	protected static final String BASE_URL_PARAM_NAME = "target";

@@ -72,7 +72,6 @@ public abstract class BaseCommand implements Comparable<BaseCommand> {
 	protected static final String VERBOSE_LOGGING_PARAM_DESC = "If specified, verbose logging will be used.";
 	// TODO: Don't use qualified names for loggers in HAPI CLI.
 	private static final Logger ourLog = LoggerFactory.getLogger(BaseCommand.class);
-	public static final String PROMPT = "PROMPT";
 	protected FhirContext myFhirCtx;

 	public BaseCommand() {

@@ -99,7 +98,7 @@ public abstract class BaseCommand implements Comparable<BaseCommand> {
 		try {
 			retVal = reader.readLine();
 		} catch (IOException e) {
-			throw new ParseException("Failed to read input from user: "+ e.toString());
+			throw new ParseException("Failed to read input from user: " + e.toString());
 		}

 		System.out.print(ansi().boldOff().fgDefault());

@@ -117,7 +116,6 @@ public abstract class BaseCommand implements Comparable<BaseCommand> {
 	}


-
 	private void addOption(Options theOptions, OptionGroup theOptionGroup, boolean theRequired, String theOpt, String theLongOpt, boolean theHasArgument, String theArgumentName, String theDescription) {
 		Option option = createOption(theRequired, theOpt, theLongOpt, theHasArgument, theDescription);
 		if (theHasArgument && isNotBlank(theArgumentName)) {
@@ -268,9 +266,12 @@
 		return basicAuthHeaderValue;
 	}

-	public <T extends Enum> T getAndParseOptionEnum(CommandLine theCommandLine, String theOption, Class<T> theEnumClass, T theDefault) throws ParseException {
+	public <T extends Enum> T getAndParseOptionEnum(CommandLine theCommandLine, String theOption, Class<T> theEnumClass, boolean theRequired, T theDefault) throws ParseException {
 		String val = theCommandLine.getOptionValue(theOption);
 		if (isBlank(val)) {
+			if (theRequired && theDefault == null) {
+				throw new ParseException("Missing required option -" + theOption);
+			}
 			return theDefault;
 		}
 		try {
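A hedged sketch of a call site for the widened signature (MigrateExampleCommand is a hypothetical subclass, not part of this commit): with theRequired set and no default, a missing option now fails parsing with a clear message instead of silently returning null.

    package ca.uhn.fhir.cli;

    import ca.uhn.fhir.util.VersionEnum;
    import org.apache.commons.cli.CommandLine;
    import org.apache.commons.cli.ParseException;

    public abstract class MigrateExampleCommand extends BaseCommand {

        @Override
        public void run(CommandLine theCommandLine) throws ParseException {
            // Required, no default: throws ParseException("Missing required option -f") when absent
            VersionEnum from = getAndParseOptionEnum(theCommandLine, "f", VersionEnum.class, true, null);
            // Optional with a fallback: a missing -t quietly yields V3_5_0
            VersionEnum to = getAndParseOptionEnum(theCommandLine, "t", VersionEnum.class, false, VersionEnum.V3_5_0);
            System.out.println("Would migrate " + from + " -> " + to);
        }
    }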
@@ -329,7 +330,7 @@ public abstract class BaseCommand implements Comparable<BaseCommand> {
 		File suppliedFile = new File(FilenameUtils.normalize(theFilepath));

 		if (suppliedFile.isDirectory()) {
-			inputFiles = FileUtils.listFiles(suppliedFile, new String[] {"zip"}, false);
+			inputFiles = FileUtils.listFiles(suppliedFile, new String[]{"zip"}, false);
 		} else {
 			inputFiles = Collections.singletonList(suppliedFile);
 		}

@@ -433,4 +434,8 @@ public abstract class BaseCommand implements Comparable<BaseCommand> {


 	public abstract void run(CommandLine theCommandLine) throws ParseException, ExecutionException;
+
+	public List<String> provideUsageNotes() {
+		return Collections.emptyList();
+	}
 }
@@ -0,0 +1,98 @@
+package ca.uhn.fhir.cli;
+
+import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
+import ca.uhn.fhir.jpa.migrate.Migrator;
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.Options;
+import org.apache.commons.cli.ParseException;
+
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+import java.util.stream.Collectors;
+
+public abstract class BaseMigrateDatabaseCommand<T extends Enum> extends BaseCommand {
+
+	private static final String MIGRATE_DATABASE = "migrate-database";
+
+	@Override
+	public String getCommandDescription() {
+		return "This command migrates a HAPI FHIR JPA database from one version of HAPI FHIR to a newer version";
+	}
+
+	protected abstract List<T> provideAllowedVersions();
+
+	protected abstract Class<T> provideVersionEnumType();
+
+	@Override
+	public String getCommandName() {
+		return MIGRATE_DATABASE;
+	}
+
+	@Override
+	public List<String> provideUsageNotes() {
+		String versions = "The following versions are supported: " +
+			provideAllowedVersions().stream().map(Enum::name).collect(Collectors.joining(", "));
+		return Collections.singletonList(versions);
+	}
+
+	@Override
+	public Options getOptions() {
+		Options retVal = new Options();
+
+		addOptionalOption(retVal, "r", "dry-run", false, "Log the SQL statements that would be executed but to not actually make any changes");
+
+		addRequiredOption(retVal, "u", "url", "URL", "The JDBC database URL");
+		addRequiredOption(retVal, "n", "username", "Username", "The JDBC database username");
+		addRequiredOption(retVal, "p", "password", "Password", "The JDBC database password");
+		addRequiredOption(retVal, "f", "from", "Version", "The database schema version to migrate FROM");
+		addRequiredOption(retVal, "t", "to", "Version", "The database schema version to migrate TO");
+		addRequiredOption(retVal, "d", "driver", "Driver", "The database driver to use (Options are " + driverOptions() + ")");
+
+		return retVal;
+	}
+
+	private String driverOptions() {
+		return Arrays.stream(DriverTypeEnum.values()).map(Enum::name).collect(Collectors.joining(", "));
+	}
+
+	@Override
+	public void run(CommandLine theCommandLine) throws ParseException {
+
+		String url = theCommandLine.getOptionValue("u");
+		String username = theCommandLine.getOptionValue("n");
+		String password = theCommandLine.getOptionValue("p");
+		DriverTypeEnum driverType;
+		String driverTypeString = theCommandLine.getOptionValue("d");
+		try {
+			driverType = DriverTypeEnum.valueOf(driverTypeString);
+		} catch (Exception e) {
+			throw new ParseException("Invalid driver type \"" + driverTypeString + "\". Valid values are: " + driverOptions());
+		}
+
+		T from = getAndParseOptionEnum(theCommandLine, "f", provideVersionEnumType(), true, null);
+		validateVersionSupported(from);
+		T to = getAndParseOptionEnum(theCommandLine, "t", provideVersionEnumType(), true, null);
+		validateVersionSupported(to);
+
+		boolean dryRun = theCommandLine.hasOption("r");
+
+		Migrator migrator = new Migrator();
+		migrator.setConnectionUrl(url);
+		migrator.setDriverType(driverType);
+		migrator.setUsername(username);
+		migrator.setPassword(password);
+		migrator.setDryRun(dryRun);
+		addTasks(migrator, from, to);
+
+		migrator.migrate();
+	}
+
+	private void validateVersionSupported(T theFrom) throws ParseException {
+		if (provideAllowedVersions().contains(theFrom) == false) {
+			throw new ParseException("The version " + theFrom + " is not supported for migration");
+		}
+	}
+
+	protected abstract void addTasks(Migrator theMigrator, T theFrom, T theTo);
+}
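End to end, the command is invoked as migrate-database with the options defined above. A sketch driving the CLI entry point in-process, the same way the integration test later in this commit does (the Derby URL here is illustrative):

    package ca.uhn.fhir.cli;

    public class MigrateInvocationDemo {
        public static void main(String[] theArgs) {
            String[] args = {
                "migrate-database",
                "-d", "DERBY_EMBEDDED",
                "-u", "jdbc:derby:directory:target/migrator_demo;create=true",
                "-n", "", // username (empty for embedded Derby)
                "-p", "", // password
                "-r",     // dry run: log the SQL without executing it
                "-f", "V3_4_0",
                "-t", "V3_5_0"
            };
            App.main(args);
        }
    }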
@@ -0,0 +1,28 @@
+package ca.uhn.fhir.cli;
+
+import ca.uhn.fhir.jpa.migrate.Migrator;
+import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask;
+import ca.uhn.fhir.jpa.migrate.tasks.HapiFhirJpaMigrationTasks;
+import ca.uhn.fhir.util.VersionEnum;
+
+import java.util.Arrays;
+import java.util.List;
+
+public class HapiMigrateDatabaseCommand extends BaseMigrateDatabaseCommand<VersionEnum> {
+
+	@Override
+	protected List<VersionEnum> provideAllowedVersions() {
+		return Arrays.asList(VersionEnum.values());
+	}
+
+	@Override
+	protected Class<VersionEnum> provideVersionEnumType() {
+		return VersionEnum.class;
+	}
+
+	@Override
+	protected void addTasks(Migrator theMigrator, VersionEnum theFrom, VersionEnum theTo) {
+		List<BaseTask<?>> tasks = new HapiFhirJpaMigrationTasks().getTasks(theFrom, theTo);
+		tasks.forEach(theMigrator::addTask);
+	}
+}
@@ -26,6 +26,7 @@ import ca.uhn.fhir.jpa.demo.FhirServerConfig;
 import ca.uhn.fhir.jpa.demo.FhirServerConfigDstu3;
 import ca.uhn.fhir.jpa.demo.FhirServerConfigR4;
 import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.OptionGroup;
 import org.apache.commons.cli.Options;
 import org.apache.commons.cli.ParseException;
 import org.apache.commons.io.IOUtils;

@@ -71,6 +72,8 @@ public class RunServerCommand extends BaseCommand {
 		options.addOption(null, OPTION_ALLOW_EXTERNAL_REFS, false, "If this flag is set, the server will allow resources to be persisted contaning external resource references");
 		options.addOption(null, OPTION_DISABLE_REFERENTIAL_INTEGRITY, false, "If this flag is set, the server will not enforce referential integrity");

+		addOptionalOption(options, "u", "url", "Url", "If this option is set, specifies the JDBC URL to use for the database connection");
+
 		Long defaultReuseSearchResults = DaoConfig.DEFAULT_REUSE_CACHED_SEARCH_RESULTS_FOR_MILLIS;
 		String defaultReuseSearchResultsStr = defaultReuseSearchResults == null ? "off" : String.valueOf(defaultReuseSearchResults);
 		options.addOption(null, OPTION_REUSE_SEARCH_RESULTS_MILLIS, true, "The time in milliseconds within which the same results will be returned for multiple identical searches, or \"off\" (default is " + defaultReuseSearchResultsStr + ")");

@@ -106,6 +109,8 @@ public class RunServerCommand extends BaseCommand {
 			ContextHolder.setDisableReferentialIntegrity(true);
 		}

+		ContextHolder.setDatabaseUrl(theCommandLine.getOptionValue("u"));
+
 		String reuseSearchResults = theCommandLine.getOptionValue(OPTION_REUSE_SEARCH_RESULTS_MILLIS);
 		if (reuseSearchResults != null) {
 			if (reuseSearchResults.equals("off")) {
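Together with the ContextHolder and FhirDbConfig changes further down, this threads an optional JDBC URL through to the demo server's data source, so an invocation along the lines of run-server -u "jdbc:derby:directory:target/mydb;create=true" (an illustrative URL) would override the default embedded-Derby location.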
@@ -38,6 +38,12 @@
 		<appender-ref ref="STDOUT" />
 	</logger>

+	<!--
+	Always log the migrator
+	-->
+	<logger name="ca.uhn.fhir.jpa.migrate" additivity="false" level="info">
+		<appender-ref ref="STDOUT" />
+	</logger>

 	<root level="warn">
 		<appender-ref ref="STDOUT" />
@@ -0,0 +1,93 @@
+package ca.uhn.fhir.cli;
+
+import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
+import com.google.common.base.Charsets;
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.io.IOUtils;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import static org.apache.commons.lang3.StringUtils.isBlank;
+
+public class HapiMigrateDatabaseCommandTest {
+
+	private static final Logger ourLog = LoggerFactory.getLogger(HapiMigrateDatabaseCommandTest.class);
+
+	static {
+		System.setProperty("test", "true");
+	}
+
+	@Test
+	public void testMigrate() throws IOException {
+
+		File directory = new File("target/migrator_derby_test_340_350");
+		if (directory.exists()) {
+			FileUtils.deleteDirectory(directory);
+		}
+
+		String url = "jdbc:derby:directory:target/migrator_derby_test_340_350;create=true";
+		DriverTypeEnum.ConnectionProperties connectionProperties = DriverTypeEnum.DERBY_EMBEDDED.newConnectionProperties(url, "", "");
+
+		String script = IOUtils.toString(HapiMigrateDatabaseCommandTest.class.getResourceAsStream("/persistence_create_derby107_340.sql"), Charsets.UTF_8);
+		List<String> scriptStatements = new ArrayList<>(Arrays.asList(script.split("\n")));
+		for (int i = 0; i < scriptStatements.size(); i++) {
+			String nextStatement = scriptStatements.get(i);
+			if (isBlank(nextStatement)) {
+				scriptStatements.remove(i);
+				i--;
+				continue;
+			}
+
+			nextStatement = nextStatement.trim();
+			while (nextStatement.endsWith(";")) {
+				nextStatement = nextStatement.substring(0, nextStatement.length() - 1);
+			}
+			scriptStatements.set(i, nextStatement);
+		}
+
+		connectionProperties.getTxTemplate().execute(t -> {
+			for (String next : scriptStatements) {
+				connectionProperties.newJdbcTemplate().execute(next);
+			}
+			return null;
+		});
+
+		ourLog.info("**********************************************");
+		ourLog.info("Done Setup, Starting Dry Run...");
+		ourLog.info("**********************************************");
+
+		String[] args = new String[]{
+			"migrate-database",
+			"-d", "DERBY_EMBEDDED",
+			"-u", url,
+			"-n", "",
+			"-p", "",
+			"-r",
+			"-f", "V3_4_0",
+			"-t", "V3_5_0"
+		};
+		App.main(args);
+
+		ourLog.info("**********************************************");
+		ourLog.info("Done Setup, Starting Migration...");
+		ourLog.info("**********************************************");
+
+		args = new String[]{
+			"migrate-database",
+			"-d", "DERBY_EMBEDDED",
+			"-u", url,
+			"-n", "",
+			"-p", "",
+			"-f", "V3_4_0",
+			"-t", "V3_5_0"
+		};
+		App.main(args);
+	}
+}
@@ -0,0 +1,156 @@
+create sequence SEQ_CNCPT_MAP_GRP_ELM_TGT_PID start with 1 increment by 50;
+create sequence SEQ_CODESYSTEM_PID start with 1 increment by 50;
+create sequence SEQ_CODESYSTEMVER_PID start with 1 increment by 50;
+create sequence SEQ_CONCEPT_DESIG_PID start with 1 increment by 50;
+create sequence SEQ_CONCEPT_MAP_GROUP_PID start with 1 increment by 50;
+create sequence SEQ_CONCEPT_MAP_GRP_ELM_PID start with 1 increment by 50;
+create sequence SEQ_CONCEPT_MAP_PID start with 1 increment by 50;
+create sequence SEQ_CONCEPT_PC_PID start with 1 increment by 50;
+create sequence SEQ_CONCEPT_PID start with 1 increment by 50;
+create sequence SEQ_CONCEPT_PROP_PID start with 1 increment by 50;
+create sequence SEQ_FORCEDID_ID start with 1 increment by 50;
+create sequence SEQ_HISTORYTAG_ID start with 1 increment by 50;
+create sequence SEQ_IDXCMPSTRUNIQ_ID start with 1 increment by 50;
+create sequence SEQ_RESLINK_ID start with 1 increment by 50;
+create sequence SEQ_RESOURCE_HISTORY_ID start with 1 increment by 50;
+create sequence SEQ_RESOURCE_ID start with 1 increment by 50;
+create sequence SEQ_RESPARMPRESENT_ID start with 1 increment by 50;
+create sequence SEQ_RESTAG_ID start with 1 increment by 50;
+create sequence SEQ_SEARCH start with 1 increment by 50;
+create sequence SEQ_SEARCH_INC start with 1 increment by 50;
+create sequence SEQ_SEARCH_RES start with 1 increment by 50;
+create sequence SEQ_SEARCHPARM_ID start with 1 increment by 50;
+create sequence SEQ_SPIDX_COORDS start with 1 increment by 50;
+create sequence SEQ_SPIDX_DATE start with 1 increment by 50;
+create sequence SEQ_SPIDX_NUMBER start with 1 increment by 50;
+create sequence SEQ_SPIDX_QUANTITY start with 1 increment by 50;
+create sequence SEQ_SPIDX_STRING start with 1 increment by 50;
+create sequence SEQ_SPIDX_TOKEN start with 1 increment by 50;
+create sequence SEQ_SPIDX_URI start with 1 increment by 50;
+create sequence SEQ_SUBSCRIPTION_ID start with 1 increment by 50;
+create sequence SEQ_TAGDEF_ID start with 1 increment by 50;
+create table HFJ_FORCED_ID (PID bigint not null, FORCED_ID varchar(100) not null, RESOURCE_PID bigint not null, RESOURCE_TYPE varchar(100) default '', primary key (PID));
+create table HFJ_HISTORY_TAG (PID bigint not null, TAG_ID bigint, RES_ID bigint not null, RES_TYPE varchar(30) not null, RES_VER_PID bigint not null, primary key (PID));
+create table HFJ_IDX_CMP_STRING_UNIQ (PID bigint not null, IDX_STRING varchar(150) not null, RES_ID bigint, primary key (PID));
+create table HFJ_RES_LINK (PID bigint not null, SRC_PATH varchar(100) not null, SRC_RESOURCE_ID bigint not null, SOURCE_RESOURCE_TYPE varchar(30) default '' not null, TARGET_RESOURCE_ID bigint, TARGET_RESOURCE_TYPE varchar(30) default '' not null, TARGET_RESOURCE_URL varchar(200), SP_UPDATED timestamp, primary key (PID));
+create table HFJ_RES_PARAM_PRESENT (PID bigint not null, SP_PRESENT boolean not null, RES_ID bigint not null, SP_ID bigint not null, primary key (PID));
+create table HFJ_RES_TAG (PID bigint not null, TAG_ID bigint, RES_ID bigint, RES_TYPE varchar(30) not null, primary key (PID));
+create table HFJ_RES_VER (PID bigint not null, RES_DELETED_AT timestamp, RES_VERSION varchar(7), HAS_TAGS boolean not null, RES_PUBLISHED timestamp not null, RES_UPDATED timestamp not null, RES_ENCODING varchar(5) not null, RES_TEXT blob, RES_ID bigint, RES_TYPE varchar(30) not null, RES_VER bigint not null, FORCED_ID_PID bigint, primary key (PID));
+create table HFJ_RESOURCE (RES_ID bigint not null, RES_DELETED_AT timestamp, RES_VERSION varchar(7), HAS_TAGS boolean not null, RES_PUBLISHED timestamp not null, RES_UPDATED timestamp not null, SP_HAS_LINKS boolean, HASH_SHA256 varchar(64), SP_INDEX_STATUS bigint, RES_LANGUAGE varchar(20), SP_CMPSTR_UNIQ_PRESENT boolean, SP_COORDS_PRESENT boolean, SP_DATE_PRESENT boolean, SP_NUMBER_PRESENT boolean, SP_QUANTITY_PRESENT boolean, SP_STRING_PRESENT boolean, SP_TOKEN_PRESENT boolean, SP_URI_PRESENT boolean, RES_PROFILE varchar(200), RES_TYPE varchar(30), RES_VER bigint, FORCED_ID_PID bigint, primary key (RES_ID));
+create table HFJ_SEARCH (PID bigint not null, CREATED timestamp not null, FAILURE_CODE integer, FAILURE_MESSAGE varchar(500), LAST_UPDATED_HIGH timestamp, LAST_UPDATED_LOW timestamp, NUM_FOUND integer not null, PREFERRED_PAGE_SIZE integer, RESOURCE_ID bigint, RESOURCE_TYPE varchar(200), SEARCH_LAST_RETURNED timestamp, SEARCH_QUERY_STRING clob(10000), SEARCH_QUERY_STRING_HASH integer, SEARCH_TYPE integer not null, SEARCH_STATUS varchar(10) not null, TOTAL_COUNT integer, SEARCH_UUID varchar(40) not null, primary key (PID));
+create table HFJ_SEARCH_INCLUDE (PID bigint not null, SEARCH_INCLUDE varchar(200) not null, INC_RECURSE boolean not null, REVINCLUDE boolean not null, SEARCH_PID bigint not null, primary key (PID));
+create table HFJ_SEARCH_PARM (PID bigint not null, PARAM_NAME varchar(100) not null, RES_TYPE varchar(30) not null, primary key (PID));
+create table HFJ_SEARCH_RESULT (PID bigint not null, SEARCH_ORDER integer not null, RESOURCE_PID bigint not null, SEARCH_PID bigint not null, primary key (PID));
+create table HFJ_SPIDX_COORDS (SP_ID bigint not null, SP_MISSING boolean, SP_NAME varchar(100) not null, RES_ID bigint, RES_TYPE varchar(255) not null, SP_UPDATED timestamp, SP_LATITUDE double, SP_LONGITUDE double, primary key (SP_ID));
+create table HFJ_SPIDX_DATE (SP_ID bigint not null, SP_MISSING boolean, SP_NAME varchar(100) not null, RES_ID bigint, RES_TYPE varchar(255) not null, SP_UPDATED timestamp, SP_VALUE_HIGH timestamp, SP_VALUE_LOW timestamp, primary key (SP_ID));
+create table HFJ_SPIDX_NUMBER (SP_ID bigint not null, SP_MISSING boolean, SP_NAME varchar(100) not null, RES_ID bigint, RES_TYPE varchar(255) not null, SP_UPDATED timestamp, SP_VALUE numeric(19,2), primary key (SP_ID));
+create table HFJ_SPIDX_QUANTITY (SP_ID bigint not null, SP_MISSING boolean, SP_NAME varchar(100) not null, RES_ID bigint, RES_TYPE varchar(255) not null, SP_UPDATED timestamp, HASH_UNITS_AND_VALPREFIX bigint, HASH_VALPREFIX bigint, SP_SYSTEM varchar(200), SP_UNITS varchar(200), SP_VALUE numeric(19,2), primary key (SP_ID));
+create table HFJ_SPIDX_STRING (SP_ID bigint not null, SP_MISSING boolean, SP_NAME varchar(100) not null, RES_ID bigint, RES_TYPE varchar(255) not null, SP_UPDATED timestamp, HASH_EXACT bigint, HASH_NORM_PREFIX bigint, SP_VALUE_EXACT varchar(200), SP_VALUE_NORMALIZED varchar(200), primary key (SP_ID));
+create table HFJ_SPIDX_TOKEN (SP_ID bigint not null, SP_MISSING boolean, SP_NAME varchar(100) not null, RES_ID bigint, RES_TYPE varchar(255) not null, SP_UPDATED timestamp, HASH_SYS bigint, HASH_SYS_AND_VALUE bigint, HASH_VALUE bigint, SP_SYSTEM varchar(200), SP_VALUE varchar(200), primary key (SP_ID));
+create table HFJ_SPIDX_URI (SP_ID bigint not null, SP_MISSING boolean, SP_NAME varchar(100) not null, RES_ID bigint, RES_TYPE varchar(255) not null, SP_UPDATED timestamp, HASH_URI bigint, SP_URI varchar(255), primary key (SP_ID));
+create table HFJ_SUBSCRIPTION_STATS (PID bigint not null, CREATED_TIME timestamp not null, RES_ID bigint, primary key (PID));
+create table HFJ_TAG_DEF (TAG_ID bigint not null, TAG_CODE varchar(200), TAG_DISPLAY varchar(200), TAG_SYSTEM varchar(200), TAG_TYPE integer not null, primary key (TAG_ID));
+create table TRM_CODESYSTEM (PID bigint not null, CODE_SYSTEM_URI varchar(255) not null, CS_NAME varchar(255), RES_ID bigint, CURRENT_VERSION_PID bigint, primary key (PID));
+create table TRM_CODESYSTEM_VER (PID bigint not null, CS_VERSION_ID varchar(255), CODESYSTEM_PID bigint, RES_ID bigint not null, primary key (PID));
+create table TRM_CONCEPT (PID bigint not null, CODE varchar(100) not null, CODESYSTEM_PID bigint, DISPLAY varchar(400), INDEX_STATUS bigint, CODE_SEQUENCE integer, primary key (PID));
+create table TRM_CONCEPT_DESIG (PID bigint not null, LANG varchar(500), USE_CODE varchar(500), USE_DISPLAY varchar(500), USE_SYSTEM varchar(500), VAL varchar(500) not null, CONCEPT_PID bigint, primary key (PID));
+create table TRM_CONCEPT_MAP (PID bigint not null, RES_ID bigint, SOURCE_URL varchar(200), TARGET_URL varchar(200), URL varchar(200) not null, primary key (PID));
+create table TRM_CONCEPT_MAP_GROUP (PID bigint not null, myConceptMapUrl varchar(255), SOURCE_URL varchar(200) not null, mySourceValueSet varchar(255), SOURCE_VERSION varchar(100), TARGET_URL varchar(200) not null, myTargetValueSet varchar(255), TARGET_VERSION varchar(100), CONCEPT_MAP_PID bigint not null, primary key (PID));
+create table TRM_CONCEPT_MAP_GRP_ELEMENT (PID bigint not null, SOURCE_CODE varchar(100) not null, myConceptMapUrl varchar(255), SOURCE_DISPLAY varchar(400), mySystem varchar(255), mySystemVersion varchar(255), myValueSet varchar(255), CONCEPT_MAP_GROUP_PID bigint not null, primary key (PID));
+create table TRM_CONCEPT_MAP_GRP_ELM_TGT (PID bigint not null, TARGET_CODE varchar(50) not null, myConceptMapUrl varchar(255), TARGET_DISPLAY varchar(400), TARGET_EQUIVALENCE varchar(50), mySystem varchar(255), mySystemVersion varchar(255), myValueSet varchar(255), CONCEPT_MAP_GRP_ELM_PID bigint not null, primary key (PID));
+create table TRM_CONCEPT_PC_LINK (PID bigint not null, CHILD_PID bigint, PARENT_PID bigint, REL_TYPE integer, CODESYSTEM_PID bigint not null, primary key (PID));
+create table TRM_CONCEPT_PROPERTY (PID bigint not null, PROP_CODESYSTEM varchar(500), PROP_DISPLAY varchar(500), PROP_KEY varchar(500) not null, PROP_TYPE integer not null, PROP_VAL varchar(500), CONCEPT_PID bigint, primary key (PID));
+create index IDX_FORCEDID_TYPE_FORCEDID on HFJ_FORCED_ID (RESOURCE_TYPE, FORCED_ID);
+create unique index IDX_FORCEDID_RESID on HFJ_FORCED_ID (RESOURCE_PID);
+create unique index IDX_FORCEDID_TYPE_RESID on HFJ_FORCED_ID (RESOURCE_TYPE, RESOURCE_PID);
+create unique index IDX_RESHISTTAG_TAGID on HFJ_HISTORY_TAG (RES_VER_PID, TAG_ID);
+create index IDX_IDXCMPSTRUNIQ_RESOURCE on HFJ_IDX_CMP_STRING_UNIQ (RES_ID);
+create unique index IDX_IDXCMPSTRUNIQ_STRING on HFJ_IDX_CMP_STRING_UNIQ (IDX_STRING);
+create index IDX_RL_TPATHRES on HFJ_RES_LINK (SRC_PATH, TARGET_RESOURCE_ID);
+create index IDX_RL_SRC on HFJ_RES_LINK (SRC_RESOURCE_ID);
+create index IDX_RL_DEST on HFJ_RES_LINK (TARGET_RESOURCE_ID);
+create index IDX_RESPARMPRESENT_RESID on HFJ_RES_PARAM_PRESENT (RES_ID);
+create unique index IDX_RESPARMPRESENT_SPID_RESID on HFJ_RES_PARAM_PRESENT (SP_ID, RES_ID);
+create unique index IDX_RESTAG_TAGID on HFJ_RES_TAG (RES_ID, TAG_ID);
+create index IDX_RESVER_TYPE_DATE on HFJ_RES_VER (RES_TYPE, RES_UPDATED);
+create index IDX_RESVER_ID_DATE on HFJ_RES_VER (RES_ID, RES_UPDATED);
+create index IDX_RESVER_DATE on HFJ_RES_VER (RES_UPDATED);
+create unique index IDX_RESVER_ID_VER on HFJ_RES_VER (RES_ID, RES_VER);
+create index IDX_RES_DATE on HFJ_RESOURCE (RES_UPDATED);
+create index IDX_RES_LANG on HFJ_RESOURCE (RES_TYPE, RES_LANGUAGE);
+create index IDX_RES_PROFILE on HFJ_RESOURCE (RES_PROFILE);
+create index IDX_RES_TYPE on HFJ_RESOURCE (RES_TYPE);
+create index IDX_INDEXSTATUS on HFJ_RESOURCE (SP_INDEX_STATUS);
+create index IDX_SEARCH_LASTRETURNED on HFJ_SEARCH (SEARCH_LAST_RETURNED);
+create index IDX_SEARCH_RESTYPE_HASHS on HFJ_SEARCH (RESOURCE_TYPE, SEARCH_QUERY_STRING_HASH, CREATED);
+create unique index IDX_SEARCH_UUID on HFJ_SEARCH (SEARCH_UUID);
+create unique index IDX_SEARCHPARM_RESTYPE_SPNAME on HFJ_SEARCH_PARM (RES_TYPE, PARAM_NAME);
+create unique index IDX_SEARCHRES_ORDER on HFJ_SEARCH_RESULT (SEARCH_PID, SEARCH_ORDER);
+create index IDX_SP_COORDS on HFJ_SPIDX_COORDS (RES_TYPE, SP_NAME, SP_LATITUDE, SP_LONGITUDE);
+create index IDX_SP_COORDS_UPDATED on HFJ_SPIDX_COORDS (SP_UPDATED);
+create index IDX_SP_COORDS_RESID on HFJ_SPIDX_COORDS (RES_ID);
+create index IDX_SP_DATE on HFJ_SPIDX_DATE (RES_TYPE, SP_NAME, SP_VALUE_LOW, SP_VALUE_HIGH);
+create index IDX_SP_DATE_UPDATED on HFJ_SPIDX_DATE (SP_UPDATED);
+create index IDX_SP_DATE_RESID on HFJ_SPIDX_DATE (RES_ID);
+create index IDX_SP_NUMBER on HFJ_SPIDX_NUMBER (RES_TYPE, SP_NAME, SP_VALUE);
+create index IDX_SP_NUMBER_UPDATED on HFJ_SPIDX_NUMBER (SP_UPDATED);
+create index IDX_SP_NUMBER_RESID on HFJ_SPIDX_NUMBER (RES_ID);
+create index IDX_SP_QUANTITY on HFJ_SPIDX_QUANTITY (RES_TYPE, SP_NAME, SP_SYSTEM, SP_UNITS, SP_VALUE);
+create index IDX_SP_QUANTITY_UPDATED on HFJ_SPIDX_QUANTITY (SP_UPDATED);
+create index IDX_SP_QUANTITY_RESID on HFJ_SPIDX_QUANTITY (RES_ID);
+create index IDX_SP_STRING on HFJ_SPIDX_STRING (RES_TYPE, SP_NAME, SP_VALUE_NORMALIZED);
+create index IDX_SP_STRING_UPDATED on HFJ_SPIDX_STRING (SP_UPDATED);
+create index IDX_SP_STRING_RESID on HFJ_SPIDX_STRING (RES_ID);
+create index IDX_SP_TOKEN on HFJ_SPIDX_TOKEN (RES_TYPE, SP_NAME, SP_SYSTEM, SP_VALUE);
+create index IDX_SP_TOKEN_UNQUAL on HFJ_SPIDX_TOKEN (RES_TYPE, SP_NAME, SP_VALUE);
+create index IDX_SP_TOKEN_UPDATED on HFJ_SPIDX_TOKEN (SP_UPDATED);
+create index IDX_SP_TOKEN_RESID on HFJ_SPIDX_TOKEN (RES_ID);
+create index IDX_SP_URI on HFJ_SPIDX_URI (RES_TYPE, SP_NAME, SP_URI);
+create index IDX_SP_URI_RESTYPE_NAME on HFJ_SPIDX_URI (RES_TYPE, SP_NAME);
+create index IDX_SP_URI_UPDATED on HFJ_SPIDX_URI (SP_UPDATED);
+create index IDX_SP_URI_COORDS on HFJ_SPIDX_URI (RES_ID);
+create unique index IDX_SUBSC_RESID on HFJ_SUBSCRIPTION_STATS (RES_ID);
+create unique index IDX_TAGDEF_TYPESYSCODE on HFJ_TAG_DEF (TAG_TYPE, TAG_SYSTEM, TAG_CODE);
+create unique index IDX_CS_CODESYSTEM on TRM_CODESYSTEM (CODE_SYSTEM_URI);
+create index IDX_CONCEPT_INDEXSTATUS on TRM_CONCEPT (INDEX_STATUS);
+create unique index IDX_CONCEPT_CS_CODE on TRM_CONCEPT (CODESYSTEM_PID, CODE);
+create unique index IDX_CONCEPT_MAP_URL on TRM_CONCEPT_MAP (URL);
+create index IDX_CNCPT_MAP_GRP_CD on TRM_CONCEPT_MAP_GRP_ELEMENT (SOURCE_CODE);
+create index IDX_CNCPT_MP_GRP_ELM_TGT_CD on TRM_CONCEPT_MAP_GRP_ELM_TGT (TARGET_CODE);
+alter table HFJ_FORCED_ID add constraint FK_FORCEDID_RESOURCE foreign key (RESOURCE_PID) references HFJ_RESOURCE;
+alter table HFJ_HISTORY_TAG add constraint FKtderym7awj6q8iq5c51xv4ndw foreign key (TAG_ID) references HFJ_TAG_DEF;
+alter table HFJ_HISTORY_TAG add constraint FK_HISTORYTAG_HISTORY foreign key (RES_VER_PID) references HFJ_RES_VER;
+alter table HFJ_IDX_CMP_STRING_UNIQ add constraint FK_IDXCMPSTRUNIQ_RES_ID foreign key (RES_ID) references HFJ_RESOURCE;
+alter table HFJ_RES_LINK add constraint FK_RESLINK_SOURCE foreign key (SRC_RESOURCE_ID) references HFJ_RESOURCE;
+alter table HFJ_RES_LINK add constraint FK_RESLINK_TARGET foreign key (TARGET_RESOURCE_ID) references HFJ_RESOURCE;
+alter table HFJ_RES_PARAM_PRESENT add constraint FK_RESPARMPRES_RESID foreign key (RES_ID) references HFJ_RESOURCE;
+alter table HFJ_RES_PARAM_PRESENT add constraint FK_RESPARMPRES_SPID foreign key (SP_ID) references HFJ_SEARCH_PARM;
+alter table HFJ_RES_TAG add constraint FKbfcjbaftmiwr3rxkwsy23vneo foreign key (TAG_ID) references HFJ_TAG_DEF;
+alter table HFJ_RES_TAG add constraint FK_RESTAG_RESOURCE foreign key (RES_ID) references HFJ_RESOURCE;
+alter table HFJ_RES_VER add constraint FKh20i7lcbchkaxekvwg9ix4hc5 foreign key (FORCED_ID_PID) references HFJ_FORCED_ID;
+alter table HFJ_RESOURCE add constraint FKhjgj8cp879gfxko25cx5o692r foreign key (FORCED_ID_PID) references HFJ_FORCED_ID;
+alter table HFJ_SEARCH_INCLUDE add constraint FK_SEARCHINC_SEARCH foreign key (SEARCH_PID) references HFJ_SEARCH;
+alter table HFJ_SEARCH_RESULT add constraint FK_SEARCHRES_RES foreign key (RESOURCE_PID) references HFJ_RESOURCE;
+alter table HFJ_SEARCH_RESULT add constraint FK_SEARCHRES_SEARCH foreign key (SEARCH_PID) references HFJ_SEARCH;
+alter table HFJ_SPIDX_COORDS add constraint FKc97mpk37okwu8qvtceg2nh9vn foreign key (RES_ID) references HFJ_RESOURCE;
+alter table HFJ_SPIDX_DATE add constraint FK17s70oa59rm9n61k9thjqrsqm foreign key (RES_ID) references HFJ_RESOURCE;
+alter table HFJ_SPIDX_NUMBER add constraint FKcltihnc5tgprj9bhpt7xi5otb foreign key (RES_ID) references HFJ_RESOURCE;
+alter table HFJ_SPIDX_QUANTITY add constraint FKn603wjjoi1a6asewxbbd78bi5 foreign key (RES_ID) references HFJ_RESOURCE;
+alter table HFJ_SPIDX_STRING add constraint FK_SPIDXSTR_RESOURCE foreign key (RES_ID) references HFJ_RESOURCE;
+alter table HFJ_SPIDX_TOKEN add constraint FK7ulx3j1gg3v7maqrejgc7ybc4 foreign key (RES_ID) references HFJ_RESOURCE;
+alter table HFJ_SPIDX_URI add constraint FKgxsreutymmfjuwdswv3y887do foreign key (RES_ID) references HFJ_RESOURCE;
+alter table HFJ_SUBSCRIPTION_STATS add constraint FK_SUBSC_RESOURCE_ID foreign key (RES_ID) references HFJ_RESOURCE;
+alter table TRM_CODESYSTEM add constraint FK_TRMCODESYSTEM_CURVER foreign key (CURRENT_VERSION_PID) references TRM_CODESYSTEM_VER;
+alter table TRM_CODESYSTEM add constraint FK_TRMCODESYSTEM_RES foreign key (RES_ID) references HFJ_RESOURCE;
+alter table TRM_CODESYSTEM_VER add constraint FK_CODESYSVER_CS_ID foreign key (CODESYSTEM_PID) references TRM_CODESYSTEM;
+alter table TRM_CODESYSTEM_VER add constraint FK_CODESYSVER_RES_ID foreign key (RES_ID) references HFJ_RESOURCE;
+alter table TRM_CONCEPT add constraint FK_CONCEPT_PID_CS_PID foreign key (CODESYSTEM_PID) references TRM_CODESYSTEM_VER;
+alter table TRM_CONCEPT_DESIG add constraint FK_CONCEPTDESIG_CONCEPT foreign key (CONCEPT_PID) references TRM_CONCEPT;
+alter table TRM_CONCEPT_MAP add constraint FK_TRMCONCEPTMAP_RES foreign key (RES_ID) references HFJ_RESOURCE;
+alter table TRM_CONCEPT_MAP_GROUP add constraint FK_TCMGROUP_CONCEPTMAP foreign key (CONCEPT_MAP_PID) references TRM_CONCEPT_MAP;
+alter table TRM_CONCEPT_MAP_GRP_ELEMENT add constraint FK_TCMGELEMENT_GROUP foreign key (CONCEPT_MAP_GROUP_PID) references TRM_CONCEPT_MAP_GROUP;
+alter table TRM_CONCEPT_MAP_GRP_ELM_TGT add constraint FK_TCMGETARGET_ELEMENT foreign key (CONCEPT_MAP_GRP_ELM_PID) references TRM_CONCEPT_MAP_GRP_ELEMENT;
+alter table TRM_CONCEPT_PC_LINK add constraint FK_TERM_CONCEPTPC_CHILD foreign key (CHILD_PID) references TRM_CONCEPT;
+alter table TRM_CONCEPT_PC_LINK add constraint FK_TERM_CONCEPTPC_CS foreign key (CODESYSTEM_PID) references TRM_CODESYSTEM_VER;
+alter table TRM_CONCEPT_PC_LINK add constraint FK_TERM_CONCEPTPC_PARENT foreign key (PARENT_PID) references TRM_CONCEPT;
+alter table TRM_CONCEPT_PROPERTY add constraint FK_CONCEPTPROP_CONCEPT foreign key (CONCEPT_PID) references TRM_CONCEPT;
@@ -6,7 +6,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-fhir-cli</artifactId>
-		<version>3.5.0-SNAPSHOT</version>
+		<version>3.5.0</version>
 		<relativePath>../pom.xml</relativePath>
 	</parent>

@@ -6,7 +6,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-fhir-cli</artifactId>
-		<version>3.5.0-SNAPSHOT</version>
+		<version>3.5.0</version>
 		<relativePath>../pom.xml</relativePath>
 	</parent>
@@ -12,6 +12,7 @@ public class ContextHolder {
 	private static boolean ourDisableReferentialIntegrity;
 	private static String ourPath;
 	private static Long ourReuseSearchResultsMillis;
+	private static String ourDatabaseUrl;

 	static {
 		ourReuseSearchResultsMillis = DaoConfig.DEFAULT_REUSE_CACHED_SEARCH_RESULTS_FOR_MILLIS;

@@ -27,12 +28,15 @@ public class ContextHolder {
 			case DSTU2:
 				ourPath = "/baseDstu2/";
 				break;
+			case DSTU2_1:
+				break;
 			case DSTU3:
 				ourPath = "/baseDstu3/";
 				break;
 			case R4:
 				ourPath = "/baseR4/";
 				break;
+			case DSTU2_HL7ORG:
 			default:
 				throw new ParseException("FHIR version not supported by this command: " + theCtx.getVersion().getVersion());
 		}

@@ -68,4 +72,12 @@ public class ContextHolder {
 	public static void setDisableReferentialIntegrity(boolean theDisableReferentialIntegrity) {
 		ourDisableReferentialIntegrity = theDisableReferentialIntegrity;
 	}
+
+	public static String getDatabaseUrl() {
+		return ourDatabaseUrl;
+	}
+
+	public static void setDatabaseUrl(String theDatabaseUrl) {
+		ourDatabaseUrl = theDatabaseUrl;
+	}
 }
@@ -4,14 +4,38 @@ import java.util.Properties;

 import ca.uhn.fhir.jpa.search.LuceneSearchMappingFactory;
 import ca.uhn.fhir.jpa.util.DerbyTenSevenHapiFhirDialect;
+import org.apache.commons.dbcp2.BasicDataSource;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;

+import javax.sql.DataSource;
+
+import static org.apache.commons.lang3.StringUtils.isNotBlank;
+
+@SuppressWarnings("Duplicates")
 @Configuration
 public class FhirDbConfig {

 	private boolean ourLowMemMode;

+	/**
+	 * The following bean configures the database connection. The 'url' property value of "jdbc:derby:directory:jpaserver_derby_files;create=true" indicates that the server should save resources in a
+	 * directory called "jpaserver_derby_files".
+	 *
+	 * A URL to a remote database could also be placed here, along with login credentials and other properties supported by BasicDataSource.
+	 */
+	@Bean(destroyMethod = "close")
+	public DataSource dataSource() {
+		String url = "jdbc:derby:directory:target/jpaserver_derby_files;create=true";
+		if (isNotBlank(ContextHolder.getDatabaseUrl())) {
+			url = ContextHolder.getDatabaseUrl();
+		}
+
+		BasicDataSource retVal = new BasicDataSource();
+		retVal.setDriver(new org.apache.derby.jdbc.EmbeddedDriver());
+		retVal.setUrl(url);
+		retVal.setUsername("");
+		retVal.setPassword("");
+		return retVal;
+	}
+
 	@Bean()
 	public Properties jpaProperties() {
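As the javadoc moved into this class says, the embedded-Derby URL is only a default and a remote database can be configured the same way. A hedged sketch with an invented PostgreSQL host and credentials (the postgresql driver is among the dependencies added to the CLI POM above):

    import org.apache.commons.dbcp2.BasicDataSource;

    import javax.sql.DataSource;

    public class RemoteDataSourceDemo {

        public static DataSource postgresDataSource() {
            BasicDataSource retVal = new BasicDataSource();
            retVal.setDriver(new org.postgresql.Driver());
            retVal.setUrl("jdbc:postgresql://db.example.com:5432/hapi"); // hypothetical host and database
            retVal.setUsername("hapi");       // hypothetical credentials
            retVal.setPassword("changeme");
            return retVal;
        }
    }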
@ -1,11 +1,11 @@
|
||||
package ca.uhn.fhir.jpa.demo;
|
||||
|
||||
import java.util.Properties;
|
||||
|
||||
import javax.persistence.EntityManagerFactory;
|
||||
import javax.sql.DataSource;
|
||||
|
||||
import org.apache.commons.dbcp2.BasicDataSource;
|
||||
import ca.uhn.fhir.jpa.config.BaseJavaConfigDstu2;
|
||||
import ca.uhn.fhir.jpa.dao.DaoConfig;
|
||||
import ca.uhn.fhir.jpa.util.SubscriptionsRequireManualActivationInterceptorDstu2;
|
||||
import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor;
|
||||
import ca.uhn.fhir.rest.server.interceptor.LoggingInterceptor;
|
||||
import ca.uhn.fhir.rest.server.interceptor.ResponseHighlighterInterceptor;
|
||||
import org.apache.commons.lang3.time.DateUtils;
|
||||
import org.hibernate.jpa.HibernatePersistenceProvider;
|
||||
import org.springframework.beans.factory.annotation.Autowire;
|
||||
@ -18,18 +18,21 @@ import org.springframework.orm.jpa.JpaTransactionManager;
import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean;
import org.springframework.transaction.annotation.EnableTransactionManagement;

import ca.uhn.fhir.jpa.config.BaseJavaConfigDstu2;
import ca.uhn.fhir.jpa.dao.DaoConfig;
import ca.uhn.fhir.jpa.util.SubscriptionsRequireManualActivationInterceptorDstu2;
import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor;
import ca.uhn.fhir.rest.server.interceptor.LoggingInterceptor;
import ca.uhn.fhir.rest.server.interceptor.ResponseHighlighterInterceptor;
import javax.persistence.EntityManagerFactory;
import javax.sql.DataSource;
import java.util.Properties;

@Configuration
@EnableTransactionManagement()
@Import(FhirDbConfig.class)
public class FhirServerConfig extends BaseJavaConfigDstu2 {

@Autowired
private DataSource myDataSource;
@Autowired()
@Qualifier("jpaProperties")
private Properties myJpaProperties;

/**
 * Configure FHIR properties around the JPA server via this bean
 */
@ -43,32 +46,12 @@ public class FhirServerConfig extends BaseJavaConfigDstu2 {
return retVal;
}

/**
 * The following bean configures the database connection. The 'url' property value of "jdbc:derby:directory:jpaserver_derby_files;create=true" indicates that the server should save resources in a
 * directory called "jpaserver_derby_files".
 *
 * A URL to a remote database could also be placed here, along with login credentials and other properties supported by BasicDataSource.
 */
@Bean(destroyMethod = "close")
public DataSource dataSource() {
BasicDataSource retVal = new BasicDataSource();
retVal.setDriver(new org.apache.derby.jdbc.EmbeddedDriver());
retVal.setUrl("jdbc:derby:directory:target/jpaserver_derby_files;create=true");
retVal.setUsername("");
retVal.setPassword("");
return retVal;
}

@Autowired()
@Qualifier("jpaProperties")
private Properties myJpaProperties;

@Override
@Bean()
public LocalContainerEntityManagerFactoryBean entityManagerFactory() {
LocalContainerEntityManagerFactoryBean retVal = super.entityManagerFactory();
retVal.setPersistenceUnitName("HAPI_PU");
retVal.setDataSource(dataSource());
retVal.setDataSource(myDataSource);
retVal.setPackagesToScan("ca.uhn.fhir.jpa.entity");
retVal.setPersistenceProvider(new HibernatePersistenceProvider());
retVal.setJpaProperties(myJpaProperties);
@ -83,7 +66,7 @@ public class FhirServerConfig extends BaseJavaConfigDstu2 {
LoggingInterceptor retVal = new LoggingInterceptor();
retVal.setLoggerName("fhirtest.access");
retVal.setMessageFormat(
"Path[${servletPath}] Source[${requestHeader.x-forwarded-for}] Operation[${operationType} ${operationName} ${idOrResourceName}] UA[${requestHeader.user-agent}] Params[${requestParameters}] ResponseEncoding[${responseEncodingNoDefault}]");
retVal.setLogExceptions(true);
retVal.setErrorMessageFormat("ERROR - ${requestVerb} ${requestUrl}");
return retVal;
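The dataSource() bean removed above is presumably supplied by the imported FhirDbConfig from here on; the server config now only injects it. As a rough sketch of what an externally defined connection could look like (the bean below and its PostgreSQL driver, URL, and credentials are illustrative assumptions, not part of this commit):

    // Hypothetical replacement bean in a separate @Configuration class.
    // Driver class, URL, and credentials are assumed values for illustration.
    @Bean(destroyMethod = "close")
    public DataSource dataSource() {
        BasicDataSource retVal = new BasicDataSource();
        retVal.setDriverClassName("org.postgresql.Driver");
        retVal.setUrl("jdbc:postgresql://localhost:5432/hapi");
        retVal.setUsername("hapi");
        retVal.setPassword("secret");
        return retVal;
    }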
@ -1,13 +1,12 @@
package ca.uhn.fhir.jpa.demo;

import java.util.Properties;

import javax.persistence.EntityManagerFactory;
import javax.sql.DataSource;

import org.apache.commons.dbcp2.BasicDataSource;
import ca.uhn.fhir.jpa.config.BaseJavaConfigDstu3;
import ca.uhn.fhir.jpa.dao.DaoConfig;
import ca.uhn.fhir.jpa.util.SubscriptionsRequireManualActivationInterceptorDstu3;
import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor;
import ca.uhn.fhir.rest.server.interceptor.LoggingInterceptor;
import ca.uhn.fhir.rest.server.interceptor.ResponseHighlighterInterceptor;
import org.apache.commons.lang3.time.DateUtils;
import org.hibernate.jpa.HibernatePersistenceProvider;
import org.springframework.beans.factory.annotation.Autowire;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
@ -18,18 +17,15 @@ import org.springframework.orm.jpa.JpaTransactionManager;
import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean;
import org.springframework.transaction.annotation.EnableTransactionManagement;

import ca.uhn.fhir.jpa.config.BaseJavaConfigDstu3;
import ca.uhn.fhir.jpa.dao.DaoConfig;
import ca.uhn.fhir.jpa.util.SubscriptionsRequireManualActivationInterceptorDstu3;
import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor;
import ca.uhn.fhir.rest.server.interceptor.LoggingInterceptor;
import ca.uhn.fhir.rest.server.interceptor.ResponseHighlighterInterceptor;
import javax.persistence.EntityManagerFactory;
import javax.sql.DataSource;
import java.util.Properties;

/**
 * This class isn't used by default by the example, but
 * you can use it as a config if you want to support DSTU3
 * instead of DSTU2 in your server.
 *
 * <p>
 * See https://github.com/jamesagnew/hapi-fhir/issues/278
 */
@Configuration
@ -37,6 +33,12 @@ import ca.uhn.fhir.rest.server.interceptor.ResponseHighlighterInterceptor;
@Import(FhirDbConfig.class)
public class FhirServerConfigDstu3 extends BaseJavaConfigDstu3 {

@Autowired
private DataSource myDataSource;
@Autowired()
@Qualifier("jpaProperties")
private Properties myJpaProperties;

/**
 * Configure FHIR properties around the JPA server via this bean
 */
@ -50,36 +52,16 @@ public class FhirServerConfigDstu3 extends BaseJavaConfigDstu3 {
return retVal;
}

/**
 * The following bean configures the database connection. The 'url' property value of "jdbc:derby:directory:jpaserver_derby_files;create=true" indicates that the server should save resources in a
 * directory called "jpaserver_derby_files".
 *
 * A URL to a remote database could also be placed here, along with login credentials and other properties supported by BasicDataSource.
 */
@Bean(destroyMethod = "close")
public DataSource dataSource() {
BasicDataSource retVal = new BasicDataSource();
retVal.setDriver(new org.apache.derby.jdbc.EmbeddedDriver());
retVal.setUrl("jdbc:derby:directory:target/jpaserver_derby_files;create=true");
retVal.setUsername("");
retVal.setPassword("");
return retVal;
}

@Override
@Bean()
public LocalContainerEntityManagerFactoryBean entityManagerFactory() {
LocalContainerEntityManagerFactoryBean retVal = super.entityManagerFactory();
retVal.setPersistenceUnitName("HAPI_PU");
retVal.setDataSource(dataSource());
retVal.setDataSource(myDataSource);
retVal.setJpaProperties(myJpaProperties);
return retVal;
}

@Autowired()
@Qualifier("jpaProperties")
private Properties myJpaProperties;

/**
 * Do some fancy logging to create a nice access log that has details about each incoming request.
 */
@ -87,7 +69,7 @@ public class FhirServerConfigDstu3 extends BaseJavaConfigDstu3 {
LoggingInterceptor retVal = new LoggingInterceptor();
retVal.setLoggerName("fhirtest.access");
retVal.setMessageFormat(
"Path[${servletPath}] Source[${requestHeader.x-forwarded-for}] Operation[${operationType} ${operationName} ${idOrResourceName}] UA[${requestHeader.user-agent}] Params[${requestParameters}] ResponseEncoding[${responseEncodingNoDefault}]");
retVal.setLogExceptions(true);
retVal.setErrorMessageFormat("ERROR - ${requestVerb} ${requestUrl}");
return retVal;
@ -1,13 +1,12 @@
package ca.uhn.fhir.jpa.demo;

import java.util.Properties;

import javax.persistence.EntityManagerFactory;
import javax.sql.DataSource;

import org.apache.commons.dbcp2.BasicDataSource;
import ca.uhn.fhir.jpa.config.BaseJavaConfigR4;
import ca.uhn.fhir.jpa.dao.DaoConfig;
import ca.uhn.fhir.jpa.util.SubscriptionsRequireManualActivationInterceptorR4;
import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor;
import ca.uhn.fhir.rest.server.interceptor.LoggingInterceptor;
import ca.uhn.fhir.rest.server.interceptor.ResponseHighlighterInterceptor;
import org.apache.commons.lang3.time.DateUtils;
import org.hibernate.jpa.HibernatePersistenceProvider;
import org.springframework.beans.factory.annotation.Autowire;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
@ -18,18 +17,15 @@ import org.springframework.orm.jpa.JpaTransactionManager;
import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean;
import org.springframework.transaction.annotation.EnableTransactionManagement;

import ca.uhn.fhir.jpa.config.BaseJavaConfigR4;
import ca.uhn.fhir.jpa.dao.DaoConfig;
import ca.uhn.fhir.jpa.util.SubscriptionsRequireManualActivationInterceptorR4;
import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor;
import ca.uhn.fhir.rest.server.interceptor.LoggingInterceptor;
import ca.uhn.fhir.rest.server.interceptor.ResponseHighlighterInterceptor;
import javax.persistence.EntityManagerFactory;
import javax.sql.DataSource;
import java.util.Properties;

/**
 * This class isn't used by default by the example, but
 * you can use it as a config if you want to support R4
 * instead of DSTU2 in your server.
 *
 * <p>
 * See https://github.com/jamesagnew/hapi-fhir/issues/278
 */
@Configuration
@ -37,6 +33,12 @@ import ca.uhn.fhir.rest.server.interceptor.ResponseHighlighterInterceptor;
@Import(FhirDbConfig.class)
public class FhirServerConfigR4 extends BaseJavaConfigR4 {

@Autowired
private DataSource myDataSource;
@Autowired()
@Qualifier("jpaProperties")
private Properties myJpaProperties;

/**
 * Configure FHIR properties around the JPA server via this bean
 */
@ -50,36 +52,16 @@ public class FhirServerConfigR4 extends BaseJavaConfigR4 {
return retVal;
}

/**
 * The following bean configures the database connection. The 'url' property value of "jdbc:derby:directory:jpaserver_derby_files;create=true" indicates that the server should save resources in a
 * directory called "jpaserver_derby_files".
 *
 * A URL to a remote database could also be placed here, along with login credentials and other properties supported by BasicDataSource.
 */
@Bean(destroyMethod = "close")
public DataSource dataSource() {
BasicDataSource retVal = new BasicDataSource();
retVal.setDriver(new org.apache.derby.jdbc.EmbeddedDriver());
retVal.setUrl("jdbc:derby:directory:target/jpaserver_derby_files;create=true");
retVal.setUsername("");
retVal.setPassword("");
return retVal;
}

@Override
@Bean()
public LocalContainerEntityManagerFactoryBean entityManagerFactory() {
LocalContainerEntityManagerFactoryBean retVal = super.entityManagerFactory();
retVal.setPersistenceUnitName("HAPI_PU");
retVal.setDataSource(dataSource());
retVal.setDataSource(myDataSource);
retVal.setJpaProperties(myJpaProperties);
return retVal;
}

@Autowired()
@Qualifier("jpaProperties")
private Properties myJpaProperties;

/**
 * Do some fancy logging to create a nice access log that has details about each incoming request.
 */
@ -87,7 +69,7 @@ public class FhirServerConfigR4 extends BaseJavaConfigR4 {
LoggingInterceptor retVal = new LoggingInterceptor();
retVal.setLoggerName("fhirtest.access");
retVal.setMessageFormat(
"Path[${servletPath}] Source[${requestHeader.x-forwarded-for}] Operation[${operationType} ${operationName} ${idOrResourceName}] UA[${requestHeader.user-agent}] Params[${requestParameters}] ResponseEncoding[${responseEncodingNoDefault}]");
retVal.setLogExceptions(true);
retVal.setErrorMessageFormat("ERROR - ${requestVerb} ${requestUrl}");
return retVal;
@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>3.5.0-SNAPSHOT</version>
<version>3.5.0</version>
<relativePath>../pom.xml</relativePath>
</parent>

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>3.5.0-SNAPSHOT</version>
<version>3.5.0</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>3.5.0-SNAPSHOT</version>
<version>3.5.0</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>3.5.0-SNAPSHOT</version>
<version>3.5.0</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@ -16,14 +16,14 @@
<dependency>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-base</artifactId>
<version>3.5.0-SNAPSHOT</version>
<version>3.5.0</version>
</dependency>

<!-- Server -->
<dependency>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-server</artifactId>
<version>3.5.0-SNAPSHOT</version>
<version>3.5.0</version>
<optional>true</optional>
</dependency>
<dependency>
@ -35,43 +35,43 @@
<dependency>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-structures-dstu2</artifactId>
<version>3.5.0-SNAPSHOT</version>
<version>3.5.0</version>
<optional>true</optional>
</dependency>
<dependency>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-structures-hl7org-dstu2</artifactId>
<version>3.5.0-SNAPSHOT</version>
<version>3.5.0</version>
<optional>true</optional>
</dependency>
<dependency>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-structures-dstu2.1</artifactId>
<version>3.5.0-SNAPSHOT</version>
<version>3.5.0</version>
<optional>true</optional>
</dependency>
<dependency>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-structures-dstu3</artifactId>
<version>3.5.0-SNAPSHOT</version>
<version>3.5.0</version>
<optional>true</optional>
</dependency>
<dependency>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-structures-r4</artifactId>
<version>3.5.0-SNAPSHOT</version>
<version>3.5.0</version>
<optional>true</optional>
</dependency>
<dependency>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-validation-resources-dstu2</artifactId>
<version>3.5.0-SNAPSHOT</version>
<version>3.5.0</version>
<optional>true</optional>
</dependency>
<dependency>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-validation-resources-dstu3</artifactId>
<version>3.5.0-SNAPSHOT</version>
<version>3.5.0</version>
<optional>true</optional>
</dependency>
@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>3.5.0-SNAPSHOT</version>
<version>3.5.0</version>
<relativePath>../pom.xml</relativePath>
</parent>

@ -5,7 +5,7 @@
<parent>
<artifactId>hapi-deployable-pom</artifactId>
<groupId>ca.uhn.hapi.fhir</groupId>
<version>3.5.0-SNAPSHOT</version>
<version>3.5.0</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>

@ -11,7 +11,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>3.5.0-SNAPSHOT</version>
<version>3.5.0</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>3.5.0-SNAPSHOT</version>
<version>3.5.0</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>3.5.0-SNAPSHOT</version>
<version>3.5.0</version>
<relativePath>../pom.xml</relativePath>
</parent>
@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>3.5.0-SNAPSHOT</version>
<version>3.5.0</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@ -236,7 +236,12 @@
<artifactId>commons-dbcp2</artifactId>
<scope>test</scope>
</dependency>
<!--
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
<!--
<dependency>
<groupId>org.apache.tomcat</groupId>
<artifactId>tomcat-catalina</artifactId>

@ -338,6 +343,10 @@
<groupId>org.jboss.spec.javax.transaction</groupId>
<artifactId>jboss-transaction-api_1.2_spec</artifactId>
</exclusion>
<exclusion>
<groupId>javax.activation</groupId>
<artifactId>activation</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>

@ -394,10 +403,11 @@
</exclusion>
</exclusions>
</dependency>
<dependency>
<!-- <dependency>
<groupId>com.sun.activation</groupId>
<artifactId>javax.activation</artifactId>
</dependency>
-->
<!--<dependency>
<groupId>javax.validation</groupId>
<artifactId>validation-api</artifactId>

@ -563,10 +573,21 @@
<goal>create</goal>
</goals>
<configuration>
<dialect>org.hibernate.dialect.MySQL57InnoDBDialect</dialect>
<dialect>org.hibernate.dialect.MySQL57Dialect</dialect>
<outputFile>${project.build.directory}/classes/ca/uhn/hapi/fhir/jpa/docs/database/persistence_create_mysql57.sql</outputFile>
</configuration>
</execution>
<execution>
<id>mariadb103</id>
<phase>process-classes</phase>
<goals>
<goal>create</goal>
</goals>
<configuration>
<dialect>org.hibernate.dialect.MariaDB103Dialect</dialect>
<outputFile>${project.build.directory}/classes/ca/uhn/hapi/fhir/jpa/docs/database/persistence_create_mariadb103.sql</outputFile>
</configuration>
</execution>
<execution>
<id>oracle12c</id>
<phase>process-classes</phase>
@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.entity;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.entity;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.entity;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.entity;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@ -19,23 +19,34 @@ package ca.uhn.fhir.jpa.entity;
* limitations under the License.
* #L%
*/
import java.util.Date;

import ca.uhn.fhir.context.FhirVersionEnum;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.model.primitive.InstantDt;

import java.util.Date;

public interface IBaseResourceEntity {

Date getDeleted();

FhirVersionEnum getFhirVersion();

Long getId();

IdDt getIdDt();

InstantDt getPublished();

Long getResourceId();

String getResourceType();

InstantDt getUpdated();

Date getUpdatedDate();

long getVersion();

boolean isHasTags();
}
@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.entity;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@ -34,10 +34,14 @@ public enum ResourceEncodingEnum {
* See ResourceHistoryTable RES_ENCODING column
*/

/** Json */
/**
 * Json
 */
JSON,

/** Json Compressed */

/**
 * Json Compressed
 */
JSONC,

/**
@ -48,5 +52,5 @@ public enum ResourceEncodingEnum {
public IParser newParser(FhirContext theContext) {
return theContext.newJsonParser();
}

}
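Per the hunk above, newParser() hands back a JSON parser regardless of whether the stored encoding was JSON or JSONC. A minimal usage sketch (the DSTU3 context and the resource body are illustrative assumptions, not part of this commit):

    // Assumed imports: ca.uhn.fhir.context.FhirContext, ca.uhn.fhir.parser.IParser,
    // org.hl7.fhir.instance.model.api.IBaseResource
    FhirContext ctx = FhirContext.forDstu3();
    // Pick the parser matching the row's stored RES_ENCODING value
    IParser parser = ResourceEncodingEnum.JSON.newParser(ctx);
    IBaseResource resource = parser.parseResource("{\"resourceType\":\"Patient\"}");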
@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.entity;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.entity;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.entity;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@ -32,7 +32,7 @@ import javax.persistence.*;
@Embeddable
@Entity
@Table(name = "HFJ_SPIDX_COORDS", indexes = {
@Index(name = "IDX_SP_COORDS", columnList = "RES_TYPE,SP_NAME,SP_LATITUDE,SP_LONGITUDE"),
@Index(name = "IDX_SP_COORDS_HASH", columnList = "HASH_IDENTITY,SP_LATITUDE,SP_LONGITUDE"),
@Index(name = "IDX_SP_COORDS_UPDATED", columnList = "SP_UPDATED"),
@Index(name = "IDX_SP_COORDS_RESID", columnList = "RES_ID")
})
@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.entity;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@ -37,7 +37,7 @@ import java.util.Date;
@Embeddable
@Entity
@Table(name = "HFJ_SPIDX_DATE", indexes = {
// @Index(name = "IDX_SP_DATE", columnList = "RES_TYPE,SP_NAME,SP_VALUE_LOW,SP_VALUE_HIGH"),
// We previously had an index called IDX_SP_DATE - Don't reuse
@Index(name = "IDX_SP_DATE_HASH", columnList = "HASH_IDENTITY,SP_VALUE_LOW,SP_VALUE_HIGH"),
@Index(name = "IDX_SP_DATE_UPDATED", columnList = "SP_UPDATED"),
@Index(name = "IDX_SP_DATE_RESID", columnList = "RES_ID")

@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.entity;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@ -37,7 +37,7 @@ import java.math.BigDecimal;
@Embeddable
@Entity
@Table(name = "HFJ_SPIDX_NUMBER", indexes = {
// @Index(name = "IDX_SP_NUMBER", columnList = "RES_TYPE,SP_NAME,SP_VALUE"),
// We used to have an index with name IDX_SP_NUMBER - Don't reuse
@Index(name = "IDX_SP_NUMBER_HASH_VAL", columnList = "HASH_IDENTITY,SP_VALUE"),
@Index(name = "IDX_SP_NUMBER_UPDATED", columnList = "SP_UPDATED"),
@Index(name = "IDX_SP_NUMBER_RESID", columnList = "RES_ID")
@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.entity;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@ -38,13 +38,13 @@ import java.math.BigDecimal;
@Embeddable
@Entity
@Table(name = "HFJ_SPIDX_QUANTITY", indexes = {
// @Index(name = "IDX_SP_QUANTITY", columnList = "RES_TYPE,SP_NAME,SP_SYSTEM,SP_UNITS,SP_VALUE"),
// We used to have an index named IDX_SP_QUANTITY - Don't reuse
@Index(name = "IDX_SP_QUANTITY_HASH", columnList = "HASH_IDENTITY,SP_VALUE"),
@Index(name = "IDX_SP_QUANTITY_HASH_UN", columnList = "HASH_IDENTITY_AND_UNITS,SP_VALUE"),
@Index(name = "IDX_SP_QUANTITY_HASH_SYSUN", columnList = "HASH_IDENTITY_SYS_UNITS,SP_VALUE"),
@Index(name = "IDX_SP_QUANTITY_UPDATED", columnList = "SP_UPDATED"),
@Index(name = "IDX_SP_QUANTITY_RESID", columnList = "RES_ID")
})
//@formatter:on
public class ResourceIndexedSearchParamQuantity extends BaseResourceIndexedSearchParam {

private static final int MAX_LENGTH = 200;
@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.entity;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.entity;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.entity;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.entity;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@ -20,36 +20,21 @@ package ca.uhn.fhir.jpa.entity;
* #L%
*/

import java.io.Serializable;
import java.util.Date;

import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.ForeignKey;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Index;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.SequenceGenerator;
import javax.persistence.Table;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;

import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.hibernate.annotations.ColumnDefault;
import org.hibernate.search.annotations.Field;
import org.hl7.fhir.instance.model.api.IIdType;

import javax.persistence.*;
import java.io.Serializable;
import java.util.Date;

@Entity
@Table(name = "HFJ_RES_LINK" , indexes= {
@Index(name="IDX_RL_TPATHRES", columnList= "SRC_PATH,TARGET_RESOURCE_ID"),
@Index(name="IDX_RL_SRC", columnList= "SRC_RESOURCE_ID"),
@Index(name="IDX_RL_DEST", columnList= "TARGET_RESOURCE_ID")
@Table(name = "HFJ_RES_LINK", indexes = {
@Index(name = "IDX_RL_TPATHRES", columnList = "SRC_PATH,TARGET_RESOURCE_ID"),
@Index(name = "IDX_RL_SRC", columnList = "SRC_RESOURCE_ID"),
@Index(name = "IDX_RL_DEST", columnList = "TARGET_RESOURCE_ID")
})
public class ResourceLink implements Serializable {

@ -64,30 +49,30 @@ public class ResourceLink implements Serializable {
@Column(name = "SRC_PATH", length = 100, nullable = false)
private String mySourcePath;

@ManyToOne(optional = false, fetch=FetchType.LAZY)
@JoinColumn(name = "SRC_RESOURCE_ID", referencedColumnName = "RES_ID", nullable = false, foreignKey=@ForeignKey(name="FK_RESLINK_SOURCE"))
@ManyToOne(optional = false, fetch = FetchType.LAZY)
@JoinColumn(name = "SRC_RESOURCE_ID", referencedColumnName = "RES_ID", nullable = false, foreignKey = @ForeignKey(name = "FK_RESLINK_SOURCE"))
private ResourceTable mySourceResource;

@Column(name = "SRC_RESOURCE_ID", insertable = false, updatable = false, nullable = false)
private Long mySourceResourcePid;

@Column(name = "SOURCE_RESOURCE_TYPE", nullable=false, length=ResourceTable.RESTYPE_LEN)
@Column(name = "SOURCE_RESOURCE_TYPE", nullable = false, length = ResourceTable.RESTYPE_LEN)
@Field()
private String mySourceResourceType;

@ManyToOne(optional = true, fetch=FetchType.LAZY)
@JoinColumn(name = "TARGET_RESOURCE_ID", referencedColumnName = "RES_ID", nullable = true, foreignKey=@ForeignKey(name="FK_RESLINK_TARGET"))
@ManyToOne(optional = true, fetch = FetchType.LAZY)
@JoinColumn(name = "TARGET_RESOURCE_ID", referencedColumnName = "RES_ID", nullable = true, foreignKey = @ForeignKey(name = "FK_RESLINK_TARGET"))
private ResourceTable myTargetResource;

@Column(name = "TARGET_RESOURCE_ID", insertable = false, updatable = false, nullable = true)
@Field()
private Long myTargetResourcePid;

@Column(name = "TARGET_RESOURCE_TYPE", nullable=false, length=ResourceTable.RESTYPE_LEN)
@Column(name = "TARGET_RESOURCE_TYPE", nullable = false, length = ResourceTable.RESTYPE_LEN)
@Field()
private String myTargetResourceType;

@Column(name = "TARGET_RESOURCE_URL", length=200, nullable = true)
@Column(name = "TARGET_RESOURCE_URL", length = 200, nullable = true)
@Field()
private String myTargetResourceUrl;
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
@ -19,44 +19,36 @@ package ca.uhn.fhir.jpa.entity;
|
||||
* limitations under the License.
|
||||
* #L%
|
||||
*/
|
||||
import java.io.Serializable;
|
||||
import java.util.Date;
|
||||
|
||||
import javax.persistence.Column;
|
||||
import javax.persistence.Entity;
|
||||
import javax.persistence.EnumType;
|
||||
import javax.persistence.Enumerated;
|
||||
import javax.persistence.Id;
|
||||
import javax.persistence.Lob;
|
||||
import javax.persistence.Temporal;
|
||||
import javax.persistence.TemporalType;
|
||||
|
||||
import org.hibernate.annotations.Immutable;
|
||||
import org.hibernate.annotations.Subselect;
|
||||
|
||||
import ca.uhn.fhir.context.FhirVersionEnum;
|
||||
import ca.uhn.fhir.model.primitive.IdDt;
|
||||
import ca.uhn.fhir.model.primitive.InstantDt;
|
||||
import ca.uhn.fhir.rest.api.Constants;
|
||||
import org.hibernate.annotations.Immutable;
|
||||
import org.hibernate.annotations.Subselect;
|
||||
|
||||
import javax.persistence.*;
|
||||
import java.io.Serializable;
|
||||
import java.util.Date;
|
||||
|
||||
//@formatter:off
|
||||
@Entity
|
||||
@Immutable
|
||||
@Subselect("SELECT h.pid as pid " +
|
||||
", h.res_id as res_id " +
|
||||
", h.res_type as res_type " +
|
||||
", h.res_version as res_version " + // FHIR version
|
||||
", h.res_ver as res_ver " + // resource version
|
||||
", h.has_tags as has_tags " +
|
||||
", h.res_deleted_at as res_deleted_at " +
|
||||
", h.res_published as res_published " +
|
||||
", h.res_updated as res_updated " +
|
||||
", h.res_text as res_text " +
|
||||
", h.res_encoding as res_encoding " +
|
||||
", f.forced_id as forced_pid " +
|
||||
"FROM HFJ_RES_VER h "
|
||||
+ " LEFT OUTER JOIN HFJ_FORCED_ID f ON f.resource_pid = h.res_id "
|
||||
+ " INNER JOIN HFJ_RESOURCE r ON r.res_id = h.res_id and r.res_ver = h.res_ver")
|
||||
@Subselect("SELECT h.pid as pid " +
|
||||
", h.res_id as res_id " +
|
||||
", h.res_type as res_type " +
|
||||
", h.res_version as res_version " + // FHIR version
|
||||
", h.res_ver as res_ver " + // resource version
|
||||
", h.has_tags as has_tags " +
|
||||
", h.res_deleted_at as res_deleted_at " +
|
||||
", h.res_published as res_published " +
|
||||
", h.res_updated as res_updated " +
|
||||
", h.res_text as res_text " +
|
||||
", h.res_encoding as res_encoding " +
|
||||
", f.forced_id as forced_pid " +
|
||||
"FROM HFJ_RES_VER h "
|
||||
+ " LEFT OUTER JOIN HFJ_FORCED_ID f ON f.resource_pid = h.res_id "
|
||||
+ " INNER JOIN HFJ_RESOURCE r ON r.res_id = h.res_id and r.res_ver = h.res_ver")
|
||||
// @formatter:on
|
||||
public class ResourceSearchView implements IBaseResourceEntity, Serializable {
|
||||
|
||||
@ -142,7 +134,7 @@ public class ResourceSearchView implements IBaseResourceEntity, Serializable {
|
||||
return new IdDt(myResourceType + '/' + id + '/' + Constants.PARAM_HISTORY + '/' + getVersion());
|
||||
} else {
|
||||
return new IdDt(
|
||||
getResourceType() + '/' + getForcedId() + '/' + Constants.PARAM_HISTORY + '/' + getVersion());
|
||||
getResourceType() + '/' + getForcedId() + '/' + Constants.PARAM_HISTORY + '/' + getVersion());
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -19,9 +19,9 @@ import static org.apache.commons.lang3.StringUtils.left;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.entity;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@ -28,6 +28,7 @@ import java.io.Serializable;

@Entity
@Table(name = "HFJ_RES_PARAM_PRESENT", indexes = {
// We used to have a constraint named IDX_RESPARMPRESENT_SPID_RESID - Don't reuse
@Index(name = "IDX_RESPARMPRESENT_RESID", columnList = "RES_ID"),
@Index(name = "IDX_RESPARMPRESENT_HASHPRES", columnList = "HASH_PRESENCE")
})
@ -29,9 +29,9 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@ -93,15 +93,17 @@ public class TermConcept implements Serializable {
private Long myIndexStatus;
@Field(name = "myParentPids", index = org.hibernate.search.annotations.Index.YES, store = Store.YES, analyze = Analyze.YES, analyzer = @Analyzer(definition = "conceptParentPidsAnalyzer"))
@Lob
@Column(name="PARENT_PIDS", nullable = true)
@Column(name = "PARENT_PIDS", nullable = true)
private String myParentPids;
@OneToMany(cascade = {}, fetch = FetchType.LAZY, mappedBy = "myChild")
private Collection<TermConceptParentChildLink> myParents;
@Column(name = "CODE_SEQUENCE", nullable = true)
private Integer mySequence;

public TermConcept() {
super();
}

public TermConcept(TermCodeSystemVersion theCs, String theCode) {
setCodeSystemVersion(theCs);
setCode(theCode);
@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.entity;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.entity;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.entity;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@ -38,7 +38,7 @@ public class TermConceptMapGroup implements Serializable {
private Long myId;

@ManyToOne()
@JoinColumn(name = "CONCEPT_MAP_PID", nullable = false, referencedColumnName = "PID", foreignKey=@ForeignKey(name="FK_TCMGROUP_CONCEPTMAP"))
@JoinColumn(name = "CONCEPT_MAP_PID", nullable = false, referencedColumnName = "PID", foreignKey = @ForeignKey(name = "FK_TCMGROUP_CONCEPTMAP"))
private TermConceptMap myConceptMap;

@Column(name = "SOURCE_URL", nullable = false, length = 200)

@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.entity;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@ -42,7 +42,7 @@ public class TermConceptMapGroupElement implements Serializable {
private Long myId;

@ManyToOne()
@JoinColumn(name = "CONCEPT_MAP_GROUP_PID", nullable = false, referencedColumnName = "PID", foreignKey=@ForeignKey(name="FK_TCMGELEMENT_GROUP"))
@JoinColumn(name = "CONCEPT_MAP_GROUP_PID", nullable = false, referencedColumnName = "PID", foreignKey = @ForeignKey(name = "FK_TCMGELEMENT_GROUP"))
private TermConceptMapGroup myConceptMapGroup;

@Column(name = "SOURCE_CODE", nullable = false, length = TermConcept.CODE_LENGTH)

@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.entity;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@ -41,7 +41,7 @@ public class TermConceptMapGroupElementTarget implements Serializable {
private Long myId;

@ManyToOne()
@JoinColumn(name = "CONCEPT_MAP_GRP_ELM_PID", nullable = false, referencedColumnName = "PID", foreignKey=@ForeignKey(name="FK_TCMGETARGET_ELEMENT"))
@JoinColumn(name = "CONCEPT_MAP_GRP_ELM_PID", nullable = false, referencedColumnName = "PID", foreignKey = @ForeignKey(name = "FK_TCMGETARGET_ELEMENT"))
private TermConceptMapGroupElement myConceptMapGroupElement;

@Column(name = "TARGET_CODE", nullable = false, length = TermConcept.CODE_LENGTH)
@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.entity;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.entity;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@ -105,7 +105,7 @@ public class TestR4Config extends BaseJavaConfigR4 {

DataSource dataSource = ProxyDataSourceBuilder
.create(retVal)
// .logQueryBySlf4j(SLF4JLogLevel.INFO, "SQL")
.logQueryBySlf4j(SLF4JLogLevel.INFO, "SQL")
.logSlowQueryBySlf4j(10, TimeUnit.SECONDS)
.countQuery(new ThreadQueryCountHolder())
.build();
@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>3.5.0-SNAPSHOT</version>
<version>3.5.0</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@ -10,7 +10,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>3.5.0-SNAPSHOT</version>
<version>3.5.0</version>
<relativePath>../pom.xml</relativePath>
</parent>
@ -3,6 +3,11 @@ package ca.uhn.fhir.jpa.demo;

import java.util.Collection;
import java.util.List;

import ca.uhn.fhir.jpa.provider.dstu3.TerminologyUploaderProviderDstu3;
import ca.uhn.fhir.jpa.subscription.email.SubscriptionEmailInterceptor;
import ca.uhn.fhir.jpa.subscription.resthook.SubscriptionRestHookInterceptor;
import ca.uhn.fhir.jpa.subscription.websocket.SubscriptionWebsocketInterceptor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@ -151,9 +156,15 @@ public class JpaServerDemo extends RestfulServer {
* so it is a potential security vulnerability. Consider using an AuthorizationInterceptor
* with this feature.
*/
//if (fhirVersion == FhirVersionEnum.DSTU3) {
// registerProvider(myAppCtx.getBean(TerminologyUploaderProviderDstu3.class));
//}
if (fhirVersion == FhirVersionEnum.DSTU3) {
registerProvider(myAppCtx.getBean(TerminologyUploaderProviderDstu3.class));
}

// Enable various subscription types
registerInterceptor(myAppCtx.getBean(SubscriptionWebsocketInterceptor.class));
registerInterceptor(myAppCtx.getBean(SubscriptionRestHookInterceptor.class));
registerInterceptor(myAppCtx.getBean(SubscriptionEmailInterceptor.class));

}

}
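The comment in the hunk above suggests pairing the now-enabled terminology uploader with an AuthorizationInterceptor. A minimal sketch of such a guard (the header check and rule set are illustrative assumptions, not part of this commit):

    // Assumed imports: ca.uhn.fhir.rest.api.server.RequestDetails and
    // ca.uhn.fhir.rest.server.interceptor.auth.* (AuthorizationInterceptor, IAuthRule, RuleBuilder)
    registerInterceptor(new AuthorizationInterceptor() {
        @Override
        public List<IAuthRule> buildRuleList(RequestDetails theRequestDetails) {
            // Hypothetical admin check; substitute a real credential scheme
            if ("Bearer admin-token".equals(theRequestDetails.getHeader("Authorization"))) {
                return new RuleBuilder().allowAll().build();
            }
            return new RuleBuilder().allow().metadata().andThen().denyAll().build();
        }
    });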
hapi-fhir-jpaserver-migrate/pom.xml (new file, 153 lines)
@ -0,0 +1,153 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>

<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>3.5.0</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

<artifactId>hapi-fhir-jpaserver-migrate</artifactId>
<packaging>jar</packaging>

<name>HAPI FHIR JPA Server - Migration</name>

<dependencies>
<dependency>
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
<version>6.0.5</version>
</dependency>
<!--
<dependency>
<groupId>org.aspectj</groupId>
<artifactId>aspectjweaver</artifactId>
<version>1.8.9</version>
</dependency>
-->

<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-jdbc</artifactId>
</dependency>

<!-- This dependency includes the core HAPI-FHIR classes -->
<dependency>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-jpaserver-base</artifactId>
<version>${project.version}</version>
</dependency>

<!-- Test Database -->
<dependency>
<groupId>org.apache.derby</groupId>
<artifactId>derby</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-dbcp2</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
<scope>test</scope>
</dependency>

<dependency>
<groupId>com.intellij</groupId>
<artifactId>annotations</artifactId>
<version>12.0</version>
</dependency>

</dependencies>

<build>

<!-- Tells Maven to name the generated WAR file as hapi-fhir-jpaserver-example.war -->
<finalName>hapi-fhir-jpaserver-example</finalName>

<!-- The following is not required for the application to build, but allows you to test it by issuing "mvn jetty:run" from the command line. -->
<pluginManagement>
<plugins>
<plugin>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-maven-plugin</artifactId>
<configuration>
<webApp>
<contextPath>/hapi-fhir-jpaserver-example</contextPath>
<allowDuplicateFragmentNames>true</allowDuplicateFragmentNames>
</webApp>
</configuration>
</plugin>
</plugins>
</pluginManagement>

<plugins>
<!-- Tell Maven which Java source version you want to use -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<configuration>
<source>8</source>
<target>8</target>
</configuration>
</plugin>

<!-- The configuration here tells the WAR plugin to include the FHIR Tester overlay. You can omit it if you are not using that feature. -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-war-plugin</artifactId>
<configuration>
<archive>
<manifestEntries>
<Build-Time>${maven.build.timestamp}</Build-Time>
</manifestEntries>
</archive>
<overlays>
<overlay>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-testpage-overlay</artifactId>
</overlay>
</overlays>
<webXml>src/main/webapp/WEB-INF/web.xml</webXml>
</configuration>
</plugin>

<!-- This plugin is just a part of the HAPI internal build process, you do not need to include it in your own projects -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-deploy-plugin</artifactId>
<configuration>
<skip>true</skip>
</configuration>
</plugin>

<!-- This is to run the integration tests -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-failsafe-plugin</artifactId>
<configuration>
<redirectTestOutputToFile>true</redirectTestOutputToFile>
</configuration>
<executions>
<execution>
<goals>
<goal>integration-test</goal>
<goal>verify</goal>
</goals>
</execution>
</executions>
</plugin>

</plugins>
</build>

</project>
@ -0,0 +1,114 @@
package ca.uhn.fhir.jpa.migrate;

import org.apache.commons.lang3.Validate;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.datasource.DataSourceTransactionManager;
import org.springframework.jdbc.datasource.SingleConnectionDataSource;
import org.springframework.transaction.TransactionDefinition;
import org.springframework.transaction.support.TransactionTemplate;

import javax.annotation.Nonnull;

/*-
 * #%L
 * Smile CDR - CDR
 * %%
 * Copyright (C) 2016 - 2018 Simpatico Intelligent Systems Inc
 * %%
 * All rights reserved.
 * #L%
 */

public enum DriverTypeEnum {

	DERBY_EMBEDDED("org.apache.derby.jdbc.EmbeddedDriver", true),
	MARIADB_10_1("org.mariadb.jdbc.Driver", false),

	// Formerly com.mysql.jdbc.Driver
	MYSQL_5_7("com.mysql.cj.jdbc.Driver", false),

	POSTGRES_9_4("org.postgresql.Driver", false),

	ORACLE_12C("oracle.jdbc.OracleDriver", false),

	// Note: this must be the java.sql.Driver implementation, not com.microsoft.sqlserver.jdbc.SQLServerDataSource
	MSSQL_2012("com.microsoft.sqlserver.jdbc.SQLServerDriver", false),

	;

	private String myDriverClassName;
	private boolean myDerby;

	/**
	 * Constructor
	 */
	DriverTypeEnum(String theDriverClassName, boolean theDerby) {
		myDriverClassName = theDriverClassName;
		myDerby = theDerby;
	}

	public ConnectionProperties newConnectionProperties(String theUrl, String theUsername, String thePassword) {
		SingleConnectionDataSource dataSource = new SingleConnectionDataSource();
		dataSource.setAutoCommit(false);
		dataSource.setDriverClassName(myDriverClassName);
		dataSource.setUrl(theUrl);
		dataSource.setUsername(theUsername);
		dataSource.setPassword(thePassword);
		dataSource.setSuppressClose(true);

		DataSourceTransactionManager transactionManager = new DataSourceTransactionManager();
		transactionManager.setDataSource(dataSource);
		transactionManager.afterPropertiesSet();

		TransactionTemplate txTemplate = new TransactionTemplate();
		txTemplate.setTransactionManager(transactionManager);
		txTemplate.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);
		txTemplate.afterPropertiesSet();

		return new ConnectionProperties(dataSource, txTemplate, this);
	}

	public static class ConnectionProperties {

		private final DriverTypeEnum myDriverType;
		private final SingleConnectionDataSource myDataSource;
		private final TransactionTemplate myTxTemplate;

		/**
		 * Constructor
		 */
		public ConnectionProperties(SingleConnectionDataSource theDataSource, TransactionTemplate theTxTemplate, DriverTypeEnum theDriverType) {
			Validate.notNull(theDataSource);
			Validate.notNull(theTxTemplate);
			Validate.notNull(theDriverType);

			myDataSource = theDataSource;
			myTxTemplate = theTxTemplate;
			myDriverType = theDriverType;
		}

		public DriverTypeEnum getDriverType() {
			return myDriverType;
		}

		@Nonnull
		public SingleConnectionDataSource getDataSource() {
			return myDataSource;
		}

		@Nonnull
		public JdbcTemplate newJdbcTemplate() {
			JdbcTemplate jdbcTemplate = new JdbcTemplate();
			jdbcTemplate.setDataSource(myDataSource);
			return jdbcTemplate;
		}

		@Nonnull
		public TransactionTemplate getTxTemplate() {
			return myTxTemplate;
		}

		public void close() {
			myDataSource.destroy();
		}
	}
}
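A minimal usage sketch for the enum above, assuming a hypothetical in-memory Derby URL and credentials; only APIs defined in this file plus Spring's JdbcTemplate are used:

	// Obtain a single-connection DataSource plus a REQUIRES_NEW transaction template
	DriverTypeEnum.ConnectionProperties props = DriverTypeEnum.DERBY_EMBEDDED
		.newConnectionProperties("jdbc:derby:memory:migration_demo;create=true", "SA", "SA");
	try {
		// Each unit of work runs in its own transaction
		Integer tableCount = props.getTxTemplate().execute(t ->
			props.newJdbcTemplate().queryForObject("SELECT COUNT(*) FROM SYS.SYSTABLES", Integer.class));
	} finally {
		props.close(); // destroys the single underlying connection
	}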
@ -0,0 +1,165 @@
package ca.uhn.fhir.jpa.migrate;

import ca.uhn.fhir.jpa.migrate.taskdef.BaseTableColumnTypeTask;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.jdbc.core.ColumnMapRowMapper;
import org.thymeleaf.util.StringUtils;

import javax.sql.DataSource;
import java.sql.*;
import java.util.HashSet;
import java.util.Locale;
import java.util.Objects;
import java.util.Set;

public class JdbcUtils {
	private static final Logger ourLog = LoggerFactory.getLogger(JdbcUtils.class);

	/**
	 * Retrieve all index names for the given table
	 */
	public static Set<String> getIndexNames(DriverTypeEnum.ConnectionProperties theConnectionProperties, String theTableName) throws SQLException {
		DataSource dataSource = Objects.requireNonNull(theConnectionProperties.getDataSource());
		try (Connection connection = dataSource.getConnection()) {
			return theConnectionProperties.getTxTemplate().execute(t -> {
				DatabaseMetaData metadata;
				try {
					metadata = connection.getMetaData();
					ResultSet indexes = metadata.getIndexInfo(null, null, theTableName, false, false);

					Set<String> indexNames = new HashSet<>();
					while (indexes.next()) {

						ourLog.debug("*** Next index: {}", new ColumnMapRowMapper().mapRow(indexes, 0));

						String indexName = indexes.getString("INDEX_NAME");
						indexName = StringUtils.toUpperCase(indexName, Locale.US);
						indexNames.add(indexName);
					}

					return indexNames;
				} catch (SQLException e) {
					throw new InternalErrorException(e);
				}
			});
		}
	}

	@SuppressWarnings("ConstantConditions")
	public static boolean isIndexUnique(DriverTypeEnum.ConnectionProperties theConnectionProperties, String theTableName, String theIndexName) throws SQLException {
		DataSource dataSource = Objects.requireNonNull(theConnectionProperties.getDataSource());
		try (Connection connection = dataSource.getConnection()) {
			return theConnectionProperties.getTxTemplate().execute(t -> {
				DatabaseMetaData metadata;
				try {
					metadata = connection.getMetaData();
					ResultSet indexes = metadata.getIndexInfo(null, null, theTableName, false, false);

					while (indexes.next()) {
						// getIndexInfo can also return statistics rows, which have a null index name
						String indexName = indexes.getString("INDEX_NAME");
						if (theIndexName.equalsIgnoreCase(indexName)) {
							boolean nonUnique = indexes.getBoolean("NON_UNIQUE");
							return !nonUnique;
						}
					}

				} catch (SQLException e) {
					throw new InternalErrorException(e);
				}

				throw new InternalErrorException("Can't find index: " + theIndexName + " on table " + theTableName);
			});
		}
	}

	/**
	 * Retrieve the type descriptor of the given column
	 */
	public static String getColumnType(DriverTypeEnum.ConnectionProperties theConnectionProperties, String theTableName, String theColumnName) throws SQLException {
		DataSource dataSource = Objects.requireNonNull(theConnectionProperties.getDataSource());
		try (Connection connection = dataSource.getConnection()) {
			return theConnectionProperties.getTxTemplate().execute(t -> {
				DatabaseMetaData metadata;
				try {
					metadata = connection.getMetaData();
					ResultSet columns = metadata.getColumns(null, null, theTableName, theColumnName);

					columns.next();

					int dataType = columns.getInt("DATA_TYPE");
					Long length = columns.getLong("COLUMN_SIZE");
					switch (dataType) {
						case Types.VARCHAR:
							return BaseTableColumnTypeTask.ColumnTypeEnum.STRING.getDescriptor(length);
						case Types.BIGINT:
							// LONG descriptors take no length, so pass null rather than the reported COLUMN_SIZE
							return BaseTableColumnTypeTask.ColumnTypeEnum.LONG.getDescriptor(null);
						default:
							throw new IllegalArgumentException("Don't know how to handle datatype: " + dataType);
					}

				} catch (SQLException e) {
					throw new InternalErrorException(e);
				}

			});
		}
	}

	/**
	 * Retrieve all column names for the given table
	 */
	public static Set<String> getColumnNames(DriverTypeEnum.ConnectionProperties theConnectionProperties, String theTableName) throws SQLException {
		DataSource dataSource = Objects.requireNonNull(theConnectionProperties.getDataSource());
		try (Connection connection = dataSource.getConnection()) {
			return theConnectionProperties.getTxTemplate().execute(t -> {
				DatabaseMetaData metadata;
				try {
					metadata = connection.getMetaData();
					ResultSet columns = metadata.getColumns(null, null, theTableName, null);

					Set<String> columnNames = new HashSet<>();
					while (columns.next()) {
						String columnName = columns.getString("COLUMN_NAME");
						columnName = StringUtils.toUpperCase(columnName, Locale.US);
						columnNames.add(columnName);
					}

					return columnNames;
				} catch (SQLException e) {
					throw new InternalErrorException(e);
				}
			});
		}
	}

	public static Set<String> getTableNames(DriverTypeEnum.ConnectionProperties theConnectionProperties) throws SQLException {
		DataSource dataSource = Objects.requireNonNull(theConnectionProperties.getDataSource());
		try (Connection connection = dataSource.getConnection()) {
			return theConnectionProperties.getTxTemplate().execute(t -> {
				DatabaseMetaData metadata;
				try {
					metadata = connection.getMetaData();
					ResultSet tables = metadata.getTables(null, null, null, null);

					Set<String> tableNames = new HashSet<>();
					while (tables.next()) {
						String tableName = tables.getString("TABLE_NAME");
						tableName = StringUtils.toUpperCase(tableName, Locale.US);

						String tableType = tables.getString("TABLE_TYPE");
						if ("SYSTEM TABLE".equalsIgnoreCase(tableType)) {
							continue;
						}

						tableNames.add(tableName);
					}

					return tableNames;
				} catch (SQLException e) {
					throw new InternalErrorException(e);
				}
			});
		}
	}
}
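A short sketch of the helpers above, reusing the hypothetical props from the previous example; the table and index names are illustrative only, and every call throws SQLException, so callers must catch or declare it:

	Set<String> tables = JdbcUtils.getTableNames(props);
	if (tables.contains("HFJ_FORCED_ID")) {
		Set<String> columns = JdbcUtils.getColumnNames(props, "HFJ_FORCED_ID");
		Set<String> indexes = JdbcUtils.getIndexNames(props, "HFJ_FORCED_ID");
		boolean unique = JdbcUtils.isIndexUnique(props, "HFJ_FORCED_ID", "IDX_FORCEDID_TYPE_FID");
	}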
@ -0,0 +1,75 @@
package ca.uhn.fhir.jpa.migrate;

import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;

public class Migrator {

	private static final Logger ourLog = LoggerFactory.getLogger(Migrator.class);
	private DriverTypeEnum myDriverType;
	private String myConnectionUrl;
	private String myUsername;
	private String myPassword;
	private List<BaseTask> myTasks = new ArrayList<>();
	private DriverTypeEnum.ConnectionProperties myConnectionProperties;
	private int myChangesCount;
	private boolean myDryRun;

	public int getChangesCount() {
		return myChangesCount;
	}

	public void setDriverType(DriverTypeEnum theDriverType) {
		myDriverType = theDriverType;
	}

	public void setConnectionUrl(String theConnectionUrl) {
		myConnectionUrl = theConnectionUrl;
	}

	public void setUsername(String theUsername) {
		myUsername = theUsername;
	}

	public void setPassword(String thePassword) {
		myPassword = thePassword;
	}

	public void addTask(BaseTask theTask) {
		myTasks.add(theTask);
	}

	public void setDryRun(boolean theDryRun) {
		myDryRun = theDryRun;
	}

	public void migrate() {
		ourLog.info("Starting migration with {} tasks", myTasks.size());

		// Connect using the configured driver type rather than assuming Derby
		myConnectionProperties = myDriverType.newConnectionProperties(myConnectionUrl, myUsername, myPassword);
		try {
			for (BaseTask next : myTasks) {
				next.setDriverType(myDriverType);
				next.setConnectionProperties(myConnectionProperties);
				next.setDryRun(myDryRun);
				try {
					next.execute();
				} catch (SQLException e) {
					throw new InternalErrorException("Failure executing task \"" + next.getDescription() + "\", aborting! Cause: " + e.toString(), e);
				}

				myChangesCount += next.getChangesCount();
			}
		} finally {
			myConnectionProperties.close();
		}

		ourLog.info("Finished migration of {} tasks", myTasks.size());
	}
}
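A sketch of wiring the migrator, with hypothetical connection settings; DropIndexTask is defined later in this changeset:

	Migrator migrator = new Migrator();
	migrator.setDriverType(DriverTypeEnum.DERBY_EMBEDDED);
	migrator.setConnectionUrl("jdbc:derby:memory:migration_demo;create=true");
	migrator.setUsername("SA");
	migrator.setPassword("SA");
	migrator.setDryRun(true); // log the SQL that would run instead of executing it
	migrator.addTask(new DropIndexTask()
		.setIndexName("IDX_FORCEDID_TYPE_FORCEDID")
		.setTableName("HFJ_FORCED_ID"));
	migrator.migrate();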
@ -0,0 +1,30 @@
package ca.uhn.fhir.jpa.migrate.taskdef;

import ca.uhn.fhir.jpa.migrate.JdbcUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.sql.SQLException;
import java.util.Set;

public class AddColumnTask extends BaseTableColumnTypeTask<AddColumnTask> {

	private static final Logger ourLog = LoggerFactory.getLogger(AddColumnTask.class);

	@Override
	public void execute() throws SQLException {
		Set<String> columnNames = JdbcUtils.getColumnNames(getConnectionProperties(), getTableName());
		if (columnNames.contains(getColumnName())) {
			ourLog.info("Column {} already exists on table {} - No action performed", getColumnName(), getTableName());
			return;
		}

		String type = getSqlType();
		String notNull = getSqlNotNull();
		String sql = "alter table " + getTableName() + " add column " + getColumnName() + " " + type + notNull;
		ourLog.info("Adding column {} of type {} to table {}", getColumnName(), type, getTableName());
		executeSql(sql);
	}

}
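Configured directly (the Builder DSL used in HapiFhirJpaMigrationTasks below is the usual entry point), the task might look like the following sketch; execute() is skipped when the column already exists, so re-running is safe:

	AddColumnTask task = new AddColumnTask();
	task.setTableName("HFJ_SPIDX_COORDS");
	task.setColumnName("HASH_IDENTITY");
	task.setColumnType(BaseTableColumnTypeTask.ColumnTypeEnum.LONG);
	task.setNullable(true);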
@ -0,0 +1,59 @@
package ca.uhn.fhir.jpa.migrate.taskdef;

import ca.uhn.fhir.jpa.migrate.JdbcUtils;
import org.apache.commons.lang3.Validate;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.thymeleaf.util.StringUtils;

import java.sql.SQLException;
import java.util.Arrays;
import java.util.List;
import java.util.Locale;
import java.util.Set;

public class AddIndexTask extends BaseTableTask<AddIndexTask> {

	private static final Logger ourLog = LoggerFactory.getLogger(AddIndexTask.class);
	private String myIndexName;
	private List<String> myColumns;
	private Boolean myUnique;

	public void setIndexName(String theIndexName) {
		myIndexName = StringUtils.toUpperCase(theIndexName, Locale.US);
	}

	public void setColumns(List<String> theColumns) {
		myColumns = theColumns;
	}

	public void setColumns(String... theColumns) {
		setColumns(Arrays.asList(theColumns));
	}

	public void setUnique(boolean theUnique) {
		myUnique = theUnique;
	}

	@Override
	public void validate() {
		super.validate();
		Validate.notBlank(myIndexName, "Index name not specified");
		Validate.notEmpty(myColumns, "Columns not specified");
		Validate.notNull(myUnique, "Uniqueness not specified");
	}

	@Override
	public void execute() throws SQLException {
		Set<String> indexNames = JdbcUtils.getIndexNames(getConnectionProperties(), getTableName());
		if (indexNames.contains(myIndexName)) {
			ourLog.info("Index {} already exists on table {} - No action performed", myIndexName, getTableName());
			return;
		}

		String unique = myUnique ? "UNIQUE " : "";
		String columns = String.join(", ", myColumns);
		String sql = "CREATE " + unique + "INDEX " + myIndexName + " ON " + getTableName() + "(" + columns + ")";
		executeSql(sql);
	}

}
@ -0,0 +1,48 @@
package ca.uhn.fhir.jpa.migrate.taskdef;

import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.JdbcUtils;
import org.apache.commons.lang3.Validate;
import org.intellij.lang.annotations.Language;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.jdbc.core.JdbcTemplate;

import java.sql.SQLException;
import java.util.*;

public class AddTableTask extends BaseTableTask<AddTableTask> {

	private static final Logger ourLog = LoggerFactory.getLogger(AddTableTask.class);
	private Map<DriverTypeEnum, List<String>> myDriverToSqls = new HashMap<>();

	public void addSql(DriverTypeEnum theDriverType, @Language("SQL") String theSql) {
		Validate.notNull(theDriverType);
		Validate.notBlank(theSql);

		List<String> list = myDriverToSqls.computeIfAbsent(theDriverType, t -> new ArrayList<>());
		list.add(theSql);
	}

	@Override
	public void execute() throws SQLException {
		Set<String> tableNames = JdbcUtils.getTableNames(getConnectionProperties());
		if (tableNames.contains(getTableName())) {
			ourLog.info("Table {} already exists - No action performed", getTableName());
			return;
		}

		List<String> sqlStatements = myDriverToSqls.get(getDriverType());
		Validate.notNull(sqlStatements, "No SQL statements registered for driver type %s", getDriverType());
		ourLog.info("Going to create table {} using {} SQL statements", getTableName(), sqlStatements.size());
		getConnectionProperties().getTxTemplate().execute(t -> {

			JdbcTemplate jdbcTemplate = getConnectionProperties().newJdbcTemplate();
			for (String nextSql : sqlStatements) {
				jdbcTemplate.execute(nextSql);
			}

			return null;
		});

	}
}
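A brief sketch of registering per-dialect DDL; MY_NEW_TABLE is a hypothetical name, and the terminology tables in HapiFhirJpaMigrationTasks below show the real usage:

	AddTableTask task = new AddTableTask();
	task.setTableName("MY_NEW_TABLE");
	task.addSql(DriverTypeEnum.DERBY_EMBEDDED, "create table MY_NEW_TABLE (PID bigint not null, primary key (PID))");
	task.addSql(DriverTypeEnum.POSTGRES_9_4, "create table MY_NEW_TABLE (PID int8 not null, primary key (PID))");
	// only the statements registered for the active driver type are executed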
@ -0,0 +1,95 @@
package ca.uhn.fhir.jpa.migrate.taskdef;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.jdbc.core.ColumnMapRowMapper;
import org.springframework.jdbc.core.JdbcTemplate;

import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.function.Consumer;

public class ArbitrarySqlTask extends BaseTask<ArbitrarySqlTask> {

	private static final Logger ourLog = LoggerFactory.getLogger(ArbitrarySqlTask.class);
	private List<Task> myTasks = new ArrayList<>();
	private int myBatchSize = 1000;

	public ArbitrarySqlTask(String theDescription) {
		// Store the description on the base class so Migrator can report it in failure messages
		setDescription(theDescription);
	}

	public void addQuery(String theSql, QueryModeEnum theMode, Consumer<Map<String, Object>> theConsumer) {
		myTasks.add(new QueryTask(theSql, theMode, theConsumer));
	}

	@Override
	public void validate() {
		// nothing
	}

	@Override
	public void execute() throws SQLException {
		ourLog.info("Starting: {}", getDescription());

		for (Task next : myTasks) {
			next.execute();
		}

	}

	public void setBatchSize(int theBatchSize) {
		myBatchSize = theBatchSize;
	}

	public enum QueryModeEnum {
		BATCH_UNTIL_NO_MORE
	}

	private abstract class Task {
		public abstract void execute();
	}

	private class QueryTask extends Task {
		private final String mySql;
		private final QueryModeEnum myMode;
		private final Consumer<Map<String, Object>> myConsumer;

		public QueryTask(String theSql, QueryModeEnum theMode, Consumer<Map<String, Object>> theConsumer) {
			mySql = theSql;
			myMode = theMode;
			myConsumer = theConsumer;
		}

		@Override
		public void execute() {
			if (isDryRun()) {
				logDryRunSql(mySql);
				return;
			}

			List<Map<String, Object>> rows;
			do {
				ourLog.info("Querying for up to {} rows", myBatchSize);
				rows = getTxTemplate().execute(t -> {
					JdbcTemplate jdbcTemplate = newJdbcTemplate();
					jdbcTemplate.setMaxRows(myBatchSize);
					return jdbcTemplate.query(mySql, new ColumnMapRowMapper());
				});

				ourLog.info("Processing {} rows", rows.size());
				List<Map<String, Object>> finalRows = rows;
				getTxTemplate().execute(t -> {
					for (Map<String, Object> nextRow : finalRows) {
						myConsumer.accept(nextRow);
					}
					return null;
				});
			} while (rows.size() > 0);
		}
	}
}
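Note that BATCH_UNTIL_NO_MORE re-runs the SELECT until it returns no rows, so the consumer must change the data in a way that shrinks the result set, as the presence-hash backfill at the end of this changeset does. A minimal sketch with hypothetical table and column names:

	ArbitrarySqlTask task = new ArbitrarySqlTask("Backfill MY_COL where it is null");
	task.setBatchSize(500);
	task.addQuery("SELECT PID FROM MY_TABLE WHERE MY_COL IS NULL",
		ArbitrarySqlTask.QueryModeEnum.BATCH_UNTIL_NO_MORE,
		row -> task.executeSql("UPDATE MY_TABLE SET MY_COL = ? WHERE PID = ?", 0L, row.get("PID")));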
@ -0,0 +1,30 @@
package ca.uhn.fhir.jpa.migrate.taskdef;

import org.apache.commons.lang3.Validate;
import org.thymeleaf.util.StringUtils;

import java.util.Locale;

public abstract class BaseTableColumnTask<T extends BaseTableTask> extends BaseTableTask<T> {

	private String myColumnName;

	@SuppressWarnings("unchecked")
	public T setColumnName(String theColumnName) {
		myColumnName = StringUtils.toUpperCase(theColumnName, Locale.US);
		return (T) this;
	}

	public String getColumnName() {
		return myColumnName;
	}

	@Override
	public void validate() {
		super.validate();
		Validate.notBlank(myColumnName, "Column name not specified");
	}

}
@ -0,0 +1,136 @@
package ca.uhn.fhir.jpa.migrate.taskdef;

import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import org.apache.commons.lang3.Validate;
import org.springframework.util.Assert;

import java.util.HashMap;
import java.util.Map;
import java.util.Objects;

public abstract class BaseTableColumnTypeTask<T extends BaseTableTask> extends BaseTableColumnTask<T> {

	private ColumnTypeEnum myColumnType;
	private Map<ColumnTypeEnum, Map<DriverTypeEnum, String>> myColumnTypeToDriverTypeToSqlType = new HashMap<>();
	private Boolean myNullable;
	private Long myColumnLength;

	/**
	 * Constructor
	 */
	BaseTableColumnTypeTask() {
		setColumnType(ColumnTypeEnum.LONG, DriverTypeEnum.DERBY_EMBEDDED, "bigint");
		setColumnType(ColumnTypeEnum.LONG, DriverTypeEnum.MARIADB_10_1, "bigint");
		setColumnType(ColumnTypeEnum.LONG, DriverTypeEnum.MYSQL_5_7, "bigint");
		setColumnType(ColumnTypeEnum.LONG, DriverTypeEnum.MSSQL_2012, "bigint");
		setColumnType(ColumnTypeEnum.LONG, DriverTypeEnum.ORACLE_12C, "number(19,0)");
		setColumnType(ColumnTypeEnum.LONG, DriverTypeEnum.POSTGRES_9_4, "int8");

		setColumnType(ColumnTypeEnum.STRING, DriverTypeEnum.DERBY_EMBEDDED, "varchar(?)");
		setColumnType(ColumnTypeEnum.STRING, DriverTypeEnum.MARIADB_10_1, "varchar(?)");
		setColumnType(ColumnTypeEnum.STRING, DriverTypeEnum.MYSQL_5_7, "varchar(?)");
		setColumnType(ColumnTypeEnum.STRING, DriverTypeEnum.MSSQL_2012, "varchar(?)");
		setColumnType(ColumnTypeEnum.STRING, DriverTypeEnum.ORACLE_12C, "varchar2(?)");
		setColumnType(ColumnTypeEnum.STRING, DriverTypeEnum.POSTGRES_9_4, "varchar(?)");

		setColumnType(ColumnTypeEnum.DATE_TIMESTAMP, DriverTypeEnum.DERBY_EMBEDDED, "timestamp");
		setColumnType(ColumnTypeEnum.DATE_TIMESTAMP, DriverTypeEnum.MARIADB_10_1, "datetime(6)");
		setColumnType(ColumnTypeEnum.DATE_TIMESTAMP, DriverTypeEnum.MYSQL_5_7, "datetime(6)");
		setColumnType(ColumnTypeEnum.DATE_TIMESTAMP, DriverTypeEnum.MSSQL_2012, "datetime2");
		setColumnType(ColumnTypeEnum.DATE_TIMESTAMP, DriverTypeEnum.ORACLE_12C, "timestamp");
		setColumnType(ColumnTypeEnum.DATE_TIMESTAMP, DriverTypeEnum.POSTGRES_9_4, "timestamp");
	}

	public ColumnTypeEnum getColumnType() {
		return myColumnType;
	}

	private void setColumnType(ColumnTypeEnum theColumnType, DriverTypeEnum theDriverType, String theColumnTypeSql) {
		Map<DriverTypeEnum, String> columnSqlType = myColumnTypeToDriverTypeToSqlType.computeIfAbsent(theColumnType, k -> new HashMap<>());
		if (columnSqlType.containsKey(theDriverType)) {
			throw new IllegalStateException("Duplicate key: " + theDriverType);
		}
		columnSqlType.put(theDriverType, theColumnTypeSql);
	}

	@Override
	public void validate() {
		super.validate();
		Validate.notNull(myColumnType);
		Validate.notNull(myNullable);

		if (myColumnType == ColumnTypeEnum.STRING) {
			Validate.notNull(myColumnLength);
		} else {
			Validate.isTrue(myColumnLength == null);
		}
	}

	@SuppressWarnings("unchecked")
	public T setColumnType(ColumnTypeEnum theColumnType) {
		myColumnType = theColumnType;
		return (T) this;
	}

	protected String getSqlType() {
		String retVal = myColumnTypeToDriverTypeToSqlType.get(myColumnType).get(getDriverType());
		Objects.requireNonNull(retVal);

		if (myColumnType == ColumnTypeEnum.STRING) {
			retVal = retVal.replace("?", Long.toString(getColumnLength()));
		}

		return retVal;
	}

	public boolean isNullable() {
		return myNullable;
	}

	public void setNullable(boolean theNullable) {
		myNullable = theNullable;
	}

	protected String getSqlNotNull() {
		return isNullable() ? "" : " not null";
	}

	public Long getColumnLength() {
		return myColumnLength;
	}

	public void setColumnLength(int theColumnLength) {
		myColumnLength = (long) theColumnLength;
	}

	public enum ColumnTypeEnum {

		LONG {
			@Override
			public String getDescriptor(Long theColumnLength) {
				Assert.isTrue(theColumnLength == null, "Must not supply a column length");
				return "bigint";
			}
		},
		STRING {
			@Override
			public String getDescriptor(Long theColumnLength) {
				Assert.isTrue(theColumnLength != null, "Must supply a column length");
				return "varchar(" + theColumnLength + ")";
			}
		},
		DATE_TIMESTAMP {
			@Override
			public String getDescriptor(Long theColumnLength) {
				Assert.isTrue(theColumnLength == null, "Must not supply a column length");
				return "timestamp";
			}
		};

		public abstract String getDescriptor(Long theColumnLength);

	}

}
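The descriptors above are also what JdbcUtils.getColumnType produces, which is how ModifyColumnTask (below) compares the current type against the wanted one. For example:

	String wanted = BaseTableColumnTypeTask.ColumnTypeEnum.STRING.getDescriptor(500L); // "varchar(500)"
	String bigint = BaseTableColumnTypeTask.ColumnTypeEnum.LONG.getDescriptor(null);   // "bigint"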
@ -0,0 +1,21 @@
package ca.uhn.fhir.jpa.migrate.taskdef;

import org.apache.commons.lang3.Validate;

public abstract class BaseTableTask<T extends BaseTableTask> extends BaseTask<T> {
	private String myTableName;

	public String getTableName() {
		return myTableName;
	}

	@SuppressWarnings("unchecked")
	public T setTableName(String theTableName) {
		myTableName = theTableName;
		return (T) this;
	}

	@Override
	public void validate() {
		Validate.notBlank(myTableName, "Table name not specified");
	}
}
@ -0,0 +1,91 @@
package ca.uhn.fhir.jpa.migrate.taskdef;

import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import org.intellij.lang.annotations.Language;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.transaction.support.TransactionTemplate;

import java.sql.SQLException;

public abstract class BaseTask<T extends BaseTask> {

	private static final Logger ourLog = LoggerFactory.getLogger(BaseTask.class);
	private DriverTypeEnum.ConnectionProperties myConnectionProperties;
	private DriverTypeEnum myDriverType;
	private String myDescription;
	private int myChangesCount;
	private boolean myDryRun;

	public boolean isDryRun() {
		return myDryRun;
	}

	public void setDryRun(boolean theDryRun) {
		myDryRun = theDryRun;
	}

	public String getDescription() {
		return myDescription;
	}

	@SuppressWarnings("unchecked")
	public T setDescription(String theDescription) {
		myDescription = theDescription;
		return (T) this;
	}

	public int getChangesCount() {
		return myChangesCount;
	}

	public void executeSql(@Language("SQL") String theSql, Object... theArguments) {
		if (isDryRun()) {
			logDryRunSql(theSql);
			return;
		}

		Integer changes = getConnectionProperties().getTxTemplate().execute(t -> {
			JdbcTemplate jdbcTemplate = getConnectionProperties().newJdbcTemplate();
			int changesCount = jdbcTemplate.update(theSql, theArguments);
			ourLog.info("SQL \"{}\" returned {}", theSql, changesCount);
			return changesCount;
		});

		myChangesCount += changes;

	}

	protected void logDryRunSql(@Language("SQL") String theSql) {
		ourLog.info("WOULD EXECUTE SQL: {}", theSql);
	}

	public DriverTypeEnum.ConnectionProperties getConnectionProperties() {
		return myConnectionProperties;
	}

	public void setConnectionProperties(DriverTypeEnum.ConnectionProperties theConnectionProperties) {
		myConnectionProperties = theConnectionProperties;
	}

	public DriverTypeEnum getDriverType() {
		return myDriverType;
	}

	public void setDriverType(DriverTypeEnum theDriverType) {
		myDriverType = theDriverType;
	}

	public abstract void validate();

	public TransactionTemplate getTxTemplate() {
		return getConnectionProperties().getTxTemplate();
	}

	public JdbcTemplate newJdbcTemplate() {
		return getConnectionProperties().newJdbcTemplate();
	}

	public abstract void execute() throws SQLException;
}
@ -0,0 +1,133 @@
package ca.uhn.fhir.jpa.migrate.taskdef;

import ca.uhn.fhir.util.StopWatch;
import com.google.common.collect.ForwardingMap;
import org.apache.commons.lang3.Validate;
import org.checkerframework.checker.nullness.compatqual.NullableDecl;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.jdbc.core.JdbcTemplate;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Function;

public class CalculateHashesTask extends BaseTableColumnTask<CalculateHashesTask> {

	private static final Logger ourLog = LoggerFactory.getLogger(CalculateHashesTask.class);
	private int myBatchSize = 10000;
	private Map<String, Function<MandatoryKeyMap<String, Object>, Long>> myCalculators = new HashMap<>();

	public void setBatchSize(int theBatchSize) {
		myBatchSize = theBatchSize;
	}

	@Override
	public void execute() {
		if (isDryRun()) {
			return;
		}

		List<Map<String, Object>> rows;
		do {
			rows = getTxTemplate().execute(t -> {
				JdbcTemplate jdbcTemplate = newJdbcTemplate();
				jdbcTemplate.setMaxRows(myBatchSize);
				String sql = "SELECT * FROM " + getTableName() + " WHERE " + getColumnName() + " IS NULL";
				ourLog.info("Finding up to {} rows in {} that require hashes", myBatchSize, getTableName());
				return jdbcTemplate.queryForList(sql);
			});

			updateRows(rows);
		} while (rows.size() > 0);
	}

	private void updateRows(List<Map<String, Object>> theRows) {
		StopWatch sw = new StopWatch();
		getTxTemplate().execute(t -> {

			// Loop through rows
			assert theRows != null;
			for (Map<String, Object> nextRow : theRows) {

				Map<String, Long> newValues = new HashMap<>();
				MandatoryKeyMap<String, Object> nextRowMandatoryKeyMap = new MandatoryKeyMap<>(nextRow);

				// Apply calculators
				for (Map.Entry<String, Function<MandatoryKeyMap<String, Object>, Long>> nextCalculatorEntry : myCalculators.entrySet()) {
					String nextColumn = nextCalculatorEntry.getKey();
					Function<MandatoryKeyMap<String, Object>, Long> nextCalculator = nextCalculatorEntry.getValue();
					Long value = nextCalculator.apply(nextRowMandatoryKeyMap);
					newValues.put(nextColumn, value);
				}

				// Generate update SQL
				StringBuilder sqlBuilder = new StringBuilder();
				List<Long> arguments = new ArrayList<>();
				sqlBuilder.append("UPDATE ");
				sqlBuilder.append(getTableName());
				sqlBuilder.append(" SET ");
				for (Map.Entry<String, Long> nextNewValueEntry : newValues.entrySet()) {
					if (arguments.size() > 0) {
						sqlBuilder.append(", ");
					}
					sqlBuilder.append(nextNewValueEntry.getKey()).append(" = ?");
					arguments.add(nextNewValueEntry.getValue());
				}
				// Note: assumes the target table's primary key column is named SP_ID
				sqlBuilder.append(" WHERE SP_ID = ?");
				arguments.add((Long) nextRow.get("SP_ID"));

				// Apply update SQL
				newJdbcTemplate().update(sqlBuilder.toString(), arguments.toArray());

			}

			return theRows.size();
		});
		ourLog.info("Updated {} rows on {} in {}", theRows.size(), getTableName(), sw.toString());
	}

	public CalculateHashesTask addCalculator(String theColumnName, Function<MandatoryKeyMap<String, Object>, Long> theConsumer) {
		Validate.isTrue(!myCalculators.containsKey(theColumnName));
		myCalculators.put(theColumnName, theConsumer);
		return this;
	}

	public static class MandatoryKeyMap<K, V> extends ForwardingMap<K, V> {

		private final Map<K, V> myWrap;

		public MandatoryKeyMap(Map<K, V> theWrap) {
			myWrap = theWrap;
		}

		@Override
		public V get(@NullableDecl Object theKey) {
			if (!containsKey(theKey)) {
				throw new IllegalArgumentException("No key: " + theKey);
			}
			return super.get(theKey);
		}

		public String getString(String theKey) {
			return (String) get(theKey);
		}

		@Override
		protected Map<K, V> delegate() {
			return myWrap;
		}

		public String getResourceType() {
			return getString("RES_TYPE");
		}

		public String getParamName() {
			return getString("SP_NAME");
		}
	}
}
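A sketch of a hash backfill with a deliberately simplified, hypothetical calculator; the real migrations below delegate to the entity classes' static hash methods instead. The IS NULL query plus the per-batch updates guarantee the loop terminates once every row has a hash:

	CalculateHashesTask task = new CalculateHashesTask();
	task.setTableName("HFJ_SPIDX_URI");
	task.setColumnName("HASH_IDENTITY");
	task.addCalculator("HASH_IDENTITY",
		row -> (long) (row.getResourceType() + "|" + row.getParamName()).hashCode()); // hypothetical hash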
@ -0,0 +1,83 @@
package ca.uhn.fhir.jpa.migrate.taskdef;

import ca.uhn.fhir.jpa.migrate.JdbcUtils;
import org.apache.commons.lang3.Validate;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.sql.SQLException;
import java.util.Set;

public class DropIndexTask extends BaseTableTask<DropIndexTask> {

	private static final Logger ourLog = LoggerFactory.getLogger(DropIndexTask.class);
	private String myIndexName;

	@Override
	public void validate() {
		super.validate();
		Validate.notBlank(myIndexName, "The index name must not be blank");

		if (getDescription() == null) {
			setDescription("Drop index " + myIndexName + " on table " + getTableName());
		}
	}

	@Override
	public void execute() throws SQLException {
		Set<String> indexNames = JdbcUtils.getIndexNames(getConnectionProperties(), getTableName());

		if (!indexNames.contains(myIndexName)) {
			ourLog.info("Index {} does not exist on table {} - No action needed", myIndexName, getTableName());
			return;
		}

		boolean isUnique = JdbcUtils.isIndexUnique(getConnectionProperties(), getTableName(), myIndexName);
		String uniquenessString = isUnique ? "unique" : "non-unique";
		ourLog.info("Dropping {} index {} on table {}", uniquenessString, myIndexName, getTableName());

		String sql = null;

		if (isUnique) {
			// Drop constraint
			switch (getDriverType()) {
				case MYSQL_5_7:
				case MARIADB_10_1:
					sql = "ALTER TABLE " + getTableName() + " DROP INDEX " + myIndexName;
					break;
				case DERBY_EMBEDDED:
					sql = "DROP INDEX " + myIndexName;
					break;
				case POSTGRES_9_4:
				case ORACLE_12C:
				case MSSQL_2012:
					sql = "ALTER TABLE " + getTableName() + " DROP CONSTRAINT " + myIndexName;
					break;
			}
		} else {
			// Drop index
			switch (getDriverType()) {
				case MYSQL_5_7:
				case MARIADB_10_1:
					sql = "ALTER TABLE " + getTableName() + " DROP INDEX " + myIndexName;
					break;
				case POSTGRES_9_4:
				case DERBY_EMBEDDED:
				case ORACLE_12C:
					sql = "DROP INDEX " + myIndexName;
					break;
				case MSSQL_2012:
					sql = "DROP INDEX " + getTableName() + "." + myIndexName;
					break;
			}
		}
		executeSql(sql);

	}

	public DropIndexTask setIndexName(String theIndexName) {
		myIndexName = theIndexName;
		return this;
	}
}
@ -0,0 +1,27 @@
package ca.uhn.fhir.jpa.migrate.taskdef;

import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LogStartSectionWithMessageTask extends BaseTask {
	private static final Logger ourLog = LoggerFactory.getLogger(LogStartSectionWithMessageTask.class);
	private final String myMessage;

	public LogStartSectionWithMessageTask(String theMessage) {
		myMessage = theMessage;
	}

	@Override
	public void validate() {
		// nothing
	}

	@Override
	public void execute() {
		ourLog.info("");
		ourLog.info(StringUtils.leftPad("", myMessage.length(), "*"));
		ourLog.info(myMessage);
		ourLog.info(StringUtils.leftPad("", myMessage.length(), "*"));
	}
}
@ -0,0 +1,69 @@
package ca.uhn.fhir.jpa.migrate.taskdef;

import ca.uhn.fhir.jpa.migrate.JdbcUtils;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.sql.SQLException;

public class ModifyColumnTask extends BaseTableColumnTypeTask<ModifyColumnTask> {

	private static final Logger ourLog = LoggerFactory.getLogger(ModifyColumnTask.class);

	@Override
	public void execute() {

		String existingType;
		try {
			existingType = JdbcUtils.getColumnType(getConnectionProperties(), getTableName(), getColumnName());
		} catch (SQLException e) {
			throw new InternalErrorException(e);
		}

		String wantedType = getColumnType().getDescriptor(getColumnLength());
		if (existingType.equals(wantedType)) {
			ourLog.info("Column {} on table {} is already of type {} - No action performed", getColumnName(), getTableName(), wantedType);
			return;
		}

		String type = getSqlType();
		String notNull = getSqlNotNull();

		String sql;
		String sqlNotNull = null;
		switch (getDriverType()) {
			case DERBY_EMBEDDED:
				sql = "alter table " + getTableName() + " alter column " + getColumnName() + " set data type " + type;
				break;
			case MARIADB_10_1:
			case MYSQL_5_7:
				sql = "alter table " + getTableName() + " modify column " + getColumnName() + " " + type + notNull;
				break;
			case POSTGRES_9_4:
				sql = "alter table " + getTableName() + " alter column " + getColumnName() + " type " + type;
				if (!isNullable()) {
					// Postgres requires a separate statement to apply the NOT NULL constraint
					sqlNotNull = "alter table " + getTableName() + " alter column " + getColumnName() + " set not null";
				}
				break;
			case ORACLE_12C:
				sql = "alter table " + getTableName() + " modify " + getColumnName() + " " + type + notNull;
				break;
			case MSSQL_2012:
				sql = "alter table " + getTableName() + " alter column " + getColumnName() + " " + type + notNull;
				break;
			default:
				throw new IllegalStateException("Don't know how to handle " + getDriverType());
		}

		ourLog.info("Updating column {} on table {} to type {}", getColumnName(), getTableName(), type);
		executeSql(sql);

		if (sqlNotNull != null) {
			ourLog.info("Updating column {} on table {} to not null", getColumnName(), getTableName());
			executeSql(sqlNotNull);
		}
	}

}
@ -0,0 +1,406 @@
package ca.uhn.fhir.jpa.migrate.tasks;

import ca.uhn.fhir.jpa.dao.DaoConfig;
import ca.uhn.fhir.jpa.entity.*;
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.taskdef.AddColumnTask;
import ca.uhn.fhir.jpa.migrate.taskdef.ArbitrarySqlTask;
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTableColumnTypeTask;
import ca.uhn.fhir.jpa.migrate.taskdef.CalculateHashesTask;
import ca.uhn.fhir.jpa.migrate.tasks.api.BaseMigrationTasks;
import ca.uhn.fhir.util.VersionEnum;

@SuppressWarnings({"UnstableApiUsage", "SqlNoDataSourceInspection", "SpellCheckingInspection"})
public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks {

	/**
	 * Constructor
	 */
	public HapiFhirJpaMigrationTasks() {
		init350();
	}

	private void init350() {
		Builder version = forVersion(VersionEnum.V3_5_0);

		// Forced ID changes
		Builder.BuilderWithTableName forcedId = version.onTable("HFJ_FORCED_ID");
		version.startSectionWithMessage("Starting work on table: " + forcedId.getTableName());
		forcedId.dropIndex("IDX_FORCEDID_TYPE_FORCEDID");
		forcedId.dropIndex("IDX_FORCEDID_TYPE_RESID");
		forcedId
			.addIndex("IDX_FORCEDID_TYPE_FID")
			.unique(true)
			.withColumns("RESOURCE_TYPE", "FORCED_ID");

		// Indexes - Coords
		Builder.BuilderWithTableName spidxCoords = version.onTable("HFJ_SPIDX_COORDS");
		version.startSectionWithMessage("Starting work on table: " + spidxCoords.getTableName());
		spidxCoords.dropIndex("IDX_SP_COORDS");
		spidxCoords
			.addColumn("HASH_IDENTITY")
			.nullable()
			.type(AddColumnTask.ColumnTypeEnum.LONG);
		spidxCoords
			.addIndex("IDX_SP_COORDS_HASH")
			.unique(false)
			.withColumns("HASH_IDENTITY", "SP_LATITUDE", "SP_LONGITUDE");
		spidxCoords
			.addTask(new CalculateHashesTask()
				.setColumnName("HASH_IDENTITY")
				.addCalculator("HASH_IDENTITY", t -> BaseResourceIndexedSearchParam.calculateHashIdentity(t.getResourceType(), t.getString("SP_NAME")))
			);

		// Indexes - Date
		Builder.BuilderWithTableName spidxDate = version.onTable("HFJ_SPIDX_DATE");
		version.startSectionWithMessage("Starting work on table: " + spidxDate.getTableName());
		spidxDate.dropIndex("IDX_SP_TOKEN");
		spidxDate
			.addColumn("HASH_IDENTITY")
			.nullable()
			.type(AddColumnTask.ColumnTypeEnum.LONG);
		spidxDate
			.addIndex("IDX_SP_DATE_HASH")
			.unique(false)
			.withColumns("HASH_IDENTITY", "SP_VALUE_LOW", "SP_VALUE_HIGH");
		spidxDate
			.addTask(new CalculateHashesTask()
				.setColumnName("HASH_IDENTITY")
				.addCalculator("HASH_IDENTITY", t -> BaseResourceIndexedSearchParam.calculateHashIdentity(t.getResourceType(), t.getString("SP_NAME")))
			);

		// Indexes - Number
		Builder.BuilderWithTableName spidxNumber = version.onTable("HFJ_SPIDX_NUMBER");
		version.startSectionWithMessage("Starting work on table: " + spidxNumber.getTableName());
		spidxNumber.dropIndex("IDX_SP_NUMBER");
		spidxNumber
			.addColumn("HASH_IDENTITY")
			.nullable()
			.type(AddColumnTask.ColumnTypeEnum.LONG);
		spidxNumber
			.addIndex("IDX_SP_NUMBER_HASH_VAL")
			.unique(false)
			.withColumns("HASH_IDENTITY", "SP_VALUE");
		spidxNumber
			.addTask(new CalculateHashesTask()
				.setColumnName("HASH_IDENTITY")
				.addCalculator("HASH_IDENTITY", t -> BaseResourceIndexedSearchParam.calculateHashIdentity(t.getResourceType(), t.getString("SP_NAME")))
			);

		// Indexes - Quantity
		Builder.BuilderWithTableName spidxQuantity = version.onTable("HFJ_SPIDX_QUANTITY");
		version.startSectionWithMessage("Starting work on table: " + spidxQuantity.getTableName());
		spidxQuantity.dropIndex("IDX_SP_QUANTITY");
		spidxQuantity
			.addColumn("HASH_IDENTITY")
			.nullable()
			.type(AddColumnTask.ColumnTypeEnum.LONG);
		spidxQuantity
			.addColumn("HASH_IDENTITY_SYS_UNITS")
			.nullable()
			.type(AddColumnTask.ColumnTypeEnum.LONG);
		spidxQuantity
			.addColumn("HASH_IDENTITY_AND_UNITS")
			.nullable()
			.type(AddColumnTask.ColumnTypeEnum.LONG);
		spidxQuantity
			.addIndex("IDX_SP_QUANTITY_HASH")
			.unique(false)
			.withColumns("HASH_IDENTITY", "SP_VALUE");
		spidxQuantity
			.addIndex("IDX_SP_QUANTITY_HASH_UN")
			.unique(false)
			.withColumns("HASH_IDENTITY_AND_UNITS", "SP_VALUE");
		spidxQuantity
			.addIndex("IDX_SP_QUANTITY_HASH_SYSUN")
			.unique(false)
			.withColumns("HASH_IDENTITY_SYS_UNITS", "SP_VALUE");
		spidxQuantity
			.addTask(new CalculateHashesTask()
				.setColumnName("HASH_IDENTITY")
				.addCalculator("HASH_IDENTITY", t -> BaseResourceIndexedSearchParam.calculateHashIdentity(t.getResourceType(), t.getString("SP_NAME")))
				.addCalculator("HASH_IDENTITY_AND_UNITS", t -> ResourceIndexedSearchParamQuantity.calculateHashUnits(t.getResourceType(), t.getString("SP_NAME"), t.getString("SP_UNITS")))
				.addCalculator("HASH_IDENTITY_SYS_UNITS", t -> ResourceIndexedSearchParamQuantity.calculateHashSystemAndUnits(t.getResourceType(), t.getString("SP_NAME"), t.getString("SP_SYSTEM"), t.getString("SP_UNITS")))
			);

		// Indexes - String
		Builder.BuilderWithTableName spidxString = version.onTable("HFJ_SPIDX_STRING");
		version.startSectionWithMessage("Starting work on table: " + spidxString.getTableName());
		spidxString.dropIndex("IDX_SP_STRING");
		spidxString
			.addColumn("HASH_NORM_PREFIX")
			.nullable()
			.type(AddColumnTask.ColumnTypeEnum.LONG);
		spidxString
			.addIndex("IDX_SP_STRING_HASH_NRM")
			.unique(false)
			.withColumns("HASH_NORM_PREFIX", "SP_VALUE_NORMALIZED");
		spidxString
			.addIndex("IDX_SP_STRING_HASH_EXCT")
			.unique(false)
			.withColumns("HASH_EXACT");
		spidxString
			.addTask(new CalculateHashesTask()
				.setColumnName("HASH_NORM_PREFIX")
				.addCalculator("HASH_NORM_PREFIX", t -> ResourceIndexedSearchParamString.calculateHashNormalized(new DaoConfig(), t.getResourceType(), t.getString("SP_NAME"), t.getString("SP_VALUE_NORMALIZED")))
				.addCalculator("HASH_EXACT", t -> ResourceIndexedSearchParamString.calculateHashExact(t.getResourceType(), t.getParamName(), t.getString("SP_VALUE_EXACT")))
			);

		// Indexes - Token
		Builder.BuilderWithTableName spidxToken = version.onTable("HFJ_SPIDX_TOKEN");
		version.startSectionWithMessage("Starting work on table: " + spidxToken.getTableName());
		spidxToken.dropIndex("IDX_SP_TOKEN");
		spidxToken.dropIndex("IDX_SP_TOKEN_UNQUAL");
		spidxToken
			.addColumn("HASH_IDENTITY")
			.nullable()
			.type(AddColumnTask.ColumnTypeEnum.LONG);
		spidxToken
			.addColumn("HASH_SYS")
			.nullable()
			.type(AddColumnTask.ColumnTypeEnum.LONG);
		spidxToken
			.addColumn("HASH_SYS_AND_VALUE")
			.nullable()
			.type(AddColumnTask.ColumnTypeEnum.LONG);
		spidxToken
			.addColumn("HASH_VALUE")
			.nullable()
			.type(AddColumnTask.ColumnTypeEnum.LONG);
		spidxToken
			.addIndex("IDX_SP_TOKEN_HASH")
			.unique(false)
			.withColumns("HASH_IDENTITY");
		spidxToken
			.addIndex("IDX_SP_TOKEN_HASH_S")
			.unique(false)
			.withColumns("HASH_SYS");
		spidxToken
			.addIndex("IDX_SP_TOKEN_HASH_SV")
			.unique(false)
			.withColumns("HASH_SYS_AND_VALUE");
		spidxToken
			.addIndex("IDX_SP_TOKEN_HASH_V")
			.unique(false)
			.withColumns("HASH_VALUE");
		spidxToken
			.addTask(new CalculateHashesTask()
				.setColumnName("HASH_IDENTITY")
				.addCalculator("HASH_IDENTITY", t -> BaseResourceIndexedSearchParam.calculateHashIdentity(t.getResourceType(), t.getString("SP_NAME")))
				.addCalculator("HASH_SYS", t -> ResourceIndexedSearchParamToken.calculateHashSystem(t.getResourceType(), t.getParamName(), t.getString("SP_SYSTEM")))
				.addCalculator("HASH_SYS_AND_VALUE", t -> ResourceIndexedSearchParamToken.calculateHashSystemAndValue(t.getResourceType(), t.getParamName(), t.getString("SP_SYSTEM"), t.getString("SP_VALUE")))
				.addCalculator("HASH_VALUE", t -> ResourceIndexedSearchParamToken.calculateHashValue(t.getResourceType(), t.getParamName(), t.getString("SP_VALUE")))
			);

		// Indexes - URI
		Builder.BuilderWithTableName spidxUri = version.onTable("HFJ_SPIDX_URI");
		version.startSectionWithMessage("Starting work on table: " + spidxUri.getTableName());
		spidxUri
			.addColumn("HASH_IDENTITY")
			.nullable()
			.type(AddColumnTask.ColumnTypeEnum.LONG);
		spidxUri
			.addIndex("IDX_SP_URI_HASH_IDENTITY")
			.unique(false)
			.withColumns("HASH_IDENTITY", "SP_URI");
		spidxUri
			.addIndex("IDX_SP_URI_HASH_URI")
			.unique(false)
			.withColumns("HASH_URI");
		spidxUri
			.addTask(new CalculateHashesTask()
				.setColumnName("HASH_IDENTITY")
				.addCalculator("HASH_IDENTITY", t -> BaseResourceIndexedSearchParam.calculateHashIdentity(t.getResourceType(), t.getString("SP_NAME")))
				.addCalculator("HASH_URI", t -> ResourceIndexedSearchParamUri.calculateHashUri(t.getResourceType(), t.getString("SP_NAME"), t.getString("SP_URI")))
			);

		// Search Parameter Presence
		Builder.BuilderWithTableName spp = version.onTable("HFJ_RES_PARAM_PRESENT");
		version.startSectionWithMessage("Starting work on table: " + spp.getTableName());
		spp.dropIndex("IDX_RESPARMPRESENT_SPID_RESID");
		spp
			.addColumn("HASH_PRESENCE")
			.nullable()
			.type(AddColumnTask.ColumnTypeEnum.LONG);
		spp
			.addIndex("IDX_RESPARMPRESENT_HASHPRES")
			.unique(false)
			.withColumns("HASH_PRESENCE");

		ArbitrarySqlTask consolidateSearchParamPresenceIndexesTask = new ArbitrarySqlTask("Consolidate search parameter presence indexes");
		consolidateSearchParamPresenceIndexesTask.setBatchSize(1);
		String sql = "SELECT " +
			"HFJ_SEARCH_PARM.RES_TYPE RES_TYPE, HFJ_SEARCH_PARM.PARAM_NAME PARAM_NAME, " +
			"HFJ_RES_PARAM_PRESENT.PID PID, HFJ_RES_PARAM_PRESENT.SP_ID SP_ID, HFJ_RES_PARAM_PRESENT.SP_PRESENT SP_PRESENT, HFJ_RES_PARAM_PRESENT.HASH_PRESENCE HASH_PRESENCE " +
			"from HFJ_RES_PARAM_PRESENT " +
			"join HFJ_SEARCH_PARM ON (HFJ_SEARCH_PARM.PID = HFJ_RES_PARAM_PRESENT.SP_ID) " +
			"where HFJ_RES_PARAM_PRESENT.HASH_PRESENCE is null";
		consolidateSearchParamPresenceIndexesTask.addQuery(sql, ArbitrarySqlTask.QueryModeEnum.BATCH_UNTIL_NO_MORE, t -> {
			Long pid = (Long) t.get("PID");
			// SP_PRESENT carries the boolean being hashed; HASH_PRESENCE is always null in this result set
			Boolean present = (Boolean) t.get("SP_PRESENT");
			String resType = (String) t.get("RES_TYPE");
			String paramName = (String) t.get("PARAM_NAME");
			Long hash = SearchParamPresent.calculateHashPresence(resType, paramName, present);
			consolidateSearchParamPresenceIndexesTask.executeSql("update HFJ_RES_PARAM_PRESENT set HASH_PRESENCE = ? where PID = ?", hash, pid);
		});
		version.addTask(consolidateSearchParamPresenceIndexesTask);

		// Concept
		Builder.BuilderWithTableName trmConcept = version.onTable("TRM_CONCEPT");
		version.startSectionWithMessage("Starting work on table: " + trmConcept.getTableName());
		trmConcept
			.addColumn("CONCEPT_UPDATED")
			.nullable()
			.type(BaseTableColumnTypeTask.ColumnTypeEnum.DATE_TIMESTAMP);
		trmConcept
			.addIndex("IDX_CONCEPT_UPDATED")
			.unique(false)
			.withColumns("CONCEPT_UPDATED");
		trmConcept
			.modifyColumn("CODE")
			.nonNullable()
			.withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 500);

		// Concept Designation
		version.startSectionWithMessage("Starting work on table: TRM_CONCEPT_DESIG");
		version
			.addTable("TRM_CONCEPT_DESIG")
			.addSql(DriverTypeEnum.DERBY_EMBEDDED, "create table TRM_CONCEPT_DESIG (PID bigint not null, LANG varchar(500), USE_CODE varchar(500), USE_DISPLAY varchar(500), USE_SYSTEM varchar(500), VAL varchar(500) not null, CS_VER_PID bigint, CONCEPT_PID bigint, primary key (PID))")
			.addSql(DriverTypeEnum.DERBY_EMBEDDED, "alter table TRM_CONCEPT_DESIG add constraint FK_CONCEPTDESIG_CSV foreign key (CS_VER_PID) references TRM_CODESYSTEM_VER")
			.addSql(DriverTypeEnum.DERBY_EMBEDDED, "alter table TRM_CONCEPT_DESIG add constraint FK_CONCEPTDESIG_CONCEPT foreign key (CONCEPT_PID) references TRM_CONCEPT")
			.addSql(DriverTypeEnum.MYSQL_5_7, "create table TRM_CONCEPT_DESIG (PID bigint not null, LANG varchar(500), USE_CODE varchar(500), USE_DISPLAY varchar(500), USE_SYSTEM varchar(500), VAL varchar(500) not null, CS_VER_PID bigint, CONCEPT_PID bigint, primary key (PID)) ENGINE=InnoDB")
			.addSql(DriverTypeEnum.MYSQL_5_7, "alter table TRM_CONCEPT_DESIG add constraint FK_CONCEPTDESIG_CSV foreign key (CS_VER_PID) references TRM_CODESYSTEM_VER (PID)")
			.addSql(DriverTypeEnum.MYSQL_5_7, "alter table TRM_CONCEPT_DESIG add constraint FK_CONCEPTDESIG_CONCEPT foreign key (CONCEPT_PID) references TRM_CONCEPT (PID)")
			.addSql(DriverTypeEnum.MARIADB_10_1, "create table TRM_CONCEPT_DESIG (PID bigint not null, LANG varchar(500), USE_CODE varchar(500), USE_DISPLAY varchar(500), USE_SYSTEM varchar(500), VAL varchar(500) not null, CS_VER_PID bigint, CONCEPT_PID bigint, primary key (PID))")
			.addSql(DriverTypeEnum.MARIADB_10_1, "alter table TRM_CONCEPT_DESIG add constraint FK_CONCEPTDESIG_CSV foreign key (CS_VER_PID) references TRM_CODESYSTEM_VER (PID)")
			.addSql(DriverTypeEnum.MARIADB_10_1, "alter table TRM_CONCEPT_DESIG add constraint FK_CONCEPTDESIG_CONCEPT foreign key (CONCEPT_PID) references TRM_CONCEPT (PID)")
			.addSql(DriverTypeEnum.ORACLE_12C, "create table TRM_CONCEPT_DESIG (PID number(19,0) not null, LANG varchar2(500 char), USE_CODE varchar2(500 char), USE_DISPLAY varchar2(500 char), USE_SYSTEM varchar2(500 char), VAL varchar2(500 char) not null, CS_VER_PID number(19,0), CONCEPT_PID number(19,0), primary key (PID))")
			.addSql(DriverTypeEnum.ORACLE_12C, "alter table TRM_CONCEPT_DESIG add constraint FK_CONCEPTDESIG_CSV foreign key (CS_VER_PID) references TRM_CODESYSTEM_VER")
			.addSql(DriverTypeEnum.ORACLE_12C, "alter table TRM_CONCEPT_DESIG add constraint FK_CONCEPTDESIG_CONCEPT foreign key (CONCEPT_PID) references TRM_CONCEPT")
			.addSql(DriverTypeEnum.POSTGRES_9_4, "create table TRM_CONCEPT_DESIG (PID int8 not null, LANG varchar(500), USE_CODE varchar(500), USE_DISPLAY varchar(500), USE_SYSTEM varchar(500), VAL varchar(500) not null, CS_VER_PID int8, CONCEPT_PID int8, primary key (PID))")
			.addSql(DriverTypeEnum.POSTGRES_9_4, "alter table TRM_CONCEPT_DESIG add constraint FK_CONCEPTDESIG_CSV foreign key (CS_VER_PID) references TRM_CODESYSTEM_VER")
			.addSql(DriverTypeEnum.POSTGRES_9_4, "alter table TRM_CONCEPT_DESIG add constraint FK_CONCEPTDESIG_CONCEPT foreign key (CONCEPT_PID) references TRM_CONCEPT")
			.addSql(DriverTypeEnum.MSSQL_2012, "create table TRM_CONCEPT_DESIG (PID bigint not null, LANG varchar(500), USE_CODE varchar(500), USE_DISPLAY varchar(500), USE_SYSTEM varchar(500), VAL varchar(500) not null, CS_VER_PID bigint, CONCEPT_PID bigint, primary key (PID))")
			.addSql(DriverTypeEnum.MSSQL_2012, "alter table TRM_CONCEPT_DESIG add constraint FK_CONCEPTDESIG_CSV foreign key (CS_VER_PID) references TRM_CODESYSTEM_VER")
			.addSql(DriverTypeEnum.MSSQL_2012, "alter table TRM_CONCEPT_DESIG add constraint FK_CONCEPTDESIG_CONCEPT foreign key (CONCEPT_PID) references TRM_CONCEPT");

		// Concept Property
		version.startSectionWithMessage("Starting work on table: TRM_CONCEPT_PROPERTY");
		version
			.addTable("TRM_CONCEPT_PROPERTY")
			.addSql(DriverTypeEnum.DERBY_EMBEDDED, "create table TRM_CONCEPT_PROPERTY (PID bigint not null, PROP_CODESYSTEM varchar(500), PROP_DISPLAY varchar(500), PROP_KEY varchar(500) not null, PROP_TYPE integer not null, PROP_VAL varchar(500), CS_VER_PID bigint, CONCEPT_PID bigint, primary key (PID))")
			.addSql(DriverTypeEnum.DERBY_EMBEDDED, "alter table TRM_CONCEPT_PROPERTY add constraint FK_CONCEPTPROP_CSV foreign key (CS_VER_PID) references TRM_CODESYSTEM_VER")
			.addSql(DriverTypeEnum.DERBY_EMBEDDED, "alter table TRM_CONCEPT_PROPERTY add constraint FK_CONCEPTPROP_CONCEPT foreign key (CONCEPT_PID) references TRM_CONCEPT")
			.addSql(DriverTypeEnum.MARIADB_10_1, "create table TRM_CONCEPT_PROPERTY (PID bigint not null, PROP_CODESYSTEM varchar(500), PROP_DISPLAY varchar(500), PROP_KEY varchar(500) not null, PROP_TYPE integer not null, PROP_VAL varchar(500), CS_VER_PID bigint, CONCEPT_PID bigint, primary key (PID))")
			.addSql(DriverTypeEnum.MARIADB_10_1, "alter table TRM_CONCEPT_PROPERTY add constraint FK_CONCEPTPROP_CSV foreign key (CS_VER_PID) references TRM_CODESYSTEM_VER (PID)")
			.addSql(DriverTypeEnum.MARIADB_10_1, "alter table TRM_CONCEPT_PROPERTY add constraint FK_CONCEPTPROP_CONCEPT foreign key (CONCEPT_PID) references TRM_CONCEPT (PID)")
			.addSql(DriverTypeEnum.MYSQL_5_7, "create table TRM_CONCEPT_PROPERTY (PID bigint not null, PROP_CODESYSTEM varchar(500), PROP_DISPLAY varchar(500), PROP_KEY varchar(500) not null, PROP_TYPE integer not null, PROP_VAL varchar(500), CS_VER_PID bigint, CONCEPT_PID bigint, primary key (PID))")
			.addSql(DriverTypeEnum.MYSQL_5_7, "alter table TRM_CONCEPT_PROPERTY add constraint FK_CONCEPTPROP_CSV foreign key (CS_VER_PID) references TRM_CODESYSTEM_VER (PID)")
			.addSql(DriverTypeEnum.MYSQL_5_7, "alter table TRM_CONCEPT_PROPERTY add constraint FK_CONCEPTPROP_CONCEPT foreign key (CONCEPT_PID) references TRM_CONCEPT (PID)")
			.addSql(DriverTypeEnum.ORACLE_12C, "create table TRM_CONCEPT_PROPERTY (PID number(19,0) not null, PROP_CODESYSTEM varchar2(500 char), PROP_DISPLAY varchar2(500 char), PROP_KEY varchar2(500 char) not null, PROP_TYPE number(10,0) not null, PROP_VAL varchar2(500 char), CS_VER_PID number(19,0), CONCEPT_PID number(19,0), primary key (PID))")
			.addSql(DriverTypeEnum.ORACLE_12C, "alter table TRM_CONCEPT_PROPERTY add constraint FK_CONCEPTPROP_CSV foreign key (CS_VER_PID) references TRM_CODESYSTEM_VER")
			.addSql(DriverTypeEnum.ORACLE_12C, "alter table TRM_CONCEPT_PROPERTY add constraint FK_CONCEPTPROP_CONCEPT foreign key (CONCEPT_PID) references TRM_CONCEPT")
			.addSql(DriverTypeEnum.POSTGRES_9_4, "create table TRM_CONCEPT_PROPERTY (PID int8 not null, PROP_CODESYSTEM varchar(500), PROP_DISPLAY varchar(500), PROP_KEY varchar(500) not null, PROP_TYPE int4 not null, PROP_VAL varchar(500), CS_VER_PID int8, CONCEPT_PID int8, primary key (PID))")
			.addSql(DriverTypeEnum.POSTGRES_9_4, "alter table TRM_CONCEPT_PROPERTY add constraint FK_CONCEPTPROP_CSV foreign key (CS_VER_PID) references TRM_CODESYSTEM_VER")
			.addSql(DriverTypeEnum.POSTGRES_9_4, "alter table TRM_CONCEPT_PROPERTY add constraint FK_CONCEPTPROP_CONCEPT foreign key (CONCEPT_PID) references TRM_CONCEPT")
			.addSql(DriverTypeEnum.MSSQL_2012, "create table TRM_CONCEPT_PROPERTY (PID bigint not null, PROP_CODESYSTEM varchar(500), PROP_DISPLAY varchar(500), PROP_KEY varchar(500) not null, PROP_TYPE int not null, PROP_VAL varchar(500), CS_VER_PID bigint, CONCEPT_PID bigint, primary key (PID))")
			.addSql(DriverTypeEnum.MSSQL_2012, "alter table TRM_CONCEPT_PROPERTY add constraint FK_CONCEPTPROP_CSV foreign key (CS_VER_PID) references TRM_CODESYSTEM_VER")
			.addSql(DriverTypeEnum.MSSQL_2012, "alter table TRM_CONCEPT_PROPERTY add constraint FK_CONCEPTPROP_CONCEPT foreign key (CONCEPT_PID) references TRM_CONCEPT");

		// Concept Map - Map
		version.startSectionWithMessage("Starting work on table: TRM_CONCEPT_MAP");
		version
			.addTable("TRM_CONCEPT_MAP")
			.addSql(DriverTypeEnum.DERBY_EMBEDDED, "create table TRM_CONCEPT_MAP (PID bigint not null, RES_ID bigint, SOURCE_URL varchar(200), TARGET_URL varchar(200), URL varchar(200) not null, primary key (PID))")
			.addSql(DriverTypeEnum.DERBY_EMBEDDED, "alter table TRM_CONCEPT_MAP add constraint FK_TRMCONCEPTMAP_RES foreign key (RES_ID) references HFJ_RESOURCE")
			.addSql(DriverTypeEnum.MYSQL_5_7, "create table TRM_CONCEPT_MAP (PID bigint not null, RES_ID bigint, SOURCE_URL varchar(200), TARGET_URL varchar(200), URL varchar(200) not null, primary key (PID))")
			.addSql(DriverTypeEnum.MYSQL_5_7, "alter table TRM_CONCEPT_MAP add constraint IDX_CONCEPT_MAP_URL unique (URL)")
			.addSql(DriverTypeEnum.MYSQL_5_7, "alter table TRM_CONCEPT_MAP add constraint FK_TRMCONCEPTMAP_RES foreign key (RES_ID) references HFJ_RESOURCE (RES_ID)")
			.addSql(DriverTypeEnum.ORACLE_12C, "create table TRM_CONCEPT_MAP (PID number(19,0) not null, RES_ID number(19,0), SOURCE_URL varchar2(200 char), TARGET_URL varchar2(200 char), URL varchar2(200 char) not null, primary key (PID))")
			.addSql(DriverTypeEnum.ORACLE_12C, "alter table TRM_CONCEPT_MAP add constraint IDX_CONCEPT_MAP_URL unique (URL)")
			.addSql(DriverTypeEnum.ORACLE_12C, "alter table TRM_CONCEPT_MAP add constraint FK_TRMCONCEPTMAP_RES foreign key (RES_ID) references HFJ_RESOURCE")
			.addSql(DriverTypeEnum.POSTGRES_9_4, "create table TRM_CONCEPT_MAP (PID int8 not null, RES_ID int8, SOURCE_URL varchar(200), TARGET_URL varchar(200), URL varchar(200) not null, primary key (PID))")
			.addSql(DriverTypeEnum.POSTGRES_9_4, "alter table TRM_CONCEPT_MAP add constraint FK_TRMCONCEPTMAP_RES foreign key (RES_ID) references HFJ_RESOURCE")
			.addSql(DriverTypeEnum.POSTGRES_9_4, "alter table TRM_CONCEPT_MAP add constraint IDX_CONCEPT_MAP_URL unique (URL)")
			.addSql(DriverTypeEnum.MSSQL_2012, "create table TRM_CONCEPT_MAP (PID bigint not null, RES_ID bigint, SOURCE_URL varchar(200), TARGET_URL varchar(200), URL varchar(200) not null, primary key (PID))")
			.addSql(DriverTypeEnum.MSSQL_2012, "alter table TRM_CONCEPT_MAP add constraint IDX_CONCEPT_MAP_URL unique (URL)")
			.addSql(DriverTypeEnum.MSSQL_2012, "alter table TRM_CONCEPT_MAP add constraint FK_TRMCONCEPTMAP_RES foreign key (RES_ID) references HFJ_RESOURCE")
			.addSql(DriverTypeEnum.MARIADB_10_1, "create table TRM_CONCEPT_MAP (PID bigint not null, RES_ID bigint, SOURCE_URL varchar(200), TARGET_URL varchar(200), URL varchar(200) not null, primary key (PID))
|
||||
.addSql(DriverTypeEnum.MARIADB_10_1, "alter table TRM_CONCEPT_MAP add constraint FK_TRMCONCEPTMAP_RES foreign key (RES_ID) references HFJ_RESOURCE (RES_ID)")
|
||||
.addSql(DriverTypeEnum.MARIADB_10_1, "alter table TRM_CONCEPT_MAP add constraint IDX_CONCEPT_MAP_URL unique (URL)");
|
||||
|
||||
// Concept Map - Group
|
||||
version.startSectionWithMessage("Starting work on table: TRM_CONCEPT_MAP_GROUP");
|
||||
version
|
||||
.addTable("TRM_CONCEPT_MAP_GROUP")
|
||||
.addSql(DriverTypeEnum.DERBY_EMBEDDED, "create table TRM_CONCEPT_MAP_GROUP (PID bigint not null, myConceptMapUrl varchar(255), SOURCE_URL varchar(200) not null, mySourceValueSet varchar(255), SOURCE_VERSION varchar(100), TARGET_URL varchar(200) not null, myTargetValueSet varchar(255), TARGET_VERSION varchar(100), CONCEPT_MAP_PID bigint not null, primary key (PID))")
|
||||
.addSql(DriverTypeEnum.DERBY_EMBEDDED, "alter table TRM_CONCEPT_MAP_GROUP add constraint FK_TCMGROUP_CONCEPTMAP foreign key (CONCEPT_MAP_PID) references TRM_CONCEPT_MAP")
|
||||
.addSql(DriverTypeEnum.DERBY_EMBEDDED, "create unique index IDX_CONCEPT_MAP_URL on TRM_CONCEPT_MAP (URL)")
|
||||
.addSql(DriverTypeEnum.ORACLE_12C, "create table TRM_CONCEPT_MAP_GROUP (PID number(19,0) not null, myConceptMapUrl varchar2(255 char), SOURCE_URL varchar2(200 char) not null, mySourceValueSet varchar2(255 char), SOURCE_VERSION varchar2(100 char), TARGET_URL varchar2(200 char) not null, myTargetValueSet varchar2(255 char), TARGET_VERSION varchar2(100 char), CONCEPT_MAP_PID number(19,0) not null, primary key (PID))")
|
||||
.addSql(DriverTypeEnum.ORACLE_12C, "alter table TRM_CONCEPT_MAP_GROUP add constraint FK_TCMGROUP_CONCEPTMAP foreign key (CONCEPT_MAP_PID) references TRM_CONCEPT_MAP")
|
||||
.addSql(DriverTypeEnum.MARIADB_10_1, "create table TRM_CONCEPT_MAP_GROUP (PID bigint not null, myConceptMapUrl varchar(255), SOURCE_URL varchar(200) not null, mySourceValueSet varchar(255), SOURCE_VERSION varchar(100), TARGET_URL varchar(200) not null, myTargetValueSet varchar(255), TARGET_VERSION varchar(100), CONCEPT_MAP_PID bigint not null, primary key (PID))")
|
||||
.addSql(DriverTypeEnum.MARIADB_10_1, "alter table TRM_CONCEPT_MAP_GROUP add constraint FK_TCMGROUP_CONCEPTMAP foreign key (CONCEPT_MAP_PID) references TRM_CONCEPT_MAP (PID)")
|
||||
.addSql(DriverTypeEnum.MYSQL_5_7, "create table TRM_CONCEPT_MAP_GROUP (PID bigint not null, myConceptMapUrl varchar(255), SOURCE_URL varchar(200) not null, mySourceValueSet varchar(255), SOURCE_VERSION varchar(100), TARGET_URL varchar(200) not null, myTargetValueSet varchar(255), TARGET_VERSION varchar(100), CONCEPT_MAP_PID bigint not null, primary key (PID))")
|
||||
.addSql(DriverTypeEnum.MYSQL_5_7, "alter table TRM_CONCEPT_MAP_GROUP add constraint FK_TCMGROUP_CONCEPTMAP foreign key (CONCEPT_MAP_PID) references TRM_CONCEPT_MAP (PID)")
|
||||
.addSql(DriverTypeEnum.MSSQL_2012, "create table TRM_CONCEPT_MAP_GROUP (PID bigint not null, myConceptMapUrl varchar(255), SOURCE_URL varchar(200) not null, mySourceValueSet varchar(255), SOURCE_VERSION varchar(100), TARGET_URL varchar(200) not null, myTargetValueSet varchar(255), TARGET_VERSION varchar(100), CONCEPT_MAP_PID bigint not null, primary key (PID))")
|
||||
.addSql(DriverTypeEnum.MSSQL_2012, "alter table TRM_CONCEPT_MAP_GROUP add constraint FK_TCMGROUP_CONCEPTMAP foreign key (CONCEPT_MAP_PID) references TRM_CONCEPT_MAP")
|
||||
.addSql(DriverTypeEnum.POSTGRES_9_4, "create table TRM_CONCEPT_MAP_GROUP (PID int8 not null, myConceptMapUrl varchar(255), SOURCE_URL varchar(200) not null, mySourceValueSet varchar(255), SOURCE_VERSION varchar(100), TARGET_URL varchar(200) not null, myTargetValueSet varchar(255), TARGET_VERSION varchar(100), CONCEPT_MAP_PID int8 not null, primary key (PID))")
|
||||
.addSql(DriverTypeEnum.POSTGRES_9_4, "alter table TRM_CONCEPT_MAP_GROUP add constraint FK_TCMGROUP_CONCEPTMAP foreign key (CONCEPT_MAP_PID) references TRM_CONCEPT_MAP");
|
||||
|
||||
// Concept Map - Group Element
|
||||
version.startSectionWithMessage("Starting work on table: TRM_CONCEPT_MAP_GRP_ELEMENT");
|
||||
version
|
||||
.addTable("TRM_CONCEPT_MAP_GRP_ELEMENT")
|
||||
.addSql(DriverTypeEnum.DERBY_EMBEDDED, "create table TRM_CONCEPT_MAP_GRP_ELEMENT (PID bigint not null, SOURCE_CODE varchar(500) not null, myConceptMapUrl varchar(255), SOURCE_DISPLAY varchar(400), mySystem varchar(255), mySystemVersion varchar(255), myValueSet varchar(255), CONCEPT_MAP_GROUP_PID bigint not null, primary key (PID))")
|
||||
.addSql(DriverTypeEnum.DERBY_EMBEDDED, "alter table TRM_CONCEPT_MAP_GRP_ELEMENT add constraint FK_TCMGELEMENT_GROUP foreign key (CONCEPT_MAP_GROUP_PID) references TRM_CONCEPT_MAP_GROUP")
|
||||
.addSql(DriverTypeEnum.MARIADB_10_1, "create table TRM_CONCEPT_MAP_GRP_ELEMENT (PID bigint not null, SOURCE_CODE varchar(500) not null, myConceptMapUrl varchar(255), SOURCE_DISPLAY varchar(400), mySystem varchar(255), mySystemVersion varchar(255), myValueSet varchar(255), CONCEPT_MAP_GROUP_PID bigint not null, primary key (PID))")
|
||||
.addSql(DriverTypeEnum.MARIADB_10_1, "alter table TRM_CONCEPT_MAP_GRP_ELEMENT add constraint FK_TCMGELEMENT_GROUP foreign key (CONCEPT_MAP_GROUP_PID) references TRM_CONCEPT_MAP_GROUP (PID)")
|
||||
.addSql(DriverTypeEnum.MARIADB_10_1, "create index IDX_CNCPT_MAP_GRP_CD on TRM_CONCEPT_MAP_GRP_ELEMENT (SOURCE_CODE)")
|
||||
.addSql(DriverTypeEnum.DERBY_EMBEDDED, "create index IDX_CNCPT_MAP_GRP_CD on TRM_CONCEPT_MAP_GRP_ELEMENT (SOURCE_CODE)")
|
||||
.addSql(DriverTypeEnum.MYSQL_5_7, "create table TRM_CONCEPT_MAP_GRP_ELEMENT (PID bigint not null, SOURCE_CODE varchar(500) not null, myConceptMapUrl varchar(255), SOURCE_DISPLAY varchar(400), mySystem varchar(255), mySystemVersion varchar(255), myValueSet varchar(255), CONCEPT_MAP_GROUP_PID bigint not null, primary key (PID))")
|
||||
.addSql(DriverTypeEnum.MYSQL_5_7, "create index IDX_CNCPT_MAP_GRP_CD on TRM_CONCEPT_MAP_GRP_ELEMENT (SOURCE_CODE)")
|
||||
.addSql(DriverTypeEnum.MYSQL_5_7, "alter table TRM_CONCEPT_MAP_GRP_ELEMENT add constraint FK_TCMGELEMENT_GROUP foreign key (CONCEPT_MAP_GROUP_PID) references TRM_CONCEPT_MAP_GROUP (PID)")
|
||||
.addSql(DriverTypeEnum.POSTGRES_9_4, "create table TRM_CONCEPT_MAP_GRP_ELEMENT (PID int8 not null, SOURCE_CODE varchar(500) not null, myConceptMapUrl varchar(255), SOURCE_DISPLAY varchar(400), mySystem varchar(255), mySystemVersion varchar(255), myValueSet varchar(255), CONCEPT_MAP_GROUP_PID int8 not null, primary key (PID))")
|
||||
.addSql(DriverTypeEnum.POSTGRES_9_4, "alter table TRM_CONCEPT_MAP_GRP_ELEMENT add constraint FK_TCMGELEMENT_GROUP foreign key (CONCEPT_MAP_GROUP_PID) references TRM_CONCEPT_MAP_GROUP")
|
||||
.addSql(DriverTypeEnum.POSTGRES_9_4, "create index IDX_CNCPT_MAP_GRP_CD on TRM_CONCEPT_MAP_GRP_ELEMENT (SOURCE_CODE)")
|
||||
.addSql(DriverTypeEnum.ORACLE_12C, "create table TRM_CONCEPT_MAP_GRP_ELEMENT (PID number(19,0) not null, SOURCE_CODE varchar2(500 char) not null, myConceptMapUrl varchar2(255 char), SOURCE_DISPLAY varchar2(400 char), mySystem varchar2(255 char), mySystemVersion varchar2(255 char), myValueSet varchar2(255 char), CONCEPT_MAP_GROUP_PID number(19,0) not null, primary key (PID))")
|
||||
.addSql(DriverTypeEnum.ORACLE_12C, "alter table TRM_CONCEPT_MAP_GRP_ELEMENT add constraint FK_TCMGELEMENT_GROUP foreign key (CONCEPT_MAP_GROUP_PID) references TRM_CONCEPT_MAP_GROUP")
|
||||
.addSql(DriverTypeEnum.ORACLE_12C, "create index IDX_CNCPT_MAP_GRP_CD on TRM_CONCEPT_MAP_GRP_ELEMENT (SOURCE_CODE)")
|
||||
.addSql(DriverTypeEnum.MSSQL_2012, "create table TRM_CONCEPT_MAP_GRP_ELEMENT (PID bigint not null, SOURCE_CODE varchar(500) not null, myConceptMapUrl varchar(255), SOURCE_DISPLAY varchar(400), mySystem varchar(255), mySystemVersion varchar(255), myValueSet varchar(255), CONCEPT_MAP_GROUP_PID bigint not null, primary key (PID))")
|
||||
.addSql(DriverTypeEnum.MSSQL_2012, "create index IDX_CNCPT_MAP_GRP_CD on TRM_CONCEPT_MAP_GRP_ELEMENT (SOURCE_CODE)")
|
||||
.addSql(DriverTypeEnum.MSSQL_2012, "alter table TRM_CONCEPT_MAP_GRP_ELEMENT add constraint FK_TCMGELEMENT_GROUP foreign key (CONCEPT_MAP_GROUP_PID) references TRM_CONCEPT_MAP_GROUP");
|
||||
|
||||
// Concept Map - Group Element Target
|
||||
version.startSectionWithMessage("Starting work on table: TRM_CONCEPT_MAP_GRP_ELM_TGT");
|
||||
version
|
||||
.addTable("TRM_CONCEPT_MAP_GRP_ELM_TGT")
|
||||
.addSql(DriverTypeEnum.DERBY_EMBEDDED, "create table TRM_CONCEPT_MAP_GRP_ELM_TGT (PID bigint not null, TARGET_CODE varchar(500) not null, myConceptMapUrl varchar(255), TARGET_DISPLAY varchar(400), TARGET_EQUIVALENCE varchar(50), mySystem varchar(255), mySystemVersion varchar(255), myValueSet varchar(255), CONCEPT_MAP_GRP_ELM_PID bigint not null, primary key (PID))")
|
||||
.addSql(DriverTypeEnum.DERBY_EMBEDDED, "alter table TRM_CONCEPT_MAP_GRP_ELM_TGT add constraint FK_TCMGETARGET_ELEMENT foreign key (CONCEPT_MAP_GRP_ELM_PID) references TRM_CONCEPT_MAP_GRP_ELEMENT")
|
||||
.addSql(DriverTypeEnum.DERBY_EMBEDDED, "create index IDX_CNCPT_MP_GRP_ELM_TGT_CD on TRM_CONCEPT_MAP_GRP_ELM_TGT (TARGET_CODE)")
|
||||
.addSql(DriverTypeEnum.MARIADB_10_1, "create table TRM_CONCEPT_MAP_GRP_ELM_TGT (PID bigint not null, TARGET_CODE varchar(500) not null, myConceptMapUrl varchar(255), TARGET_DISPLAY varchar(400), TARGET_EQUIVALENCE varchar(50), mySystem varchar(255), mySystemVersion varchar(255), myValueSet varchar(255), CONCEPT_MAP_GRP_ELM_PID bigint not null, primary key (PID))")
|
||||
.addSql(DriverTypeEnum.MARIADB_10_1, "alter table TRM_CONCEPT_MAP_GRP_ELM_TGT add constraint FK_TCMGETARGET_ELEMENT foreign key (CONCEPT_MAP_GRP_ELM_PID) references TRM_CONCEPT_MAP_GRP_ELEMENT (PID)")
|
||||
.addSql(DriverTypeEnum.MARIADB_10_1, "create index IDX_CNCPT_MP_GRP_ELM_TGT_CD on TRM_CONCEPT_MAP_GRP_ELM_TGT (TARGET_CODE)")
|
||||
.addSql(DriverTypeEnum.MYSQL_5_7, "create table TRM_CONCEPT_MAP_GRP_ELM_TGT (PID bigint not null, TARGET_CODE varchar(500) not null, myConceptMapUrl varchar(255), TARGET_DISPLAY varchar(400), TARGET_EQUIVALENCE varchar(50), mySystem varchar(255), mySystemVersion varchar(255), myValueSet varchar(255), CONCEPT_MAP_GRP_ELM_PID bigint not null, primary key (PID))")
|
||||
.addSql(DriverTypeEnum.MYSQL_5_7, "alter table TRM_CONCEPT_MAP_GRP_ELM_TGT add constraint FK_TCMGETARGET_ELEMENT foreign key (CONCEPT_MAP_GRP_ELM_PID) references TRM_CONCEPT_MAP_GRP_ELEMENT (PID)")
|
||||
.addSql(DriverTypeEnum.MYSQL_5_7, "create index IDX_CNCPT_MP_GRP_ELM_TGT_CD on TRM_CONCEPT_MAP_GRP_ELM_TGT (TARGET_CODE)")
|
||||
.addSql(DriverTypeEnum.ORACLE_12C, "create table TRM_CONCEPT_MAP_GRP_ELM_TGT (PID number(19,0) not null, TARGET_CODE varchar2(500 char) not null, myConceptMapUrl varchar2(255 char), TARGET_DISPLAY varchar2(400 char), TARGET_EQUIVALENCE varchar2(50 char), mySystem varchar2(255 char), mySystemVersion varchar2(255 char), myValueSet varchar2(255 char), CONCEPT_MAP_GRP_ELM_PID number(19,0) not null, primary key (PID))")
|
||||
.addSql(DriverTypeEnum.ORACLE_12C, "alter table TRM_CONCEPT_MAP_GRP_ELM_TGT add constraint FK_TCMGETARGET_ELEMENT foreign key (CONCEPT_MAP_GRP_ELM_PID) references TRM_CONCEPT_MAP_GRP_ELEMENT")
|
||||
.addSql(DriverTypeEnum.ORACLE_12C, "create index IDX_CNCPT_MP_GRP_ELM_TGT_CD on TRM_CONCEPT_MAP_GRP_ELM_TGT (TARGET_CODE)")
|
||||
.addSql(DriverTypeEnum.POSTGRES_9_4, "create table TRM_CONCEPT_MAP_GRP_ELM_TGT (PID int8 not null, TARGET_CODE varchar(500) not null, myConceptMapUrl varchar(255), TARGET_DISPLAY varchar(400), TARGET_EQUIVALENCE varchar(50), mySystem varchar(255), mySystemVersion varchar(255), myValueSet varchar(255), CONCEPT_MAP_GRP_ELM_PID int8 not null, primary key (PID))")
|
||||
.addSql(DriverTypeEnum.POSTGRES_9_4, "alter table TRM_CONCEPT_MAP_GRP_ELM_TGT add constraint FK_TCMGETARGET_ELEMENT foreign key (CONCEPT_MAP_GRP_ELM_PID) references TRM_CONCEPT_MAP_GRP_ELEMENT")
|
||||
.addSql(DriverTypeEnum.POSTGRES_9_4, "create index IDX_CNCPT_MP_GRP_ELM_TGT_CD on TRM_CONCEPT_MAP_GRP_ELM_TGT (TARGET_CODE)")
|
||||
.addSql(DriverTypeEnum.MSSQL_2012, "create table TRM_CONCEPT_MAP_GRP_ELM_TGT (PID bigint not null, TARGET_CODE varchar(500) not null, myConceptMapUrl varchar(255), TARGET_DISPLAY varchar(400), TARGET_EQUIVALENCE varchar(50), mySystem varchar(255), mySystemVersion varchar(255), myValueSet varchar(255), CONCEPT_MAP_GRP_ELM_PID bigint not null, primary key (PID))")
|
||||
.addSql(DriverTypeEnum.MSSQL_2012, "create index IDX_CNCPT_MP_GRP_ELM_TGT_CD on TRM_CONCEPT_MAP_GRP_ELM_TGT (TARGET_CODE)")
|
||||
.addSql(DriverTypeEnum.MSSQL_2012, "alter table TRM_CONCEPT_MAP_GRP_ELM_TGT add constraint FK_TCMGETARGET_ELEMENT foreign key (CONCEPT_MAP_GRP_ELM_PID) references TRM_CONCEPT_MAP_GRP_ELEMENT");
|
||||
}
|
||||
|
||||
|
||||
}
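The chained addTable()/addSql() calls above register one task per terminology table, each carrying separate DDL for every supported dialect; presumably only the SQL registered for the active driver is executed at migration time. A minimal sketch of driving these registered tasks end to end with the Migrator API (which appears in the test support class later in this diff) — the VersionEnum constant names below are illustrative assumptions, not taken from this change:

// Hedged sketch: run every task registered between two versions against embedded Derby.
// Assumes VersionEnum exposes V3_4_0/V3_5_0 constants and Migrator.addTask accepts any BaseTask.
Migrator migrator = new Migrator();
migrator.setConnectionUrl("jdbc:derby:memory:migration;create=true");
migrator.setDriverType(DriverTypeEnum.DERBY_EMBEDDED);
migrator.setUsername("SA");
migrator.setPassword("SA");

HapiFhirJpaMigrationTasks tasks = new HapiFhirJpaMigrationTasks();
for (BaseTask<?> next : tasks.getTasks(VersionEnum.V3_4_0, VersionEnum.V3_5_0)) {
	migrator.addTask(next);
}
migrator.migrate();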
@ -0,0 +1,203 @@
package ca.uhn.fhir.jpa.migrate.tasks.api;

import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.taskdef.*;
import ca.uhn.fhir.jpa.migrate.tasks.HapiFhirJpaMigrationTasks;
import ca.uhn.fhir.util.VersionEnum;
import com.google.common.collect.Multimap;
import com.google.common.collect.MultimapBuilder;
import org.apache.commons.lang3.Validate;
import org.intellij.lang.annotations.Language;

import javax.annotation.Nonnull;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;

public class BaseMigrationTasks {
	private Multimap<VersionEnum, BaseTask<?>> myTasks = MultimapBuilder.hashKeys().arrayListValues().build();

	public List<BaseTask<?>> getTasks(@Nonnull VersionEnum theFrom, @Nonnull VersionEnum theTo) {
		Validate.notNull(theFrom);
		Validate.notNull(theTo);
		Validate.isTrue(theFrom.ordinal() < theTo.ordinal(), "From version must be lower than to version");

		List<BaseTask<?>> retVal = new ArrayList<>();
		for (VersionEnum nextVersion : VersionEnum.values()) {
			if (nextVersion.ordinal() <= theFrom.ordinal()) {
				continue;
			}
			if (nextVersion.ordinal() > theTo.ordinal()) {
				continue;
			}

			Collection<BaseTask<?>> nextValues = myTasks.get(nextVersion);
			if (nextValues != null) {
				retVal.addAll(nextValues);
			}
		}

		return retVal;
	}

	protected HapiFhirJpaMigrationTasks.Builder forVersion(VersionEnum theVersion) {
		return new HapiFhirJpaMigrationTasks.Builder(theVersion);
	}

	protected class Builder {

		private final VersionEnum myVersion;
		private String myTableName;

		Builder(VersionEnum theVersion) {
			myVersion = theVersion;
		}

		public BuilderWithTableName onTable(String theTableName) {
			myTableName = theTableName;
			return new BuilderWithTableName();
		}

		public void addTask(BaseTask theTask) {
			theTask.validate();
			myTasks.put(myVersion, theTask);
		}

		public BuilderAddTable addTable(String theTableName) {
			myTableName = theTableName;
			return new BuilderAddTable();
		}

		public void startSectionWithMessage(String theMessage) {
			Validate.notBlank(theMessage);
			addTask(new LogStartSectionWithMessageTask(theMessage));
		}

		public class BuilderWithTableName {
			private String myIndexName;
			private String myColumnName;

			public String getTableName() {
				return myTableName;
			}

			public void dropIndex(String theIndexName) {
				DropIndexTask task = new DropIndexTask();
				task.setIndexName(theIndexName);
				task.setTableName(myTableName);
				addTask(task);
			}

			public BuilderAddIndexWithName addIndex(String theIndexName) {
				myIndexName = theIndexName;
				return new BuilderAddIndexWithName();
			}

			public BuilderAddColumnWithName addColumn(String theColumnName) {
				myColumnName = theColumnName;
				return new BuilderAddColumnWithName();
			}

			public void addTask(BaseTableTask<?> theTask) {
				theTask.setTableName(myTableName);
				Builder.this.addTask(theTask);
			}

			public BuilderModifyColumnWithName modifyColumn(String theColumnName) {
				myColumnName = theColumnName;
				return new BuilderModifyColumnWithName();
			}

			public class BuilderAddIndexWithName {
				private boolean myUnique;

				public BuilderAddIndexUnique unique(boolean theUnique) {
					myUnique = theUnique;
					return new BuilderAddIndexUnique();
				}

				public class BuilderAddIndexUnique {
					public void withColumns(String... theColumnNames) {
						AddIndexTask task = new AddIndexTask();
						task.setTableName(myTableName);
						task.setIndexName(myIndexName);
						task.setUnique(myUnique);
						task.setColumns(theColumnNames);
						addTask(task);
					}
				}
			}

			public class BuilderAddColumnWithName {
				private boolean myNullable;

				public BuilderAddColumnWithNameNullable nullable() {
					myNullable = true;
					return new BuilderAddColumnWithNameNullable();
				}

				public class BuilderAddColumnWithNameNullable {
					public void type(AddColumnTask.ColumnTypeEnum theColumnType) {
						AddColumnTask task = new AddColumnTask();
						task.setColumnName(myColumnName);
						task.setNullable(myNullable);
						task.setColumnType(theColumnType);
						addTask(task);
					}
				}
			}

			public class BuilderModifyColumnWithName {

				private boolean myNullable;

				public BuilderModifyColumnWithNameAndNullable nullable() {
					myNullable = true;
					return new BuilderModifyColumnWithNameAndNullable();
				}

				public BuilderModifyColumnWithNameAndNullable nonNullable() {
					myNullable = false;
					return new BuilderModifyColumnWithNameAndNullable();
				}

				public class BuilderModifyColumnWithNameAndNullable {

					public void withType(BaseTableColumnTypeTask.ColumnTypeEnum theColumnType, int theLength) {
						if (theColumnType == BaseTableColumnTypeTask.ColumnTypeEnum.STRING) {
							ModifyColumnTask task = new ModifyColumnTask();
							task.setColumnName(myColumnName);
							task.setTableName(myTableName);
							task.setColumnLength(theLength);
							task.setNullable(myNullable);
							task.setColumnType(theColumnType);
							addTask(task);
						} else {
							throw new IllegalArgumentException("Cannot specify length for column of type " + theColumnType);
						}
					}

				}
			}
		}

		public class BuilderAddTable {

			private final AddTableTask myTask;

			protected BuilderAddTable() {
				myTask = new AddTableTask();
				myTask.setTableName(myTableName);
				addTask(myTask);
			}

			public BuilderAddTable addSql(DriverTypeEnum theDriverTypeEnum, @Language("SQL") String theSql) {
				myTask.addSql(theDriverTypeEnum, theSql);
				return this;
			}
		}
	}

}
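Taken together, the nested builder classes above form a small fluent DSL that subclasses use to declare schema changes for a given release. A hedged usage sketch from inside such a subclass — the table, column, and index names are invented for illustration, and the VersionEnum constant name is an assumption:

// Illustrative only: SOME_TABLE, NEW_COL and IDX_SOME_NEWCOL are made-up names.
Builder version = forVersion(VersionEnum.V3_5_0); // assumed constant name
version.startSectionWithMessage("Starting work on table: SOME_TABLE");
version.onTable("SOME_TABLE")
	.addColumn("NEW_COL")
	.nullable()
	.type(AddColumnTask.ColumnTypeEnum.LONG);
version.onTable("SOME_TABLE")
	.addIndex("IDX_SOME_NEWCOL")
	.unique(false)
	.withColumns("NEW_COL");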
@ -0,0 +1,44 @@
package ca.uhn.fhir.jpa.migrate.taskdef;

import ca.uhn.fhir.jpa.migrate.JdbcUtils;
import org.junit.Test;

import java.sql.SQLException;

import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.junit.Assert.assertThat;

public class AddColumnTest extends BaseTest {

	@Test
	public void testColumnDoesntAlreadyExist() throws SQLException {
		executeSql("create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255))");

		AddColumnTask task = new AddColumnTask();
		task.setTableName("SOMETABLE");
		task.setColumnName("newcol");
		task.setColumnType(AddColumnTask.ColumnTypeEnum.LONG);
		task.setNullable(true);
		getMigrator().addTask(task);

		getMigrator().migrate();

		assertThat(JdbcUtils.getColumnNames(getConnectionProperties(), "SOMETABLE"), containsInAnyOrder("PID", "TEXTCOL", "NEWCOL"));
	}

	@Test
	public void testColumnAlreadyExists() throws SQLException {
		executeSql("create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255), newcol bigint)");

		AddColumnTask task = new AddColumnTask();
		task.setTableName("SOMETABLE");
		task.setColumnName("newcol");
		task.setColumnType(AddColumnTask.ColumnTypeEnum.LONG);
		getMigrator().addTask(task);

		getMigrator().migrate();

		assertThat(JdbcUtils.getColumnNames(getConnectionProperties(), "SOMETABLE"), containsInAnyOrder("PID", "TEXTCOL", "NEWCOL"));
	}

}
@ -0,0 +1,50 @@
package ca.uhn.fhir.jpa.migrate.taskdef;

import ca.uhn.fhir.jpa.migrate.JdbcUtils;
import org.junit.Test;

import java.sql.SQLException;

import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.junit.Assert.assertThat;

public class AddIndexTest extends BaseTest {

	@Test
	public void testIndexAlreadyExists() throws SQLException {
		executeSql("create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255))");
		executeSql("create unique index IDX_ANINDEX on SOMETABLE (PID, TEXTCOL)");
		executeSql("create unique index IDX_DIFINDEX on SOMETABLE (TEXTCOL)");

		AddIndexTask task = new AddIndexTask();
		task.setIndexName("IDX_ANINDEX");
		task.setTableName("SOMETABLE");
		task.setColumns("PID", "TEXTCOL");
		task.setUnique(false);
		getMigrator().addTask(task);

		getMigrator().migrate();

		assertThat(JdbcUtils.getIndexNames(getConnectionProperties(), "SOMETABLE"), containsInAnyOrder("IDX_DIFINDEX", "IDX_ANINDEX"));
	}

	@Test
	public void testIndexDoesntAlreadyExist() throws SQLException {
		executeSql("create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255))");
		executeSql("create unique index IDX_DIFINDEX on SOMETABLE (TEXTCOL)");

		AddIndexTask task = new AddIndexTask();
		task.setIndexName("IDX_ANINDEX");
		task.setTableName("SOMETABLE");
		task.setColumns("PID", "TEXTCOL");
		task.setUnique(false);
		getMigrator().addTask(task);

		getMigrator().migrate();

		assertThat(JdbcUtils.getIndexNames(getConnectionProperties(), "SOMETABLE"), containsInAnyOrder("IDX_DIFINDEX", "IDX_ANINDEX"));
	}

}
@ -0,0 +1,42 @@
package ca.uhn.fhir.jpa.migrate.taskdef;

import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.JdbcUtils;
import org.junit.Test;

import java.sql.SQLException;

import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.junit.Assert.assertThat;

public class AddTableTest extends BaseTest {

	@Test
	public void testTableDoesntAlreadyExist() throws SQLException {
		AddTableTask task = new AddTableTask();
		task.setTableName("SOMETABLE");
		task.addSql(DriverTypeEnum.DERBY_EMBEDDED, "create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255))");
		getMigrator().addTask(task);

		getMigrator().migrate();

		assertThat(JdbcUtils.getTableNames(getConnectionProperties()), containsInAnyOrder("SOMETABLE"));
	}

	@Test
	public void testTableAlreadyExists() throws SQLException {
		executeSql("create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255))");
		assertThat(JdbcUtils.getTableNames(getConnectionProperties()), containsInAnyOrder("SOMETABLE"));

		AddTableTask task = new AddTableTask();
		task.setTableName("SOMETABLE");
		task.addSql(DriverTypeEnum.DERBY_EMBEDDED, "create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255))");
		getMigrator().addTask(task);

		getMigrator().migrate();

		assertThat(JdbcUtils.getTableNames(getConnectionProperties()), containsInAnyOrder("SOMETABLE"));
	}

}
@ -0,0 +1,53 @@
package ca.uhn.fhir.jpa.migrate.taskdef;

import ca.uhn.fhir.jpa.entity.SearchParamPresent;
import org.junit.Test;

import java.util.List;
import java.util.Map;

import static org.junit.Assert.assertEquals;

public class ArbitrarySqlTest extends BaseTest {

	@Test
	public void test350MigrateSearchParams() {
		executeSql("create table HFJ_SEARCH_PARM (PID bigint not null, RES_TYPE varchar(255), PARAM_NAME varchar(255))");
		executeSql("insert into HFJ_SEARCH_PARM (PID, RES_TYPE, PARAM_NAME) values (1, 'Patient', 'identifier')");
		executeSql("insert into HFJ_SEARCH_PARM (PID, RES_TYPE, PARAM_NAME) values (2, 'Patient', 'family')");
		executeSql("create table HFJ_RES_PARAM_PRESENT (PID bigint, SP_ID bigint, SP_PRESENT boolean, HASH_PRESENT bigint)");
		executeSql("insert into HFJ_RES_PARAM_PRESENT (PID, SP_ID, SP_PRESENT, HASH_PRESENT) values (100, 1, true, null)");
		executeSql("insert into HFJ_RES_PARAM_PRESENT (PID, SP_ID, SP_PRESENT, HASH_PRESENT) values (101, 2, true, null)");

		ArbitrarySqlTask task = new ArbitrarySqlTask("Consolidate search parameter presence indexes");
		task.setBatchSize(1);
		String sql = "SELECT " +
			"HFJ_SEARCH_PARM.RES_TYPE RES_TYPE, HFJ_SEARCH_PARM.PARAM_NAME PARAM_NAME, " +
			"HFJ_RES_PARAM_PRESENT.PID PID, HFJ_RES_PARAM_PRESENT.SP_ID SP_ID, HFJ_RES_PARAM_PRESENT.SP_PRESENT SP_PRESENT, HFJ_RES_PARAM_PRESENT.HASH_PRESENT HASH_PRESENT " +
			"from HFJ_RES_PARAM_PRESENT " +
			"join HFJ_SEARCH_PARM ON (HFJ_SEARCH_PARM.PID = HFJ_RES_PARAM_PRESENT.SP_ID) " +
			"where HFJ_RES_PARAM_PRESENT.HASH_PRESENT is null";
		task.addQuery(sql, ArbitrarySqlTask.QueryModeEnum.BATCH_UNTIL_NO_MORE, t -> {
			Long pid = (Long) t.get("PID");
			Boolean present = (Boolean) t.get("SP_PRESENT");
			String resType = (String) t.get("RES_TYPE");
			String paramName = (String) t.get("PARAM_NAME");
			Long hash = SearchParamPresent.calculateHashPresence(resType, paramName, present);
			task.executeSql("update HFJ_RES_PARAM_PRESENT set HASH_PRESENT = ? where PID = ?", hash, pid);
		});

		getMigrator().addTask(task);
		getMigrator().migrate();

		List<Map<String, Object>> rows = executeQuery("select * from HFJ_RES_PARAM_PRESENT order by PID asc");
		assertEquals(2, rows.size());
		assertEquals(100L, rows.get(0).get("PID"));
		assertEquals(-1100208805056022671L, rows.get(0).get("HASH_PRESENT"));
		assertEquals(101L, rows.get(1).get("PID"));
		assertEquals(-756348509333838170L, rows.get(1).get("HASH_PRESENT"));
	}

}
@ -0,0 +1,64 @@
package ca.uhn.fhir.jpa.migrate.taskdef;

import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.Migrator;
import org.intellij.lang.annotations.Language;
import org.junit.After;
import org.junit.Before;
import org.springframework.jdbc.core.ColumnMapRowMapper;

import java.util.List;
import java.util.Map;

public class BaseTest {

	private static int ourDatabaseUrl = 0;
	private String myUrl;
	private Migrator myMigrator;
	private DriverTypeEnum.ConnectionProperties myConnectionProperties;

	public String getUrl() {
		return myUrl;
	}

	public DriverTypeEnum.ConnectionProperties getConnectionProperties() {
		return myConnectionProperties;
	}

	protected void executeSql(@Language("SQL") String theSql, Object... theArgs) {
		myConnectionProperties.getTxTemplate().execute(t -> {
			myConnectionProperties.newJdbcTemplate().update(theSql, theArgs);
			return null;
		});
	}

	protected List<Map<String, Object>> executeQuery(@Language("SQL") String theSql, Object... theArgs) {
		return myConnectionProperties.getTxTemplate().execute(t -> {
			return myConnectionProperties.newJdbcTemplate().query(theSql, theArgs, new ColumnMapRowMapper());
		});
	}

	public Migrator getMigrator() {
		return myMigrator;
	}

	@After
	public void after() {
		myConnectionProperties.close();
	}

	@Before
	public void before() {
		myUrl = "jdbc:derby:memory:database " + (ourDatabaseUrl++) + ";create=true";

		myConnectionProperties = DriverTypeEnum.DERBY_EMBEDDED.newConnectionProperties(myUrl, "SA", "SA");

		myMigrator = new Migrator();
		myMigrator.setConnectionUrl(myUrl);
		myMigrator.setDriverType(DriverTypeEnum.DERBY_EMBEDDED);
		myMigrator.setUsername("SA");
		myMigrator.setPassword("SA");
	}

}
@ -0,0 +1,53 @@
package ca.uhn.fhir.jpa.migrate.taskdef;

import ca.uhn.fhir.jpa.entity.BaseResourceIndexedSearchParam;
import ca.uhn.fhir.jpa.entity.ResourceIndexedSearchParamToken;
import org.junit.Test;
import org.springframework.jdbc.core.JdbcTemplate;

import java.util.Map;

import static org.junit.Assert.assertEquals;

public class CreateHashesTest extends BaseTest {

	@Test
	public void testCreateHashes() {
		executeSql("create table HFJ_SPIDX_TOKEN (SP_ID bigint not null, SP_MISSING boolean, SP_NAME varchar(100) not null, RES_ID bigint, RES_TYPE varchar(255) not null, SP_UPDATED timestamp, HASH_IDENTITY bigint, HASH_SYS bigint, HASH_SYS_AND_VALUE bigint, HASH_VALUE bigint, SP_SYSTEM varchar(200), SP_VALUE varchar(200), primary key (SP_ID))");
		executeSql("insert into HFJ_SPIDX_TOKEN (SP_MISSING, SP_NAME, RES_ID, RES_TYPE, SP_UPDATED, SP_SYSTEM, SP_VALUE, SP_ID) values (false, 'identifier', 999, 'Patient', '2018-09-03 07:44:49.196', 'urn:oid:1.2.410.100110.10.41308301', '88888888', 1)");
		executeSql("insert into HFJ_SPIDX_TOKEN (SP_MISSING, SP_NAME, RES_ID, RES_TYPE, SP_UPDATED, SP_SYSTEM, SP_VALUE, SP_ID) values (false, 'identifier', 999, 'Patient', '2018-09-03 07:44:49.196', 'urn:oid:1.2.410.100110.10.41308301', '99999999', 2)");

		CalculateHashesTask task = new CalculateHashesTask();
		task.setTableName("HFJ_SPIDX_TOKEN");
		task.setColumnName("HASH_IDENTITY");
		task.addCalculator("HASH_IDENTITY", t -> BaseResourceIndexedSearchParam.calculateHashIdentity(t.getResourceType(), t.getString("SP_NAME")));
		task.addCalculator("HASH_SYS", t -> ResourceIndexedSearchParamToken.calculateHashSystem(t.getResourceType(), t.getParamName(), t.getString("SP_SYSTEM")));
		task.addCalculator("HASH_SYS_AND_VALUE", t -> ResourceIndexedSearchParamToken.calculateHashSystemAndValue(t.getResourceType(), t.getParamName(), t.getString("SP_SYSTEM"), t.getString("SP_VALUE")));
		task.addCalculator("HASH_VALUE", t -> ResourceIndexedSearchParamToken.calculateHashValue(t.getResourceType(), t.getParamName(), t.getString("SP_VALUE")));
		task.setBatchSize(1);
		getMigrator().addTask(task);

		getMigrator().migrate();

		getConnectionProperties().getTxTemplate().execute(t -> {
			Map<String, Object> map;
			JdbcTemplate jdbcTemplate = getConnectionProperties().newJdbcTemplate();

			map = jdbcTemplate.queryForMap("select * from HFJ_SPIDX_TOKEN where SP_ID = 1");
			assertEquals(7001889285610424179L, map.get("HASH_IDENTITY"));
			assertEquals(2686400398917843456L, map.get("HASH_SYS"));
			assertEquals(-3943098850992523411L, map.get("HASH_SYS_AND_VALUE"));
			assertEquals(845040519142030272L, map.get("HASH_VALUE"));

			map = jdbcTemplate.queryForMap("select * from HFJ_SPIDX_TOKEN where SP_ID = 2");
			assertEquals(7001889285610424179L, map.get("HASH_IDENTITY"));
			assertEquals(2686400398917843456L, map.get("HASH_SYS"));
			assertEquals(-6583685191951870327L, map.get("HASH_SYS_AND_VALUE"));
			assertEquals(8271382783311609619L, map.get("HASH_VALUE"));

			return null;
		});
	}

}
@ -0,0 +1,46 @@
package ca.uhn.fhir.jpa.migrate.taskdef;

import ca.uhn.fhir.jpa.migrate.JdbcUtils;
import org.junit.Test;

import java.sql.SQLException;

import static org.hamcrest.Matchers.contains;
import static org.junit.Assert.assertThat;

public class DropIndexTest extends BaseTest {

	@Test
	public void testIndexAlreadyExists() throws SQLException {
		executeSql("create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255))");
		executeSql("create unique index IDX_ANINDEX on SOMETABLE (PID, TEXTCOL)");
		executeSql("create unique index IDX_DIFINDEX on SOMETABLE (TEXTCOL)");

		DropIndexTask task = new DropIndexTask();
		task.setDescription("Drop an index");
		task.setIndexName("IDX_ANINDEX");
		task.setTableName("SOMETABLE");
		getMigrator().addTask(task);

		getMigrator().migrate();

		assertThat(JdbcUtils.getIndexNames(getConnectionProperties(), "SOMETABLE"), contains("IDX_DIFINDEX"));
	}

	@Test
	public void testIndexDoesntAlreadyExist() throws SQLException {
		executeSql("create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255))");
		executeSql("create unique index IDX_DIFINDEX on SOMETABLE (TEXTCOL)");

		DropIndexTask task = new DropIndexTask();
		task.setDescription("Drop an index");
		task.setIndexName("IDX_ANINDEX");
		task.setTableName("SOMETABLE");
		getMigrator().addTask(task);

		getMigrator().migrate();

		assertThat(JdbcUtils.getIndexNames(getConnectionProperties(), "SOMETABLE"), contains("IDX_DIFINDEX"));
	}

}
@ -0,0 +1,32 @@
package ca.uhn.fhir.jpa.migrate.taskdef;

import ca.uhn.fhir.jpa.migrate.JdbcUtils;
import org.junit.Test;

import java.sql.SQLException;

import static org.junit.Assert.assertEquals;

public class ModifyColumnTest extends BaseTest {

	@Test
	public void testColumnAlreadyExists() throws SQLException {
		executeSql("create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255), newcol bigint)");

		ModifyColumnTask task = new ModifyColumnTask();
		task.setTableName("SOMETABLE");
		task.setColumnName("TEXTCOL");
		task.setColumnType(AddColumnTask.ColumnTypeEnum.STRING);
		task.setNullable(true);
		task.setColumnLength(300);
		getMigrator().addTask(task);

		getMigrator().migrate();

		assertEquals("varchar(300)", JdbcUtils.getColumnType(getConnectionProperties(), "SOMETABLE", "TEXTCOL"));
	}

}
@ -0,0 +1,12 @@
package ca.uhn.fhir.jpa.migrate.tasks;

import org.junit.Test;

public class HapiFhirJpaMigrationTasksTest {

	@Test
	public void testCreate() {
		new HapiFhirJpaMigrationTasks();
	}

}
@ -0,0 +1,14 @@
<configuration>

	<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
		<encoder>
			<pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} [%file:%line] - %msg%n
			</pattern>
		</encoder>
	</appender>

	<root level="info">
		<appender-ref ref="STDOUT" />
	</root>

</configuration>
@ -5,7 +5,7 @@
<parent>
	<groupId>ca.uhn.hapi.fhir</groupId>
	<artifactId>hapi-fhir</artifactId>
	<version>3.5.0-SNAPSHOT</version>
	<version>3.5.0</version>
	<relativePath>../pom.xml</relativePath>
</parent>

@ -158,7 +158,7 @@
<dependency>
	<groupId>ca.uhn.hapi.fhir</groupId>
	<artifactId>hapi-fhir-converter</artifactId>
	<version>3.5.0-SNAPSHOT</version>
	<version>3.5.0</version>
</dependency>

</dependencies>