Work on migrator

James Agnew 2018-09-08 03:30:35 +08:00
parent 77305eb570
commit f5d567cd00
27 changed files with 563 additions and 174 deletions

View File

@@ -63,6 +63,10 @@
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
</dependency>
+<dependency>
+<groupId>org.apache.commons</groupId>
+<artifactId>commons-text</artifactId>
+</dependency>
<dependency>
<groupId>commons-codec</groupId>
<artifactId>commons-codec</artifactId>

View File

@@ -178,7 +178,7 @@ class ModelScanner {
}
/**
-* There are two implementations of all of the annotations (e.g. {@link Child} and {@link org.hl7.fhir.instance.model.annotations.Child}) since the HL7.org ones will eventually replace the HAPI
+* There are two implementations of all of the annotations (e.g. {@link Child} since the HL7.org ones will eventually replace the HAPI
* ones. Annotations can't extend each other or implement interfaces or anything like that, so rather than duplicate all of the annotation processing code this method just creates an interface
* Proxy to simulate the HAPI annotations if the HL7.org ones are found instead.
*/
@@ -482,9 +482,8 @@ class ModelScanner {
static Set<Class<? extends IBase>> scanVersionPropertyFile(Set<Class<? extends IBase>> theDatatypes, Map<String, Class<? extends IBaseResource>> theResourceTypes, FhirVersionEnum theVersion, Map<Class<? extends IBase>, BaseRuntimeElementDefinition<?>> theExistingElementDefinitions) {
Set<Class<? extends IBase>> retVal = new HashSet<Class<? extends IBase>>();
-InputStream str = theVersion.getVersionImplementation().getFhirVersionPropertiesFile();
+try (InputStream str = theVersion.getVersionImplementation().getFhirVersionPropertiesFile()) {
Properties prop = new Properties();
-try {
prop.load(str);
for (Entry<Object, Object> nextEntry : prop.entrySet()) {
String nextKey = nextEntry.getKey().toString();
@@ -542,8 +541,6 @@ class ModelScanner {
}
} catch (IOException e) {
throw new ConfigurationException("Failed to load model property file from classpath: " + "/ca/uhn/fhir/model/dstu/model.properties");
-} finally {
-IOUtils.closeQuietly(str);
}
return retVal;

View File

@@ -27,7 +27,7 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
-import org.apache.commons.lang3.text.WordUtils;
+import org.apache.commons.text.WordUtils;
import org.hl7.fhir.instance.model.api.IBase;
import org.hl7.fhir.instance.model.api.IBaseResource;

View File

@@ -38,7 +38,7 @@ import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.Validate;
-import org.apache.commons.lang3.text.WordUtils;
+import org.apache.commons.text.WordUtils;
import org.hl7.fhir.instance.model.api.*;
import java.io.IOException;

View File

@@ -23,8 +23,6 @@ package ca.uhn.fhir.util;
import java.io.InputStream;
import java.util.Properties;
-import org.apache.commons.io.IOUtils;
/**
* Used internally by HAPI to log the version of the HAPI FHIR framework
* once, when the framework is first loaded by the classloader.
@@ -43,17 +41,13 @@ public class VersionUtil {
}
private static void initialize() {
-InputStream is = null;
-try {
-is = VersionUtil.class.getResourceAsStream("/ca/uhn/fhir/hapi-version.properties");
+try (InputStream is = VersionUtil.class.getResourceAsStream("/ca/uhn/fhir/hapi-version.properties")) {
Properties p = new Properties();
p.load(is);
ourVersion = p.getProperty("version");
ourLog.info("HAPI FHIR version is: " + ourVersion);
} catch (Exception e) {
ourLog.warn("Unable to determine HAPI version information", e);
-} finally {
-IOUtils.closeQuietly(is);
}
}

View File

@@ -27,7 +27,7 @@ import ca.uhn.fhir.util.jar.DependencyLogFactory;
import ca.uhn.fhir.util.jar.IDependencyLog;
import com.ctc.wstx.api.WstxInputProperties;
import com.ctc.wstx.stax.WstxOutputFactory;
-import org.apache.commons.lang3.StringEscapeUtils;
+import org.apache.commons.text.StringEscapeUtils;
import org.codehaus.stax2.XMLOutputFactory2;
import org.codehaus.stax2.io.EscapingWriterFactory;
@@ -54,7 +54,7 @@ public class XmlUtil {
private static volatile XMLOutputFactory ourOutputFactory;
static {
-HashMap<String, Integer> validEntityNames = new HashMap<String, Integer>(1448);
+HashMap<String, Integer> validEntityNames = new HashMap<>(1448);
validEntityNames.put("AElig", 0x000C6);
validEntityNames.put("Aacute", 0x000C1);
validEntityNames.put("Abreve", 0x00102);

View File

@@ -212,7 +212,6 @@ public class SchemaBaseValidator implements IValidatorModule {
InputStream baseIs = FhirValidator.class.getResourceAsStream(pathToBase);
if (baseIs == null) {
-IOUtils.closeQuietly(baseIs);
throw new InternalErrorException("Schema file not found: " + pathToBase);
}

View File

@@ -30,12 +30,14 @@ import com.helger.commons.error.list.IErrorList;
import com.helger.schematron.ISchematronResource;
import com.helger.schematron.SchematronHelper;
import com.helger.schematron.xslt.SchematronResourceSCH;
-import org.apache.commons.io.IOUtils;
import org.hl7.fhir.instance.model.api.IBaseBundle;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.oclc.purl.dsdl.svrl.SchematronOutputType;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import javax.xml.transform.stream.StreamSource;
+import java.io.IOException;
import java.io.InputStream;
import java.io.StringReader;
import java.util.HashMap;
@@ -49,9 +51,13 @@ import java.util.Map;
*/
public class SchematronBaseValidator implements IValidatorModule {
-private Map<Class<? extends IBaseResource>, ISchematronResource> myClassToSchematron = new HashMap<Class<? extends IBaseResource>, ISchematronResource>();
+private static final Logger ourLog = LoggerFactory.getLogger(SchematronBaseValidator.class);
+private final Map<Class<? extends IBaseResource>, ISchematronResource> myClassToSchematron = new HashMap<>();
private FhirContext myCtx;
+/**
+* Constructor
+*/
public SchematronBaseValidator(FhirContext theContext) {
myCtx = theContext;
}
@@ -128,14 +134,13 @@ public class SchematronBaseValidator implements IValidatorModule {
String pathToBase = myCtx.getVersion().getPathToSchemaDefinitions() + '/' + theCtx.getFhirContext().getResourceDefinition(theCtx.getResource()).getBaseDefinition().getName().toLowerCase()
+ ".sch";
-InputStream baseIs = FhirValidator.class.getResourceAsStream(pathToBase);
-try {
+try (InputStream baseIs = FhirValidator.class.getResourceAsStream(pathToBase)) {
if (baseIs == null) {
throw new InternalErrorException("Failed to load schematron for resource '" + theCtx.getFhirContext().getResourceDefinition(theCtx.getResource()).getBaseDefinition().getName() + "'. "
+ SchemaBaseValidator.RESOURCES_JAR_NOTE);
}
-} finally {
-IOUtils.closeQuietly(baseIs);
+} catch (IOException e) {
+ourLog.error("Failed to close stream", e);
}
retVal = SchematronResourceSCH.fromClassPath(pathToBase);
@@ -143,5 +148,4 @@ public class SchematronBaseValidator implements IValidatorModule {
return retVal;
}
}
}

View File

@@ -38,6 +38,11 @@
<artifactId>hapi-fhir-igpacks</artifactId>
<version>${project.version}</version>
</dependency>
+<dependency>
+<groupId>ca.uhn.hapi.fhir</groupId>
+<artifactId>hapi-fhir-jpaserver-migrate</artifactId>
+<version>${project.version}</version>
+</dependency>
<dependency>
<groupId>org.apache.commons</groupId>

View File

@@ -27,7 +27,7 @@ import ch.qos.logback.core.joran.spi.JoranException;
import com.helger.commons.io.file.FileHelper;
import org.apache.commons.cli.*;
import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.lang3.text.WordUtils;
+import org.apache.commons.text.WordUtils;
import org.fusesource.jansi.Ansi;
import org.fusesource.jansi.AnsiConsole;
import org.slf4j.LoggerFactory;
@@ -77,11 +77,6 @@ public abstract class BaseApp {
}
private void logCommandUsageNoHeader(BaseCommand theCommand) {
-System.out.println("Usage:");
-System.out.println(" " + provideCommandName() + " " + theCommand.getCommandName() + " [options]");
-System.out.println();
-System.out.println("Options:");
// This is passed in from the launch script
String columnsString = System.getProperty("columns");
int columns;
@@ -93,11 +88,34 @@ public abstract class BaseApp {
columns = 80;
}
+// Usage
+System.out.println("Usage:");
+System.out.println(" " + provideCommandName() + " " + theCommand.getCommandName() + " [options]");
+System.out.println();
+// Description
+String wrapped = WordUtils.wrap(theCommand.getCommandDescription(), columns);
+System.out.println(wrapped);
+System.out.println();
+// Usage Notes
+List<String> usageNotes = theCommand.provideUsageNotes();
+for (String next : usageNotes) {
+wrapped = WordUtils.wrap(next, columns);
+System.out.println(wrapped);
+System.out.println();
+}
+// Options
+System.out.println("Options:");
HelpFormatter fmt = new HelpFormatter();
PrintWriter pw = new PrintWriter(System.out);
fmt.printOptions(pw, columns, theCommand.getOptions(), 2, 2);
pw.flush();
pw.close();
+// That's it!
+System.out.println();
}
private void logUsage() {
@@ -139,6 +157,7 @@ public abstract class BaseApp {
commands.add(new IgPackUploader());
commands.add(new ExportConceptMapToCsvCommand());
commands.add(new ImportCsvToConceptMapCommand());
+commands.add(new BaseMigrateDatabaseCommand());
return commands;
}

View File

@@ -36,7 +36,6 @@ import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClientBuilder;
-import org.fusesource.jansi.Ansi;
import org.hl7.fhir.instance.model.api.IBaseBundle;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -52,6 +51,7 @@ import static org.apache.commons.lang3.StringUtils.*;
import static org.fusesource.jansi.Ansi.ansi;
public abstract class BaseCommand implements Comparable<BaseCommand> {
+public static final String PROMPT = "PROMPT";
protected static final String BASE_URL_PARAM = "t";
protected static final String BASE_URL_PARAM_LONGOPT = "target";
protected static final String BASE_URL_PARAM_NAME = "target";
@@ -72,7 +72,6 @@ public abstract class BaseCommand implements Comparable<BaseCommand> {
protected static final String VERBOSE_LOGGING_PARAM_DESC = "If specified, verbose logging will be used.";
// TODO: Don't use qualified names for loggers in HAPI CLI.
private static final Logger ourLog = LoggerFactory.getLogger(BaseCommand.class);
-public static final String PROMPT = "PROMPT";
protected FhirContext myFhirCtx;
public BaseCommand() {
@@ -99,7 +98,7 @@ public abstract class BaseCommand implements Comparable<BaseCommand> {
try {
retVal = reader.readLine();
} catch (IOException e) {
-throw new ParseException("Failed to read input from user: "+ e.toString());
+throw new ParseException("Failed to read input from user: " + e.toString());
}
System.out.print(ansi().boldOff().fgDefault());
@@ -117,7 +116,6 @@ public abstract class BaseCommand implements Comparable<BaseCommand> {
}
private void addOption(Options theOptions, OptionGroup theOptionGroup, boolean theRequired, String theOpt, String theLongOpt, boolean theHasArgument, String theArgumentName, String theDescription) {
Option option = createOption(theRequired, theOpt, theLongOpt, theHasArgument, theDescription);
if (theHasArgument && isNotBlank(theArgumentName)) {
@@ -329,7 +327,7 @@ public abstract class BaseCommand implements Comparable<BaseCommand> {
File suppliedFile = new File(FilenameUtils.normalize(theFilepath));
if (suppliedFile.isDirectory()) {
-inputFiles = FileUtils.listFiles(suppliedFile, new String[] {"zip"}, false);
+inputFiles = FileUtils.listFiles(suppliedFile, new String[]{"zip"}, false);
} else {
inputFiles = Collections.singletonList(suppliedFile);
}
@@ -433,4 +431,8 @@ public abstract class BaseCommand implements Comparable<BaseCommand> {
public abstract void run(CommandLine theCommandLine) throws ParseException, ExecutionException;
+public List<String> provideUsageNotes() {
+return Collections.emptyList();
+}
}

View File

@@ -0,0 +1,49 @@
package ca.uhn.fhir.cli;
import ca.uhn.fhir.util.VersionEnum;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.ExecutionException;
import java.util.stream.Collectors;
public class BaseMigrateDatabaseCommand extends BaseCommand {
@Override
public String getCommandDescription() {
return "This command migrates a HAPI FHIR JPA database from one version of HAPI FHIR to a newer version";
}
@Override
public String getCommandName() {
return "migrate-database";
}
@Override
public Options getOptions() {
Options retVal = new Options();
addRequiredOption(retVal,"u", "url", "URL", "The JDBC database URL");
addRequiredOption(retVal,"n", "username", "Username", "The JDBC database username");
addRequiredOption(retVal,"p", "password", "Password", "The JDBC database password");
addRequiredOption(retVal,"f", "from", "Version", "The database schema version to migrate FROM");
addRequiredOption(retVal,"t", "to", "Version", "The database schema version to migrate TO");
return retVal;
}
@Override
public List<String> provideUsageNotes() {
String versions = "The following versions are supported: " +
Arrays.stream(VersionEnum.values()).map(Enum::name).collect(Collectors.joining(", "));
return Collections.singletonList(versions);
}
@Override
public void run(CommandLine theCommandLine) throws ParseException, ExecutionException {
}
}
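The run() method above is left empty in this commit, so the new migrate-database command declares its options but does not yet perform a migration. The following is a minimal, hypothetical sketch of how those options could be consumed; it uses only the commons-cli CommandLine API and the VersionEnum type already imported in this file, and it is not the project's actual implementation.

@Override
public void run(CommandLine theCommandLine) throws ParseException, ExecutionException {
	// Read the JDBC connection settings declared in getOptions() above
	String url = theCommandLine.getOptionValue("u");
	String username = theCommandLine.getOptionValue("n");
	String password = theCommandLine.getOptionValue("p");
	// Resolve the schema versions; valueOf() throws IllegalArgumentException for unknown names
	VersionEnum from;
	VersionEnum to;
	try {
		from = VersionEnum.valueOf(theCommandLine.getOptionValue("f"));
		to = VersionEnum.valueOf(theCommandLine.getOptionValue("t"));
	} catch (IllegalArgumentException e) {
		throw new ParseException("Unknown version: " + e.getMessage());
	}
	// The actual migration driver is not part of this commit
}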

View File

@@ -26,6 +26,7 @@ import ca.uhn.fhir.jpa.demo.FhirServerConfig;
import ca.uhn.fhir.jpa.demo.FhirServerConfigDstu3;
import ca.uhn.fhir.jpa.demo.FhirServerConfigR4;
import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.OptionGroup;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.io.IOUtils;
@@ -71,6 +72,8 @@ public class RunServerCommand extends BaseCommand {
options.addOption(null, OPTION_ALLOW_EXTERNAL_REFS, false, "If this flag is set, the server will allow resources to be persisted contaning external resource references");
options.addOption(null, OPTION_DISABLE_REFERENTIAL_INTEGRITY, false, "If this flag is set, the server will not enforce referential integrity");
+addOptionalOption(options, "u", "url", "Url", "If this option is set, specifies the JDBC URL to use for the database connection");
Long defaultReuseSearchResults = DaoConfig.DEFAULT_REUSE_CACHED_SEARCH_RESULTS_FOR_MILLIS;
String defaultReuseSearchResultsStr = defaultReuseSearchResults == null ? "off" : String.valueOf(defaultReuseSearchResults);
options.addOption(null, OPTION_REUSE_SEARCH_RESULTS_MILLIS, true, "The time in milliseconds within which the same results will be returned for multiple identical searches, or \"off\" (default is " + defaultReuseSearchResultsStr + ")");
@@ -106,6 +109,8 @@ public class RunServerCommand extends BaseCommand {
ContextHolder.setDisableReferentialIntegrity(true);
}
+ContextHolder.setDatabaseUrl(theCommandLine.getOptionValue("u"));
String reuseSearchResults = theCommandLine.getOptionValue(OPTION_REUSE_SEARCH_RESULTS_MILLIS);
if (reuseSearchResults != null) {
if (reuseSearchResults.equals("off")) {

View File

@@ -12,6 +12,7 @@ public class ContextHolder {
private static boolean ourDisableReferentialIntegrity;
private static String ourPath;
private static Long ourReuseSearchResultsMillis;
+private static String ourDatabaseUrl;
static {
ourReuseSearchResultsMillis = DaoConfig.DEFAULT_REUSE_CACHED_SEARCH_RESULTS_FOR_MILLIS;
@@ -27,12 +28,15 @@ public class ContextHolder {
case DSTU2:
ourPath = "/baseDstu2/";
break;
+case DSTU2_1:
+break;
case DSTU3:
ourPath = "/baseDstu3/";
break;
case R4:
ourPath = "/baseR4/";
break;
+case DSTU2_HL7ORG:
default:
throw new ParseException("FHIR version not supported by this command: " + theCtx.getVersion().getVersion());
}
@@ -68,4 +72,12 @@ public class ContextHolder {
public static void setDisableReferentialIntegrity(boolean theDisableReferentialIntegrity) {
ourDisableReferentialIntegrity = theDisableReferentialIntegrity;
}
+public static String getDatabaseUrl() {
+return ourDatabaseUrl;
+}
+public static void setDatabaseUrl(String theDatabaseUrl) {
+ourDatabaseUrl = theDatabaseUrl;
+}
}

View File

@@ -4,14 +4,38 @@ import java.util.Properties;
import ca.uhn.fhir.jpa.search.LuceneSearchMappingFactory;
import ca.uhn.fhir.jpa.util.DerbyTenSevenHapiFhirDialect;
+import org.apache.commons.dbcp2.BasicDataSource;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
+import javax.sql.DataSource;
+import static org.apache.commons.lang3.StringUtils.isNotBlank;
+@SuppressWarnings("Duplicates")
@Configuration
public class FhirDbConfig {
-private boolean ourLowMemMode;
+/**
+* The following bean configures the database connection. The 'url' property value of "jdbc:derby:directory:jpaserver_derby_files;create=true" indicates that the server should save resources in a
+* directory called "jpaserver_derby_files".
+*
+* A URL to a remote database could also be placed here, along with login credentials and other properties supported by BasicDataSource.
+*/
+@Bean(destroyMethod = "close")
+public DataSource dataSource() {
+String url = "jdbc:derby:directory:target/jpaserver_derby_files;create=true";
+if (isNotBlank(ContextHolder.getDatabaseUrl())) {
+url = ContextHolder.getDatabaseUrl();
+}
+BasicDataSource retVal = new BasicDataSource();
+retVal.setDriver(new org.apache.derby.jdbc.EmbeddedDriver());
+retVal.setUrl(url);
+retVal.setUsername("");
+retVal.setPassword("");
+return retVal;
+}
@Bean()
public Properties jpaProperties() {

View File

@@ -1,11 +1,11 @@
package ca.uhn.fhir.jpa.demo;
-import java.util.Properties;
-import javax.persistence.EntityManagerFactory;
-import javax.sql.DataSource;
-import org.apache.commons.dbcp2.BasicDataSource;
+import ca.uhn.fhir.jpa.config.BaseJavaConfigDstu2;
+import ca.uhn.fhir.jpa.dao.DaoConfig;
+import ca.uhn.fhir.jpa.util.SubscriptionsRequireManualActivationInterceptorDstu2;
+import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor;
+import ca.uhn.fhir.rest.server.interceptor.LoggingInterceptor;
+import ca.uhn.fhir.rest.server.interceptor.ResponseHighlighterInterceptor;
import org.apache.commons.lang3.time.DateUtils;
import org.hibernate.jpa.HibernatePersistenceProvider;
import org.springframework.beans.factory.annotation.Autowire;
@@ -18,18 +18,21 @@ import org.springframework.orm.jpa.JpaTransactionManager;
import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean;
import org.springframework.transaction.annotation.EnableTransactionManagement;
-import ca.uhn.fhir.jpa.config.BaseJavaConfigDstu2;
-import ca.uhn.fhir.jpa.dao.DaoConfig;
-import ca.uhn.fhir.jpa.util.SubscriptionsRequireManualActivationInterceptorDstu2;
-import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor;
-import ca.uhn.fhir.rest.server.interceptor.LoggingInterceptor;
-import ca.uhn.fhir.rest.server.interceptor.ResponseHighlighterInterceptor;
+import javax.persistence.EntityManagerFactory;
+import javax.sql.DataSource;
+import java.util.Properties;
@Configuration
@EnableTransactionManagement()
@Import(FhirDbConfig.class)
public class FhirServerConfig extends BaseJavaConfigDstu2 {
+@Autowired
+private DataSource myDataSource;
+@Autowired()
+@Qualifier("jpaProperties")
+private Properties myJpaProperties;
/**
* Configure FHIR properties around the the JPA server via this bean
*/
@@ -43,32 +46,12 @@ public class FhirServerConfig extends BaseJavaConfigDstu2 {
return retVal;
}
-/**
-* The following bean configures the database connection. The 'url' property value of "jdbc:derby:directory:jpaserver_derby_files;create=true" indicates that the server should save resources in a
-* directory called "jpaserver_derby_files".
-*
-* A URL to a remote database could also be placed here, along with login credentials and other properties supported by BasicDataSource.
-*/
-@Bean(destroyMethod = "close")
-public DataSource dataSource() {
-BasicDataSource retVal = new BasicDataSource();
-retVal.setDriver(new org.apache.derby.jdbc.EmbeddedDriver());
-retVal.setUrl("jdbc:derby:directory:target/jpaserver_derby_files;create=true");
-retVal.setUsername("");
-retVal.setPassword("");
-return retVal;
-}
-@Autowired()
-@Qualifier("jpaProperties")
-private Properties myJpaProperties;
@Override
@Bean()
public LocalContainerEntityManagerFactoryBean entityManagerFactory() {
LocalContainerEntityManagerFactoryBean retVal = super.entityManagerFactory();
retVal.setPersistenceUnitName("HAPI_PU");
-retVal.setDataSource(dataSource());
+retVal.setDataSource(myDataSource);
retVal.setPackagesToScan("ca.uhn.fhir.jpa.entity");
retVal.setPersistenceProvider(new HibernatePersistenceProvider());
retVal.setJpaProperties(myJpaProperties);

View File

@@ -1,13 +1,12 @@
package ca.uhn.fhir.jpa.demo;
-import java.util.Properties;
-import javax.persistence.EntityManagerFactory;
-import javax.sql.DataSource;
-import org.apache.commons.dbcp2.BasicDataSource;
+import ca.uhn.fhir.jpa.config.BaseJavaConfigDstu3;
+import ca.uhn.fhir.jpa.dao.DaoConfig;
+import ca.uhn.fhir.jpa.util.SubscriptionsRequireManualActivationInterceptorDstu3;
+import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor;
+import ca.uhn.fhir.rest.server.interceptor.LoggingInterceptor;
+import ca.uhn.fhir.rest.server.interceptor.ResponseHighlighterInterceptor;
import org.apache.commons.lang3.time.DateUtils;
-import org.hibernate.jpa.HibernatePersistenceProvider;
import org.springframework.beans.factory.annotation.Autowire;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
@@ -18,18 +17,15 @@ import org.springframework.orm.jpa.JpaTransactionManager;
import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean;
import org.springframework.transaction.annotation.EnableTransactionManagement;
-import ca.uhn.fhir.jpa.config.BaseJavaConfigDstu3;
-import ca.uhn.fhir.jpa.dao.DaoConfig;
-import ca.uhn.fhir.jpa.util.SubscriptionsRequireManualActivationInterceptorDstu3;
-import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor;
-import ca.uhn.fhir.rest.server.interceptor.LoggingInterceptor;
-import ca.uhn.fhir.rest.server.interceptor.ResponseHighlighterInterceptor;
+import javax.persistence.EntityManagerFactory;
+import javax.sql.DataSource;
+import java.util.Properties;
/**
* This class isn't used by default by the example, but
* you can use it as a config if you want to support DSTU3
* instead of DSTU2 in your server.
-*
+* <p>
* See https://github.com/jamesagnew/hapi-fhir/issues/278
*/
@Configuration
@@ -37,6 +33,12 @@ import ca.uhn.fhir.rest.server.interceptor.ResponseHighlighterInterceptor;
@Import(FhirDbConfig.class)
public class FhirServerConfigDstu3 extends BaseJavaConfigDstu3 {
+@Autowired
+private DataSource myDataSource;
+@Autowired()
+@Qualifier("jpaProperties")
+private Properties myJpaProperties;
/**
* Configure FHIR properties around the the JPA server via this bean
*/
@@ -50,36 +52,16 @@ public class FhirServerConfigDstu3 extends BaseJavaConfigDstu3 {
return retVal;
}
-/**
-* The following bean configures the database connection. The 'url' property value of "jdbc:derby:directory:jpaserver_derby_files;create=true" indicates that the server should save resources in a
-* directory called "jpaserver_derby_files".
-*
-* A URL to a remote database could also be placed here, along with login credentials and other properties supported by BasicDataSource.
-*/
-@Bean(destroyMethod = "close")
-public DataSource dataSource() {
-BasicDataSource retVal = new BasicDataSource();
-retVal.setDriver(new org.apache.derby.jdbc.EmbeddedDriver());
-retVal.setUrl("jdbc:derby:directory:target/jpaserver_derby_files;create=true");
-retVal.setUsername("");
-retVal.setPassword("");
-return retVal;
-}
@Override
@Bean()
public LocalContainerEntityManagerFactoryBean entityManagerFactory() {
LocalContainerEntityManagerFactoryBean retVal = super.entityManagerFactory();
retVal.setPersistenceUnitName("HAPI_PU");
-retVal.setDataSource(dataSource());
+retVal.setDataSource(myDataSource);
retVal.setJpaProperties(myJpaProperties);
return retVal;
}
-@Autowired()
-@Qualifier("jpaProperties")
-private Properties myJpaProperties;
/**
* Do some fancy logging to create a nice access log that has details about each incoming request.
*/

View File

@@ -1,13 +1,12 @@
package ca.uhn.fhir.jpa.demo;
-import java.util.Properties;
-import javax.persistence.EntityManagerFactory;
-import javax.sql.DataSource;
-import org.apache.commons.dbcp2.BasicDataSource;
+import ca.uhn.fhir.jpa.config.BaseJavaConfigR4;
+import ca.uhn.fhir.jpa.dao.DaoConfig;
+import ca.uhn.fhir.jpa.util.SubscriptionsRequireManualActivationInterceptorR4;
+import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor;
+import ca.uhn.fhir.rest.server.interceptor.LoggingInterceptor;
+import ca.uhn.fhir.rest.server.interceptor.ResponseHighlighterInterceptor;
import org.apache.commons.lang3.time.DateUtils;
-import org.hibernate.jpa.HibernatePersistenceProvider;
import org.springframework.beans.factory.annotation.Autowire;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
@@ -18,18 +17,15 @@ import org.springframework.orm.jpa.JpaTransactionManager;
import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean;
import org.springframework.transaction.annotation.EnableTransactionManagement;
-import ca.uhn.fhir.jpa.config.BaseJavaConfigR4;
-import ca.uhn.fhir.jpa.dao.DaoConfig;
-import ca.uhn.fhir.jpa.util.SubscriptionsRequireManualActivationInterceptorR4;
-import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor;
-import ca.uhn.fhir.rest.server.interceptor.LoggingInterceptor;
-import ca.uhn.fhir.rest.server.interceptor.ResponseHighlighterInterceptor;
+import javax.persistence.EntityManagerFactory;
+import javax.sql.DataSource;
+import java.util.Properties;
/**
* This class isn't used by default by the example, but
* you can use it as a config if you want to support DSTU3
* instead of DSTU2 in your server.
-*
+* <p>
* See https://github.com/jamesagnew/hapi-fhir/issues/278
*/
@Configuration
@@ -37,6 +33,12 @@ import ca.uhn.fhir.rest.server.interceptor.ResponseHighlighterInterceptor;
@Import(FhirDbConfig.class)
public class FhirServerConfigR4 extends BaseJavaConfigR4 {
+@Autowired
+private DataSource myDataSource;
+@Autowired()
+@Qualifier("jpaProperties")
+private Properties myJpaProperties;
/**
* Configure FHIR properties around the the JPA server via this bean
*/
@@ -50,36 +52,16 @@ public class FhirServerConfigR4 extends BaseJavaConfigR4 {
return retVal;
}
-/**
-* The following bean configures the database connection. The 'url' property value of "jdbc:derby:directory:jpaserver_derby_files;create=true" indicates that the server should save resources in a
-* directory called "jpaserver_derby_files".
-*
-* A URL to a remote database could also be placed here, along with login credentials and other properties supported by BasicDataSource.
-*/
-@Bean(destroyMethod = "close")
-public DataSource dataSource() {
-BasicDataSource retVal = new BasicDataSource();
-retVal.setDriver(new org.apache.derby.jdbc.EmbeddedDriver());
-retVal.setUrl("jdbc:derby:directory:target/jpaserver_derby_files;create=true");
-retVal.setUsername("");
-retVal.setPassword("");
-return retVal;
-}
@Override
@Bean()
public LocalContainerEntityManagerFactoryBean entityManagerFactory() {
LocalContainerEntityManagerFactoryBean retVal = super.entityManagerFactory();
retVal.setPersistenceUnitName("HAPI_PU");
-retVal.setDataSource(dataSource());
+retVal.setDataSource(myDataSource);
retVal.setJpaProperties(myJpaProperties);
return retVal;
}
-@Autowired()
-@Qualifier("jpaProperties")
-private Properties myJpaProperties;
/**
* Do some fancy logging to create a nice access log that has details about each incoming request.
*/

View File

@@ -343,6 +343,10 @@
<groupId>org.jboss.spec.javax.transaction</groupId>
<artifactId>jboss-transaction-api_1.2_spec</artifactId>
</exclusion>
+<exclusion>
+<groupId>javax.activation</groupId>
+<artifactId>activation</artifactId>
+</exclusion>
</exclusions>
</dependency>
<dependency>
@@ -399,10 +403,11 @@
</exclusion>
</exclusions>
</dependency>
-<dependency>
+<!-- <dependency>
<groupId>com.sun.activation</groupId>
<artifactId>javax.activation</artifactId>
</dependency>
+-->
<!--<dependency>
<groupId>javax.validation</groupId>
<artifactId>validation-api</artifactId>
@@ -568,10 +573,21 @@
<goal>create</goal>
</goals>
<configuration>
-<dialect>org.hibernate.dialect.MySQL57InnoDBDialect</dialect>
+<dialect>org.hibernate.dialect.MySQL57Dialect</dialect>
<outputFile>${project.build.directory}/classes/ca/uhn/hapi/fhir/jpa/docs/database/persistence_create_mysql57.sql</outputFile>
</configuration>
</execution>
+<execution>
+<id>mariadb103</id>
+<phase>process-classes</phase>
+<goals>
+<goal>create</goal>
+</goals>
+<configuration>
+<dialect>org.hibernate.dialect.MariaDB103Dialect</dialect>
+<outputFile>${project.build.directory}/classes/ca/uhn/hapi/fhir/jpa/docs/database/persistence_create_mariadb103.sql</outputFile>
+</configuration>
+</execution>
<execution>
<id>oracle12c</id>
<phase>process-classes</phase>

View File

@@ -3,6 +3,11 @@ package ca.uhn.fhir.jpa.demo;
import java.util.Collection;
import java.util.List;
+import ca.uhn.fhir.jpa.provider.dstu3.TerminologyUploaderProviderDstu3;
+import ca.uhn.fhir.jpa.subscription.email.SubscriptionEmailInterceptor;
+import ca.uhn.fhir.jpa.subscription.resthook.SubscriptionRestHookInterceptor;
+import ca.uhn.fhir.jpa.subscription.websocket.SubscriptionWebsocketInterceptor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -151,9 +156,15 @@ public class JpaServerDemo extends RestfulServer {
* so it is a potential security vulnerability. Consider using an AuthorizationInterceptor
* with this feature.
*/
-//if (fhirVersion == FhirVersionEnum.DSTU3) {
-// registerProvider(myAppCtx.getBean(TerminologyUploaderProviderDstu3.class));
-//}
+if (fhirVersion == FhirVersionEnum.DSTU3) {
+registerProvider(myAppCtx.getBean(TerminologyUploaderProviderDstu3.class));
+}
+// Enable various subscription types
+registerInterceptor(myAppCtx.getBean(SubscriptionWebsocketInterceptor.class));
+registerInterceptor(myAppCtx.getBean(SubscriptionRestHookInterceptor.class));
+registerInterceptor(myAppCtx.getBean(SubscriptionEmailInterceptor.class));
}
}

View File

@@ -1,10 +1,14 @@
package ca.uhn.fhir.jpa.migrate;
+import org.apache.commons.lang3.Validate;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.datasource.DataSourceTransactionManager;
import org.springframework.jdbc.datasource.SingleConnectionDataSource;
+import org.springframework.transaction.TransactionDefinition;
import org.springframework.transaction.support.TransactionTemplate;
+import javax.annotation.Nonnull;
/*-
* #%L
* Smile CDR - CDR
@@ -57,32 +61,47 @@ public enum DriverTypeEnum {
TransactionTemplate txTemplate = new TransactionTemplate();
txTemplate.setTransactionManager(transactionManager);
-txTemplate.setPropagationBehavior(TransactionTemplate.PROPAGATION_REQUIRES_NEW);
+txTemplate.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);
txTemplate.afterPropertiesSet();
-return new ConnectionProperties(dataSource, txTemplate);
+return new ConnectionProperties(dataSource, txTemplate, this);
}
public static class ConnectionProperties {
+private final DriverTypeEnum myDriverType;
private final SingleConnectionDataSource myDataSource;
private final TransactionTemplate myTxTemplate;
+/**
+* Constructor
+*/
-public ConnectionProperties(SingleConnectionDataSource theDataSource, TransactionTemplate theTxTemplate) {
+public ConnectionProperties(SingleConnectionDataSource theDataSource, TransactionTemplate theTxTemplate, DriverTypeEnum theDriverType) {
+Validate.notNull(theDataSource);
+Validate.notNull(theTxTemplate);
+Validate.notNull(theDriverType);
myDataSource = theDataSource;
myTxTemplate = theTxTemplate;
+myDriverType = theDriverType;
}
+public DriverTypeEnum getDriverType() {
+return myDriverType;
+}
+@Nonnull
public SingleConnectionDataSource getDataSource() {
return myDataSource;
}
+@Nonnull
public JdbcTemplate newJdbcTemplate() {
JdbcTemplate jdbcTemplate = new JdbcTemplate();
jdbcTemplate.setDataSource(myDataSource);
return jdbcTemplate;
}
+@Nonnull
public TransactionTemplate getTxTemplate() {
return myTxTemplate;
}
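The ConnectionProperties shown above now carries its DriverTypeEnum and exposes newJdbcTemplate() and getTxTemplate(). A rough usage sketch follows (a hypothetical helper, not code from this commit), running a single statement inside the transaction template in the same way the migration tasks do:

import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;

public class MigrationSqlRunner { // hypothetical helper class, for illustration only
	static void executeInTx(DriverTypeEnum.ConnectionProperties theProps, String theSql) {
		// Run the statement in a transaction via the template exposed by ConnectionProperties
		theProps.getTxTemplate().execute(t -> {
			theProps.newJdbcTemplate().execute(theSql);
			return null;
		});
	}
}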

View File

@@ -134,4 +134,32 @@ public class JdbcUtils {
}
public static Set<String> getTableNames(DriverTypeEnum.ConnectionProperties theConnectionProperties) throws SQLException {
DataSource dataSource = Objects.requireNonNull(theConnectionProperties.getDataSource());
Connection connection = dataSource.getConnection();
return theConnectionProperties.getTxTemplate().execute(t -> {
DatabaseMetaData metadata;
try {
metadata = connection.getMetaData();
ResultSet tables = metadata.getTables(null, null, null, null);
Set<String> columnNames = new HashSet<>();
while (tables.next()) {
String tableName = tables.getString("TABLE_NAME");
tableName = StringUtils.toUpperCase(tableName, Locale.US);
String tableType = tables.getString("TABLE_TYPE");
if ("SYSTEM TABLE".equalsIgnoreCase(tableType)) {
continue;
}
columnNames.add(tableName);
}
return columnNames;
} catch (SQLException e) {
throw new InternalErrorException(e);
}
});
}
}
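getTableNames() returns the upper-cased, non-system table names visible through the connection's metadata. A small hypothetical check built on it (mirroring how AddTableTask, shown next, decides whether to skip creating a table that already exists):

import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.JdbcUtils;
import java.sql.SQLException;
import java.util.Set;

public class TableChecks { // hypothetical helper class, for illustration only
	// Table names come back upper-cased, so callers should pass an upper-case name
	static boolean tableExists(DriverTypeEnum.ConnectionProperties theProps, String theUpperCaseTableName) throws SQLException {
		Set<String> tableNames = JdbcUtils.getTableNames(theProps);
		return tableNames.contains(theUpperCaseTableName);
	}
}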

View File

@@ -0,0 +1,48 @@
package ca.uhn.fhir.jpa.migrate.taskdef;
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.JdbcUtils;
import org.apache.commons.lang3.Validate;
import org.intellij.lang.annotations.Language;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.jdbc.core.JdbcTemplate;
import java.sql.SQLException;
import java.util.*;
public class AddTableTask extends BaseTableTask<AddTableTask> {
private static final Logger ourLog = LoggerFactory.getLogger(AddTableTask.class);
private Map<DriverTypeEnum, List<String>> myDriverToSqls = new HashMap<>();
public void addSql(DriverTypeEnum theDriverType, @Language("SQL") String theSql) {
Validate.notNull(theDriverType);
Validate.notBlank(theSql);
List<String> list = myDriverToSqls.computeIfAbsent(theDriverType, t -> new ArrayList<>());
list.add(theSql);
}
@Override
public void execute() throws SQLException {
Set<String> tableNames = JdbcUtils.getTableNames(getConnectionProperties());
if (tableNames.contains(getTableName())) {
ourLog.info("Table {} already exists - No action performed", getTableName());
return;
}
List<String> sqlStatements = myDriverToSqls.get(getDriverType());
ourLog.info("Going to create table {} using {} SQL statements", getTableName(), sqlStatements.size());
getConnectionProperties().getTxTemplate().execute(t->{
JdbcTemplate jdbcTemplate = getConnectionProperties().newJdbcTemplate();
for (String nextSql : sqlStatements) {
jdbcTemplate.execute(nextSql);
}
return null;
});
}
}

View File

@@ -2,10 +2,12 @@ package ca.uhn.fhir.jpa.migrate.tasks;
import ca.uhn.fhir.jpa.dao.DaoConfig;
import ca.uhn.fhir.jpa.entity.*;
+import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.taskdef.*;
import ca.uhn.fhir.util.VersionEnum;
import com.google.common.collect.Multimap;
import com.google.common.collect.MultimapBuilder;
+import org.intellij.lang.annotations.Language;
@SuppressWarnings("UnstableApiUsage")
public class HapiFhirJpaMigrationTasks {
@@ -253,6 +255,132 @@
.nonNullable()
.withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 500);
// Concept Designation
forVersion(VersionEnum.V3_5_0)
.addTable("TRM_CONCEPT_DESIG")
.addSql(DriverTypeEnum.DERBY_EMBEDDED, "create table TRM_CONCEPT_DESIG (PID bigint not null, LANG varchar(500), USE_CODE varchar(500), USE_DISPLAY varchar(500), USE_SYSTEM varchar(500), VAL varchar(500) not null, CS_VER_PID bigint, CONCEPT_PID bigint, primary key (PID))")
.addSql(DriverTypeEnum.DERBY_EMBEDDED, "alter table TRM_CONCEPT_DESIG add constraint FK_CONCEPTDESIG_CSV foreign key (CS_VER_PID) references TRM_CODESYSTEM_VER")
.addSql(DriverTypeEnum.DERBY_EMBEDDED, "alter table TRM_CONCEPT_DESIG add constraint FK_CONCEPTDESIG_CONCEPT foreign key (CONCEPT_PID) references TRM_CONCEPT")
.addSql(DriverTypeEnum.MYSQL_5_7, "create table TRM_CONCEPT_DESIG (PID bigint not null, LANG varchar(500), USE_CODE varchar(500), USE_DISPLAY varchar(500), USE_SYSTEM varchar(500), VAL varchar(500) not null, CS_VER_PID bigint, CONCEPT_PID bigint, primary key (PID)) ENGINE=InnoDB")
.addSql(DriverTypeEnum.MYSQL_5_7, "alter table TRM_CONCEPT_DESIG add constraint FK_CONCEPTDESIG_CSV foreign key (CS_VER_PID) references TRM_CODESYSTEM_VER (PID)")
.addSql(DriverTypeEnum.MYSQL_5_7, "alter table TRM_CONCEPT_DESIG add constraint FK_CONCEPTDESIG_CONCEPT foreign key (CONCEPT_PID) references TRM_CONCEPT (PID)")
.addSql(DriverTypeEnum.MARIADB_10_1, "create table TRM_CONCEPT_DESIG (PID bigint not null, LANG varchar(500), USE_CODE varchar(500), USE_DISPLAY varchar(500), USE_SYSTEM varchar(500), VAL varchar(500) not null, CS_VER_PID bigint, CONCEPT_PID bigint, primary key (PID))")
.addSql(DriverTypeEnum.MARIADB_10_1, "alter table TRM_CONCEPT_DESIG add constraint FK_CONCEPTDESIG_CSV foreign key (CS_VER_PID) references TRM_CODESYSTEM_VER (PID)")
.addSql(DriverTypeEnum.MARIADB_10_1, "alter table TRM_CONCEPT_DESIG add constraint FK_CONCEPTDESIG_CONCEPT foreign key (CONCEPT_PID) references TRM_CONCEPT (PID)")
.addSql(DriverTypeEnum.ORACLE_12C, "create table TRM_CONCEPT_DESIG (PID number(19,0) not null, LANG varchar2(500 char), USE_CODE varchar2(500 char), USE_DISPLAY varchar2(500 char), USE_SYSTEM varchar2(500 char), VAL varchar2(500 char) not null, CS_VER_PID number(19,0), CONCEPT_PID number(19,0), primary key (PID))")
.addSql(DriverTypeEnum.ORACLE_12C, "alter table TRM_CONCEPT_DESIG add constraint FK_CONCEPTDESIG_CSV foreign key (CS_VER_PID) references TRM_CODESYSTEM_VER")
.addSql(DriverTypeEnum.ORACLE_12C, "alter table TRM_CONCEPT_DESIG add constraint FK_CONCEPTDESIG_CONCEPT foreign key (CONCEPT_PID) references TRM_CONCEPT")
.addSql(DriverTypeEnum.POSTGRES_9_4, "create table TRM_CONCEPT_DESIG (PID int8 not null, LANG varchar(500), USE_CODE varchar(500), USE_DISPLAY varchar(500), USE_SYSTEM varchar(500), VAL varchar(500) not null, CS_VER_PID int8, CONCEPT_PID int8, primary key (PID))")
.addSql(DriverTypeEnum.POSTGRES_9_4, "alter table TRM_CONCEPT_DESIG add constraint FK_CONCEPTDESIG_CSV foreign key (CS_VER_PID) references TRM_CODESYSTEM_VER")
.addSql(DriverTypeEnum.POSTGRES_9_4, "alter table TRM_CONCEPT_DESIG add constraint FK_CONCEPTDESIG_CONCEPT foreign key (CONCEPT_PID) references TRM_CONCEPT")
.addSql(DriverTypeEnum.MSSQL_2012, "create table TRM_CONCEPT_DESIG (PID bigint not null, LANG varchar(500), USE_CODE varchar(500), USE_DISPLAY varchar(500), USE_SYSTEM varchar(500), VAL varchar(500) not null, CS_VER_PID bigint, CONCEPT_PID bigint, primary key (PID))")
.addSql(DriverTypeEnum.MSSQL_2012, "alter table TRM_CONCEPT_DESIG add constraint FK_CONCEPTDESIG_CSV foreign key (CS_VER_PID) references TRM_CODESYSTEM_VER")
.addSql(DriverTypeEnum.MSSQL_2012, "alter table TRM_CONCEPT_DESIG add constraint FK_CONCEPTDESIG_CONCEPT foreign key (CONCEPT_PID) references TRM_CONCEPT");
// Concept Property
forVersion(VersionEnum.V3_5_0)
.addTable("TRM_CONCEPT_MAP_GRP_ELM_TGT")
.addSql(DriverTypeEnum.DERBY_EMBEDDED, "create table TRM_CONCEPT_PROPERTY (PID bigint not null, PROP_CODESYSTEM varchar(500), PROP_DISPLAY varchar(500), PROP_KEY varchar(500) not null, PROP_TYPE integer not null, PROP_VAL varchar(500), CS_VER_PID bigint, CONCEPT_PID bigint, primary key (PID))")
.addSql(DriverTypeEnum.DERBY_EMBEDDED, "alter table TRM_CONCEPT_PROPERTY add constraint FK_CONCEPTPROP_CSV foreign key (CS_VER_PID) references TRM_CODESYSTEM_VER")
.addSql(DriverTypeEnum.DERBY_EMBEDDED, "alter table TRM_CONCEPT_PROPERTY add constraint FK_CONCEPTPROP_CONCEPT foreign key (CONCEPT_PID) references TRM_CONCEPT")
.addSql(DriverTypeEnum.MARIADB_10_1, "create table TRM_CONCEPT_PROPERTY (PID bigint not null, PROP_CODESYSTEM varchar(500), PROP_DISPLAY varchar(500), PROP_KEY varchar(500) not null, PROP_TYPE integer not null, PROP_VAL varchar(500), CS_VER_PID bigint, CONCEPT_PID bigint, primary key (PID))")
.addSql(DriverTypeEnum.MARIADB_10_1, "alter table TRM_CONCEPT_PROPERTY add constraint FK_CONCEPTPROP_CSV foreign key (CS_VER_PID) references TRM_CODESYSTEM_VER (PID)")
.addSql(DriverTypeEnum.MARIADB_10_1, "alter table TRM_CONCEPT_PROPERTY add constraint FK_CONCEPTPROP_CONCEPT foreign key (CONCEPT_PID) references TRM_CONCEPT (PID)")
.addSql(DriverTypeEnum.MYSQL_5_7, "create table TRM_CONCEPT_PROPERTY (PID bigint not null, PROP_CODESYSTEM varchar(500), PROP_DISPLAY varchar(500), PROP_KEY varchar(500) not null, PROP_TYPE integer not null, PROP_VAL varchar(500), CS_VER_PID bigint, CONCEPT_PID bigint, primary key (PID))")
.addSql(DriverTypeEnum.MYSQL_5_7, "alter table TRM_CONCEPT_PROPERTY add constraint FK_CONCEPTPROP_CSV foreign key (CS_VER_PID) references TRM_CODESYSTEM_VER (PID)")
.addSql(DriverTypeEnum.MYSQL_5_7, "alter table TRM_CONCEPT_PROPERTY add constraint FK_CONCEPTPROP_CONCEPT foreign key (CONCEPT_PID) references TRM_CONCEPT (PID)")
.addSql(DriverTypeEnum.ORACLE_12C, "create table TRM_CONCEPT_PROPERTY (PID number(19,0) not null, PROP_CODESYSTEM varchar2(500 char), PROP_DISPLAY varchar2(500 char), PROP_KEY varchar2(500 char) not null, PROP_TYPE number(10,0) not null, PROP_VAL varchar2(500 char), CS_VER_PID number(19,0), CONCEPT_PID number(19,0), primary key (PID))")
.addSql(DriverTypeEnum.ORACLE_12C, "alter table TRM_CONCEPT_PROPERTY add constraint FK_CONCEPTPROP_CSV foreign key (CS_VER_PID) references TRM_CODESYSTEM_VER")
.addSql(DriverTypeEnum.ORACLE_12C, "alter table TRM_CONCEPT_PROPERTY add constraint FK_CONCEPTPROP_CONCEPT foreign key (CONCEPT_PID) references TRM_CONCEPT")
.addSql(DriverTypeEnum.POSTGRES_9_4, "create table TRM_CONCEPT_PROPERTY (PID int8 not null, PROP_CODESYSTEM varchar(500), PROP_DISPLAY varchar(500), PROP_KEY varchar(500) not null, PROP_TYPE int4 not null, PROP_VAL varchar(500), CS_VER_PID int8, CONCEPT_PID int8, primary key (PID))")
.addSql(DriverTypeEnum.POSTGRES_9_4, "alter table TRM_CONCEPT_PROPERTY add constraint FK_CONCEPTPROP_CSV foreign key (CS_VER_PID) references TRM_CODESYSTEM_VER")
.addSql(DriverTypeEnum.POSTGRES_9_4, "alter table TRM_CONCEPT_PROPERTY add constraint FK_CONCEPTPROP_CONCEPT foreign key (CONCEPT_PID) references TRM_CONCEPT")
.addSql(DriverTypeEnum.MSSQL_2012, "create table TRM_CONCEPT_PROPERTY (PID bigint not null, PROP_CODESYSTEM varchar(500), PROP_DISPLAY varchar(500), PROP_KEY varchar(500) not null, PROP_TYPE int not null, PROP_VAL varchar(500), CS_VER_PID bigint, CONCEPT_PID bigint, primary key (PID))")
.addSql(DriverTypeEnum.MSSQL_2012, "alter table TRM_CONCEPT_PROPERTY add constraint FK_CONCEPTPROP_CSV foreign key (CS_VER_PID) references TRM_CODESYSTEM_VER")
.addSql(DriverTypeEnum.MSSQL_2012, "alter table TRM_CONCEPT_PROPERTY add constraint FK_CONCEPTPROP_CONCEPT foreign key (CONCEPT_PID) references TRM_CONCEPT");
// Concept Map - Map
forVersion(VersionEnum.V3_5_0)
.addTable("TRM_CONCEPT_MAP")
.addSql(DriverTypeEnum.DERBY_EMBEDDED, "create table TRM_CONCEPT_MAP (PID bigint not null, RES_ID bigint, SOURCE_URL varchar(200), TARGET_URL varchar(200), URL varchar(200) not null, primary key (PID))")
.addSql(DriverTypeEnum.DERBY_EMBEDDED, "alter table TRM_CONCEPT_MAP add constraint FK_TRMCONCEPTMAP_RES foreign key (RES_ID) references HFJ_RESOURCE")
.addSql(DriverTypeEnum.MYSQL_5_7, "create table TRM_CONCEPT_MAP (PID bigint not null, RES_ID bigint, SOURCE_URL varchar(200), TARGET_URL varchar(200), URL varchar(200) not null, primary key (PID))")
.addSql(DriverTypeEnum.MYSQL_5_7, "alter table TRM_CONCEPT_MAP add constraint IDX_CONCEPT_MAP_URL unique (URL)")
.addSql(DriverTypeEnum.MYSQL_5_7, "alter table TRM_CONCEPT_MAP add constraint FK_TRMCONCEPTMAP_RES foreign key (RES_ID) references HFJ_RESOURCE (RES_ID)")
.addSql(DriverTypeEnum.ORACLE_12C, "create table TRM_CONCEPT_MAP (PID number(19,0) not null, RES_ID number(19,0), SOURCE_URL varchar2(200 char), TARGET_URL varchar2(200 char), URL varchar2(200 char) not null, primary key (PID))")
.addSql(DriverTypeEnum.ORACLE_12C, "alter table TRM_CONCEPT_MAP add constraint IDX_CONCEPT_MAP_URL unique (URL)")
.addSql(DriverTypeEnum.ORACLE_12C, "alter table TRM_CONCEPT_MAP add constraint FK_TRMCONCEPTMAP_RES foreign key (RES_ID) references HFJ_RESOURCE")
.addSql(DriverTypeEnum.POSTGRES_9_4, "create table TRM_CONCEPT_MAP (PID int8 not null, RES_ID int8, SOURCE_URL varchar(200), TARGET_URL varchar(200), URL varchar(200) not null, primary key (PID))")
.addSql(DriverTypeEnum.POSTGRES_9_4, "alter table TRM_CONCEPT_MAP add constraint FK_TRMCONCEPTMAP_RES foreign key (RES_ID) references HFJ_RESOURCE")
.addSql(DriverTypeEnum.POSTGRES_9_4, "alter table TRM_CONCEPT_MAP add constraint IDX_CONCEPT_MAP_URL unique (URL)")
.addSql(DriverTypeEnum.MSSQL_2012, "create table TRM_CONCEPT_MAP (PID bigint not null, RES_ID bigint, SOURCE_URL varchar(200), TARGET_URL varchar(200), URL varchar(200) not null, primary key (PID))")
.addSql(DriverTypeEnum.MSSQL_2012, "alter table TRM_CONCEPT_MAP add constraint IDX_CONCEPT_MAP_URL unique (URL)")
.addSql(DriverTypeEnum.MSSQL_2012, "alter table TRM_CONCEPT_MAP add constraint FK_TRMCONCEPTMAP_RES foreign key (RES_ID) references HFJ_RESOURCE")
.addSql(DriverTypeEnum.MARIADB_10_1, "create table TRM_CONCEPT_MAP (PID bigint not null, RES_ID bigint, SOURCE_URL varchar(200), TARGET_URL varchar(200), URL varchar(200) not null, primary key (PID))")
.addSql(DriverTypeEnum.MARIADB_10_1, "alter table TRM_CONCEPT_MAP add constraint FK_TRMCONCEPTMAP_RES foreign key (RES_ID) references HFJ_RESOURCE (RES_ID)")
.addSql(DriverTypeEnum.MARIADB_10_1, "alter table TRM_CONCEPT_MAP add constraint IDX_CONCEPT_MAP_URL unique (URL)");
// Concept Map - Group
forVersion(VersionEnum.V3_5_0)
.addTable("TRM_CONCEPT_MAP_GROUP")
.addSql(DriverTypeEnum.DERBY_EMBEDDED, "create table TRM_CONCEPT_MAP_GROUP (PID bigint not null, myConceptMapUrl varchar(255), SOURCE_URL varchar(200) not null, mySourceValueSet varchar(255), SOURCE_VERSION varchar(100), TARGET_URL varchar(200) not null, myTargetValueSet varchar(255), TARGET_VERSION varchar(100), CONCEPT_MAP_PID bigint not null, primary key (PID))")
.addSql(DriverTypeEnum.DERBY_EMBEDDED, "alter table TRM_CONCEPT_MAP_GROUP add constraint FK_TCMGROUP_CONCEPTMAP foreign key (CONCEPT_MAP_PID) references TRM_CONCEPT_MAP")
.addSql(DriverTypeEnum.DERBY_EMBEDDED, "create unique index IDX_CONCEPT_MAP_URL on TRM_CONCEPT_MAP (URL)")
.addSql(DriverTypeEnum.ORACLE_12C, "create table TRM_CONCEPT_MAP_GROUP (PID number(19,0) not null, myConceptMapUrl varchar2(255 char), SOURCE_URL varchar2(200 char) not null, mySourceValueSet varchar2(255 char), SOURCE_VERSION varchar2(100 char), TARGET_URL varchar2(200 char) not null, myTargetValueSet varchar2(255 char), TARGET_VERSION varchar2(100 char), CONCEPT_MAP_PID number(19,0) not null, primary key (PID))")
.addSql(DriverTypeEnum.ORACLE_12C, "alter table TRM_CONCEPT_MAP_GROUP add constraint FK_TCMGROUP_CONCEPTMAP foreign key (CONCEPT_MAP_PID) references TRM_CONCEPT_MAP")
.addSql(DriverTypeEnum.MARIADB_10_1, "create table TRM_CONCEPT_MAP_GROUP (PID bigint not null, myConceptMapUrl varchar(255), SOURCE_URL varchar(200) not null, mySourceValueSet varchar(255), SOURCE_VERSION varchar(100), TARGET_URL varchar(200) not null, myTargetValueSet varchar(255), TARGET_VERSION varchar(100), CONCEPT_MAP_PID bigint not null, primary key (PID))")
.addSql(DriverTypeEnum.MARIADB_10_1, "alter table TRM_CONCEPT_MAP_GROUP add constraint FK_TCMGROUP_CONCEPTMAP foreign key (CONCEPT_MAP_PID) references TRM_CONCEPT_MAP (PID)")
.addSql(DriverTypeEnum.MYSQL_5_7, "create table TRM_CONCEPT_MAP_GROUP (PID bigint not null, myConceptMapUrl varchar(255), SOURCE_URL varchar(200) not null, mySourceValueSet varchar(255), SOURCE_VERSION varchar(100), TARGET_URL varchar(200) not null, myTargetValueSet varchar(255), TARGET_VERSION varchar(100), CONCEPT_MAP_PID bigint not null, primary key (PID))")
.addSql(DriverTypeEnum.MYSQL_5_7, "alter table TRM_CONCEPT_MAP_GROUP add constraint FK_TCMGROUP_CONCEPTMAP foreign key (CONCEPT_MAP_PID) references TRM_CONCEPT_MAP (PID)")
.addSql(DriverTypeEnum.MSSQL_2012, "create table TRM_CONCEPT_MAP_GROUP (PID bigint not null, myConceptMapUrl varchar(255), SOURCE_URL varchar(200) not null, mySourceValueSet varchar(255), SOURCE_VERSION varchar(100), TARGET_URL varchar(200) not null, myTargetValueSet varchar(255), TARGET_VERSION varchar(100), CONCEPT_MAP_PID bigint not null, primary key (PID))")
.addSql(DriverTypeEnum.MSSQL_2012, "alter table TRM_CONCEPT_MAP_GROUP add constraint FK_TCMGROUP_CONCEPTMAP foreign key (CONCEPT_MAP_PID) references TRM_CONCEPT_MAP")
.addSql(DriverTypeEnum.POSTGRES_9_4, "create table TRM_CONCEPT_MAP_GROUP (PID int8 not null, myConceptMapUrl varchar(255), SOURCE_URL varchar(200) not null, mySourceValueSet varchar(255), SOURCE_VERSION varchar(100), TARGET_URL varchar(200) not null, myTargetValueSet varchar(255), TARGET_VERSION varchar(100), CONCEPT_MAP_PID int8 not null, primary key (PID))")
.addSql(DriverTypeEnum.POSTGRES_9_4, "alter table TRM_CONCEPT_MAP_GROUP add constraint FK_TCMGROUP_CONCEPTMAP foreign key (CONCEPT_MAP_PID) references TRM_CONCEPT_MAP");
// Concept Map - Group Element
forVersion(VersionEnum.V3_5_0)
.addTable("TRM_CONCEPT_MAP_GRP_ELEMENT")
.addSql(DriverTypeEnum.DERBY_EMBEDDED, "create table TRM_CONCEPT_MAP_GRP_ELEMENT (PID bigint not null, SOURCE_CODE varchar(500) not null, myConceptMapUrl varchar(255), SOURCE_DISPLAY varchar(400), mySystem varchar(255), mySystemVersion varchar(255), myValueSet varchar(255), CONCEPT_MAP_GROUP_PID bigint not null, primary key (PID))")
.addSql(DriverTypeEnum.DERBY_EMBEDDED, "alter table TRM_CONCEPT_MAP_GRP_ELEMENT add constraint FK_TCMGELEMENT_GROUP foreign key (CONCEPT_MAP_GROUP_PID) references TRM_CONCEPT_MAP_GROUP")
.addSql(DriverTypeEnum.MARIADB_10_1, "create table TRM_CONCEPT_MAP_GRP_ELEMENT (PID bigint not null, SOURCE_CODE varchar(500) not null, myConceptMapUrl varchar(255), SOURCE_DISPLAY varchar(400), mySystem varchar(255), mySystemVersion varchar(255), myValueSet varchar(255), CONCEPT_MAP_GROUP_PID bigint not null, primary key (PID))")
.addSql(DriverTypeEnum.MARIADB_10_1, "alter table TRM_CONCEPT_MAP_GRP_ELEMENT add constraint FK_TCMGELEMENT_GROUP foreign key (CONCEPT_MAP_GROUP_PID) references TRM_CONCEPT_MAP_GROUP (PID)")
.addSql(DriverTypeEnum.MARIADB_10_1, "create index IDX_CNCPT_MAP_GRP_CD on TRM_CONCEPT_MAP_GRP_ELEMENT (SOURCE_CODE)")
.addSql(DriverTypeEnum.DERBY_EMBEDDED, "create index IDX_CNCPT_MAP_GRP_CD on TRM_CONCEPT_MAP_GRP_ELEMENT (SOURCE_CODE)")
.addSql(DriverTypeEnum.MYSQL_5_7, "create table TRM_CONCEPT_MAP_GRP_ELEMENT (PID bigint not null, SOURCE_CODE varchar(500) not null, myConceptMapUrl varchar(255), SOURCE_DISPLAY varchar(400), mySystem varchar(255), mySystemVersion varchar(255), myValueSet varchar(255), CONCEPT_MAP_GROUP_PID bigint not null, primary key (PID))")
.addSql(DriverTypeEnum.MYSQL_5_7, "create index IDX_CNCPT_MAP_GRP_CD on TRM_CONCEPT_MAP_GRP_ELEMENT (SOURCE_CODE)")
.addSql(DriverTypeEnum.MYSQL_5_7, "alter table TRM_CONCEPT_MAP_GRP_ELEMENT add constraint FK_TCMGELEMENT_GROUP foreign key (CONCEPT_MAP_GROUP_PID) references TRM_CONCEPT_MAP_GROUP (PID)")
.addSql(DriverTypeEnum.POSTGRES_9_4, "create table TRM_CONCEPT_MAP_GRP_ELEMENT (PID int8 not null, SOURCE_CODE varchar(500) not null, myConceptMapUrl varchar(255), SOURCE_DISPLAY varchar(400), mySystem varchar(255), mySystemVersion varchar(255), myValueSet varchar(255), CONCEPT_MAP_GROUP_PID int8 not null, primary key (PID))")
.addSql(DriverTypeEnum.POSTGRES_9_4, "alter table TRM_CONCEPT_MAP_GRP_ELEMENT add constraint FK_TCMGELEMENT_GROUP foreign key (CONCEPT_MAP_GROUP_PID) references TRM_CONCEPT_MAP_GROUP")
.addSql(DriverTypeEnum.POSTGRES_9_4, "create index IDX_CNCPT_MAP_GRP_CD on TRM_CONCEPT_MAP_GRP_ELEMENT (SOURCE_CODE)")
.addSql(DriverTypeEnum.ORACLE_12C, "create table TRM_CONCEPT_MAP_GRP_ELEMENT (PID number(19,0) not null, SOURCE_CODE varchar2(500 char) not null, myConceptMapUrl varchar2(255 char), SOURCE_DISPLAY varchar2(400 char), mySystem varchar2(255 char), mySystemVersion varchar2(255 char), myValueSet varchar2(255 char), CONCEPT_MAP_GROUP_PID number(19,0) not null, primary key (PID))")
.addSql(DriverTypeEnum.ORACLE_12C, "alter table TRM_CONCEPT_MAP_GRP_ELEMENT add constraint FK_TCMGELEMENT_GROUP foreign key (CONCEPT_MAP_GROUP_PID) references TRM_CONCEPT_MAP_GROUP")
.addSql(DriverTypeEnum.ORACLE_12C, "create index IDX_CNCPT_MAP_GRP_CD on TRM_CONCEPT_MAP_GRP_ELEMENT (SOURCE_CODE)")
.addSql(DriverTypeEnum.MSSQL_2012, "create table TRM_CONCEPT_MAP_GRP_ELEMENT (PID bigint not null, SOURCE_CODE varchar(500) not null, myConceptMapUrl varchar(255), SOURCE_DISPLAY varchar(400), mySystem varchar(255), mySystemVersion varchar(255), myValueSet varchar(255), CONCEPT_MAP_GROUP_PID bigint not null, primary key (PID))")
.addSql(DriverTypeEnum.MSSQL_2012, "create index IDX_CNCPT_MAP_GRP_CD on TRM_CONCEPT_MAP_GRP_ELEMENT (SOURCE_CODE)")
.addSql(DriverTypeEnum.MSSQL_2012, "alter table TRM_CONCEPT_MAP_GRP_ELEMENT add constraint FK_TCMGELEMENT_GROUP foreign key (CONCEPT_MAP_GROUP_PID) references TRM_CONCEPT_MAP_GROUP");
// Concept Map - Group Element Target
forVersion(VersionEnum.V3_5_0)
.addTable("TRM_CONCEPT_MAP_GRP_ELM_TGT")
.addSql(DriverTypeEnum.DERBY_EMBEDDED, "create table TRM_CONCEPT_MAP_GRP_ELM_TGT (PID bigint not null, TARGET_CODE varchar(500) not null, myConceptMapUrl varchar(255), TARGET_DISPLAY varchar(400), TARGET_EQUIVALENCE varchar(50), mySystem varchar(255), mySystemVersion varchar(255), myValueSet varchar(255), CONCEPT_MAP_GRP_ELM_PID bigint not null, primary key (PID))")
.addSql(DriverTypeEnum.DERBY_EMBEDDED, "alter table TRM_CONCEPT_MAP_GRP_ELM_TGT add constraint FK_TCMGETARGET_ELEMENT foreign key (CONCEPT_MAP_GRP_ELM_PID) references TRM_CONCEPT_MAP_GRP_ELEMENT")
.addSql(DriverTypeEnum.DERBY_EMBEDDED, "create index IDX_CNCPT_MP_GRP_ELM_TGT_CD on TRM_CONCEPT_MAP_GRP_ELM_TGT (TARGET_CODE)")
.addSql(DriverTypeEnum.MARIADB_10_1, "create table TRM_CONCEPT_MAP_GRP_ELM_TGT (PID bigint not null, TARGET_CODE varchar(500) not null, myConceptMapUrl varchar(255), TARGET_DISPLAY varchar(400), TARGET_EQUIVALENCE varchar(50), mySystem varchar(255), mySystemVersion varchar(255), myValueSet varchar(255), CONCEPT_MAP_GRP_ELM_PID bigint not null, primary key (PID))")
.addSql(DriverTypeEnum.MARIADB_10_1, "alter table TRM_CONCEPT_MAP_GRP_ELM_TGT add constraint FK_TCMGETARGET_ELEMENT foreign key (CONCEPT_MAP_GRP_ELM_PID) references TRM_CONCEPT_MAP_GRP_ELEMENT (PID)")
.addSql(DriverTypeEnum.MARIADB_10_1, "create index IDX_CNCPT_MP_GRP_ELM_TGT_CD on TRM_CONCEPT_MAP_GRP_ELM_TGT (TARGET_CODE)")
.addSql(DriverTypeEnum.MYSQL_5_7, "create table TRM_CONCEPT_MAP_GRP_ELM_TGT (PID bigint not null, TARGET_CODE varchar(500) not null, myConceptMapUrl varchar(255), TARGET_DISPLAY varchar(400), TARGET_EQUIVALENCE varchar(50), mySystem varchar(255), mySystemVersion varchar(255), myValueSet varchar(255), CONCEPT_MAP_GRP_ELM_PID bigint not null, primary key (PID))")
.addSql(DriverTypeEnum.MYSQL_5_7, "alter table TRM_CONCEPT_MAP_GRP_ELM_TGT add constraint FK_TCMGETARGET_ELEMENT foreign key (CONCEPT_MAP_GRP_ELM_PID) references TRM_CONCEPT_MAP_GRP_ELEMENT (PID)")
.addSql(DriverTypeEnum.MYSQL_5_7, "create index IDX_CNCPT_MP_GRP_ELM_TGT_CD on TRM_CONCEPT_MAP_GRP_ELM_TGT (TARGET_CODE)")
.addSql(DriverTypeEnum.ORACLE_12C, "create table TRM_CONCEPT_MAP_GRP_ELM_TGT (PID number(19,0) not null, TARGET_CODE varchar2(500 char) not null, myConceptMapUrl varchar2(255 char), TARGET_DISPLAY varchar2(400 char), TARGET_EQUIVALENCE varchar2(50 char), mySystem varchar2(255 char), mySystemVersion varchar2(255 char), myValueSet varchar2(255 char), CONCEPT_MAP_GRP_ELM_PID number(19,0) not null, primary key (PID))")
.addSql(DriverTypeEnum.ORACLE_12C, "alter table TRM_CONCEPT_MAP_GRP_ELM_TGT add constraint FK_TCMGETARGET_ELEMENT foreign key (CONCEPT_MAP_GRP_ELM_PID) references TRM_CONCEPT_MAP_GRP_ELEMENT")
.addSql(DriverTypeEnum.ORACLE_12C, "create index IDX_CNCPT_MP_GRP_ELM_TGT_CD on TRM_CONCEPT_MAP_GRP_ELM_TGT (TARGET_CODE)")
.addSql(DriverTypeEnum.POSTGRES_9_4, "create table TRM_CONCEPT_MAP_GRP_ELM_TGT (PID int8 not null, TARGET_CODE varchar(500) not null, myConceptMapUrl varchar(255), TARGET_DISPLAY varchar(400), TARGET_EQUIVALENCE varchar(50), mySystem varchar(255), mySystemVersion varchar(255), myValueSet varchar(255), CONCEPT_MAP_GRP_ELM_PID int8 not null, primary key (PID))")
.addSql(DriverTypeEnum.POSTGRES_9_4, "alter table TRM_CONCEPT_MAP_GRP_ELM_TGT add constraint FK_TCMGETARGET_ELEMENT foreign key (CONCEPT_MAP_GRP_ELM_PID) references TRM_CONCEPT_MAP_GRP_ELEMENT")
.addSql(DriverTypeEnum.POSTGRES_9_4, "create index IDX_CNCPT_MP_GRP_ELM_TGT_CD on TRM_CONCEPT_MAP_GRP_ELM_TGT (TARGET_CODE)")
.addSql(DriverTypeEnum.MSSQL_2012, "create table TRM_CONCEPT_MAP_GRP_ELM_TGT (PID bigint not null, TARGET_CODE varchar(500) not null, myConceptMapUrl varchar(255), TARGET_DISPLAY varchar(400), TARGET_EQUIVALENCE varchar(50), mySystem varchar(255), mySystemVersion varchar(255), myValueSet varchar(255), CONCEPT_MAP_GRP_ELM_PID bigint not null, primary key (PID))")
.addSql(DriverTypeEnum.MSSQL_2012, "create index IDX_CNCPT_MP_GRP_ELM_TGT_CD on TRM_CONCEPT_MAP_GRP_ELM_TGT (TARGET_CODE)")
.addSql(DriverTypeEnum.MSSQL_2012, "alter table TRM_CONCEPT_MAP_GRP_ELM_TGT add constraint FK_TCMGETARGET_ELEMENT foreign key (CONCEPT_MAP_GRP_ELM_PID) references TRM_CONCEPT_MAP_GRP_ELEMENT");
}
private Builder forVersion(VersionEnum theVersion) {
@@ -279,6 +407,11 @@ public class HapiFhirJpaMigrationTasks {
myTasks.put(myVersion, theTask);
}
public BuilderAddTable addTable(String theTableName) {
myTableName = theTableName;
return new BuilderAddTable();
}
private class BuilderWithTableName {
private String myIndexName;
private String myColumnName;
@@ -381,6 +514,22 @@ public class HapiFhirJpaMigrationTasks {
}
}
}
private class BuilderAddTable {
private final AddTableTask myTask;
private BuilderAddTable() {
myTask = new AddTableTask();
myTask.setTableName(myTableName);
}
public BuilderAddTable addSql(DriverTypeEnum theDriverTypeEnum, @Language("SQL") String theSql) {
myTask.addSql(theDriverTypeEnum, theSql);
return this;
}
}
}
}
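For orientation, the BuilderAddTable added above is driven through the same fluent chain used throughout the migration definitions. A minimal sketch of how a call would look inside init(), using a hypothetical table name that is not part of this commit:

// Hypothetical example only: EXAMPLE_TABLE is not a real HAPI FHIR table
forVersion(VersionEnum.V3_5_0)
	.addTable("EXAMPLE_TABLE")
	.addSql(DriverTypeEnum.DERBY_EMBEDDED, "create table EXAMPLE_TABLE (PID bigint not null, primary key (PID))");

Each addTable() call produces an AddTableTask carrying one create statement per supported driver, and the new AddTableTest below suggests the task is skipped when the table already exists.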

View File

@@ -0,0 +1,42 @@
package ca.uhn.fhir.jpa.migrate.taskdef;
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.JdbcUtils;
import org.junit.Test;
import java.sql.SQLException;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.junit.Assert.assertThat;
public class AddTableTest extends BaseTest {
@Test
public void testTableDoesntAlreadyExist() throws SQLException {
AddTableTask task = new AddTableTask();
task.setTableName("SOMETABLE");
task.addSql(DriverTypeEnum.DERBY_EMBEDDED, "create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255))");
getMigrator().addTask(task);
getMigrator().migrate();
assertThat(JdbcUtils.getTableNames(getConnectionProperties()), containsInAnyOrder("SOMETABLE"));
}
@Test
public void testTableAlreadyExists() throws SQLException {
executeSql("create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255))");
assertThat(JdbcUtils.getTableNames(getConnectionProperties()), containsInAnyOrder("SOMETABLE"));
AddTableTask task = new AddTableTask();
task.setTableName("SOMETABLE");
task.addSql(DriverTypeEnum.DERBY_EMBEDDED, "create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255))");
getMigrator().addTask(task);
getMigrator().migrate();
assertThat(JdbcUtils.getTableNames(getConnectionProperties()), containsInAnyOrder("SOMETABLE"));
}
}

pom.xml
View File

@@ -499,22 +499,22 @@
<caffeine_version>2.6.2</caffeine_version>
<commons_codec_version>1.11</commons_codec_version>
<commons_io_version>2.6</commons_io_version>
<commons_lang3_version>3.7</commons_lang3_version> <commons_lang3_version>3.8</commons_lang3_version>
<derby_version>10.14.2.0</derby_version>
<error_prone_annotations_version>2.0.18</error_prone_annotations_version>
<guava_version>25.0-jre</guava_version>
<gson_version>2.8.1</gson_version> <gson_version>2.8.5</gson_version>
<jaxb_bundle_version>2.2.11_1</jaxb_bundle_version>
<jaxb_api_version>2.3.0</jaxb_api_version>
<jaxb_core_version>2.3.0</jaxb_core_version>
<jersey_version>2.25.1</jersey_version>
<jetty_version>9.4.10.v20180503</jetty_version> <jetty_version>9.4.12.v20180830</jetty_version>
<jsr305_version>3.0.2</jsr305_version>
<!--<hibernate_version>5.2.10.Final</hibernate_version>-->
<hibernate_version>5.3.1.Final</hibernate_version> <hibernate_version>5.3.6.Final</hibernate_version>
<hibernate_validator_version>5.4.1.Final</hibernate_validator_version>
<!-- Update lucene version when you update hibernate-search version -->
<hibernate_search_version>5.10.1.Final</hibernate_search_version> <hibernate_search_version>5.10.3.Final</hibernate_search_version>
<httpcore_version>4.4.6</httpcore_version>
<httpclient_version>4.5.3</httpclient_version>
<lucene_version>5.5.5</lucene_version>
@@ -527,7 +527,7 @@
<servicemix_saxon_version>9.5.1-5_1</servicemix_saxon_version>
<servicemix_xmlresolver_version>1.2_5</servicemix_xmlresolver_version>
<slf4j_version>1.7.25</slf4j_version>
<spring_version>5.0.6.RELEASE</spring_version> <spring_version>5.0.8.RELEASE</spring_version>
<spring_data_version>2.0.7.RELEASE</spring_data_version>
<spring-boot.version>1.5.6.RELEASE</spring-boot.version>
@@ -830,6 +830,11 @@
<artifactId>commons-lang3</artifactId>
<version>${commons_lang3_version}</version>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-text</artifactId>
<version>1.4</version>
</dependency>
<dependency>
<groupId>org.apache.derby</groupId>
<artifactId>derby</artifactId>
@@ -1446,7 +1451,7 @@
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>versions-maven-plugin</artifactId>
<version>2.6-SNAPSHOT</version> <version>2.6</version>
<configuration>
<processDependencyManagementTransitive>false</processDependencyManagementTransitive>
</configuration>
@@ -1468,7 +1473,7 @@
<plugin>
<groupId>org.jacoco</groupId>
<artifactId>jacoco-maven-plugin</artifactId>
<version>0.8.1</version> <version>0.8.2</version>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
@@ -1644,7 +1649,7 @@
<configuration>
<rules>
<requireMavenVersion>
<version>3.3.1</version> <version>3.3.9</version>
</requireMavenVersion>
<requireJavaVersion>
<version>1.8</version>

View File

@@ -17,16 +17,26 @@
latest versions (dependent HAPI modules listed in brackets):
<![CDATA[
<ul>
<li>Spring Framework (JPA): 5.0.3.RELEASE -&gt; 5.0.6.RELEASE</li> <li>Gson (JSON Parser): 2.8.1 -&gt; 2.8.5</li>
<li>Hibernate OR (JPA): 5.2.16.Final -&gt; 5.3.1.Final</li> <li>Spring Framework (JPA): 5.0.3.RELEASE -&gt; 5.0.8.RELEASE</li>
<li>Hibernate Search (JPA): 5.7.1.Final -&gt; 5.10.1.Final</li> <li>Hibernate ORM (JPA): 5.2.16.Final -&gt; 5.3.6.Final</li>
<li>Jetty (CLI): 9.4.8.v20171121 -&gt; 9.4.10.v20180503</li> <li>Hibernate Search (JPA): 5.7.1.Final -&gt; 5.10.3.Final</li>
<li>Jetty (CLI): 9.4.8.v20171121 -&gt; 9.4.12.v20180830</li>
<li>Commons-Codec (All): 1.10 -&gt; 1.11</li>
<li>Commons-Lang (All): 3.7 -&gt; 3.8</li>
<li>Commons-IO (All): 2.5 -&gt; 2.6</li>
<li>Spring-Data (JPA): 1.11.6.RELEASE -&gt; 2.0.7.RELEASE</li>
</ul>
]]>
</action>
<action type="add">
A new mandatory library dependency has been added to hapi-fhir-base, meaning that all
applications using HAPI FHIR must now import it: commons-text. This library has been added
because a few utility methods used by HAPI FHIR that were formerly in the commons-lang3
project have been moved into commons-text. It is declared as a non-optional
dependency in the hapi-fhir-base POM, so Maven/Gradle users should not have to make
any changes.
</action>
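To make the above concrete: WordUtils is an example of a helper that was deprecated in commons-lang3 and now lives in commons-text, so only the import changes. A minimal sketch of the package move (illustrative only; this class is not part of the commit):

// Before: import org.apache.commons.lang3.text.WordUtils;   (deprecated in commons-lang3 3.6+)
// After, resolved from the new commons-text dependency:
import org.apache.commons.text.WordUtils;

public class WordUtilsExample {
	public static void main(String[] args) {
		// Same helper, new package: capitalizes the first letter of each word
		System.out.println(WordUtils.capitalizeFully("hapi fhir jpa server")); // prints "Hapi Fhir Jpa Server"
	}
}

Applications that only consume HAPI FHIR through Maven or Gradle pick up commons-text transitively and need no such change.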
<action type="add">
The JPA server now has a configuration item in the DaoConfig to specify which bundle types
may be stored as-is on the /Bundle endpoint. By default the following types