Merge remote-tracking branch 'origin/master' into ks-20191119-scheduler
commit bfa89483e7
@@ -146,6 +146,34 @@ public class HapiFlywayMigrateDatabaseCommandTest {
		assertTrue(JdbcUtils.getTableNames(connectionProperties).contains("HFJ_BLK_EXPORT_JOB")); // Late table
	}

	@Test
	public void testMigrateFromEmptySchema_NoFlyway() throws IOException, SQLException {

		File location = getLocation("migrator_h2_test_empty_current_noflyway");

		String url = "jdbc:h2:" + location.getAbsolutePath() + ";create=true";
		DriverTypeEnum.ConnectionProperties connectionProperties = DriverTypeEnum.H2_EMBEDDED.newConnectionProperties(url, "", "");

		ourLog.info("**********************************************");
		ourLog.info("Starting Migration...");
		ourLog.info("**********************************************");

		String[] args = new String[]{
			BaseFlywayMigrateDatabaseCommand.MIGRATE_DATABASE,
			"-d", "H2_EMBEDDED",
			"-u", url,
			"-n", "",
			"-p", "",
			"--" + BaseFlywayMigrateDatabaseCommand.DONT_USE_FLYWAY
		};

		assertFalse(JdbcUtils.getTableNames(connectionProperties).contains("HFJ_RESOURCE"));
		assertFalse(JdbcUtils.getTableNames(connectionProperties).contains("HFJ_BLK_EXPORT_JOB"));
		App.main(args);
		assertTrue(JdbcUtils.getTableNames(connectionProperties).contains("HFJ_RESOURCE")); // Early table
		assertTrue(JdbcUtils.getTableNames(connectionProperties).contains("HFJ_BLK_EXPORT_JOB")); // Late table
	}

	@NotNull
	private File getLocation(String theDatabaseName) throws IOException {
		File directory = new File(DB_DIRECTORY);
@@ -23,15 +23,33 @@ package ca.uhn.hapi.fhir.docs;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.parser.DataFormatException;
import ca.uhn.fhir.parser.IParser;
import org.hl7.fhir.r4.model.Patient;

import java.io.IOException;

public class Parser {

	public static void main(String[] args) throws DataFormatException, IOException {
		{
			//START SNIPPET: createParser
			// Create a FHIR context
			FhirContext ctx = FhirContext.forR4();

			// Create a Patient resource to serialize
			Patient patient = new Patient();
			patient.addName().setFamily("Simpson").addGiven("James");

			// Instantiate a new parser
			IParser parser = ctx.newJsonParser();

			// Serialize it
			String serialized = parser.encodeResourceToString(patient);
			System.out.println(serialized);
			//END SNIPPET: createParser
		}
		{
			//START SNIPPET: disableStripVersions
			FhirContext ctx = FhirContext.forDstu2();
			FhirContext ctx = FhirContext.forR4();
			IParser parser = ctx.newJsonParser();

			// Disable the automatic stripping of versions from references on the parser
@@ -41,12 +59,11 @@ parser.setStripVersionsFromReferences(false);
			//START SNIPPET: disableStripVersionsCtx
			ctx.getParserOptions().setStripVersionsFromReferences(false);
			//END SNIPPET: disableStripVersionsCtx

		}

		{
			//START SNIPPET: disableStripVersionsField
			FhirContext ctx = FhirContext.forDstu2();
			FhirContext ctx = FhirContext.forR4();
			IParser parser = ctx.newJsonParser();

			// Preserve versions only on these two fields (for the given parser)
@@ -9,6 +9,7 @@ page.introduction.downloading_and_importing=Downloading and Importing

section.model.title=Working With The FHIR Model
page.model.working_with_resources=Working With Resources
page.model.parsers=Parsing and Serializing
page.model.references=Resource References
page.model.profiles_and_extensions=Profiles and Extensions
page.model.converter=Version Converters
@@ -0,0 +1,17 @@
# Parsers and Serializers

HAPI FHIR has built-in support for the FHIR [JSON](http://hl7.org/fhir/json.html) and [XML](http://hl7.org/fhir/xml.html) encoding formats.

A built-in parser can be used to convert HAPI FHIR Java objects into a serialized form, and to parse serialized data into Java objects. Note that unlike some other frameworks, HAPI FHIR does not have separate parsers and serializers. Both of these functions are handled by a single object called the **Parser**.

# Serializing

As with many parts of the HAPI FHIR API, parsing begins with a [FhirContext](/apidocs/hapi-fhir-base/ca/uhn/fhir/context/FhirContext.html) object. The FhirContext can be used to request an [IParser](/apidocs/hapi-fhir-base/ca/uhn/fhir/parser/IParser.html) for your chosen encoding style, which is then used to serialize.

The following example shows a JSON Parser being used to serialize a FHIR resource.

```java
{{snippet:classpath:/ca/uhn/hapi/fhir/docs/Parser.java|createParser}}
```
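
Parsing goes through the same `IParser` object. The following is a minimal sketch (illustrative only, not a snippet file from this change), assuming the same imports as the `Parser.java` example above:

```java
// Create a context and a JSON parser, as above
FhirContext ctx = FhirContext.forR4();
IParser parser = ctx.newJsonParser();

// Serialize a Patient, then parse the string back into a new Patient instance
Patient patient = new Patient();
patient.addName().setFamily("Simpson").addGiven("James");
String serialized = parser.encodeResourceToString(patient);

// parseResource is the inverse operation, on the same parser object
Patient parsed = parser.parseResource(Patient.class, serialized);
System.out.println(parsed.getNameFirstRep().getFamily()); // Simpson
```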
HapiWorkerContext
@@ -42,11 +42,12 @@ public class BruteForceMigrator extends BaseMigrator {

	@Override
	public void migrate() {
		DriverTypeEnum.ConnectionProperties connectionProperties = getDriverType().newConnectionProperties(getConnectionUrl(), getUsername(), getPassword());

		for (BaseTask<?> next : myTasks) {
			next.setDriverType(getDriverType());
			next.setDryRun(isDryRun());
			next.setNoColumnShrink(isNoColumnShrink());
			DriverTypeEnum.ConnectionProperties connectionProperties = getDriverType().newConnectionProperties(getConnectionUrl(), getUsername(), getPassword());
			next.setConnectionProperties(connectionProperties);

			try {