Add option to upload-terminology for security header

This commit is contained in:
James Agnew 2016-08-09 15:38:30 -04:00
parent af68a9d3b4
commit ac2d55139d
6 changed files with 91 additions and 59 deletions

View File

@ -3,76 +3,20 @@ package ca.uhn.fhir.cli;
import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClientBuilder;
import org.fusesource.jansi.Ansi;
import org.hl7.fhir.dstu3.hapi.validation.DefaultProfileValidationSupport;
import org.hl7.fhir.dstu3.hapi.validation.FhirInstanceValidator;
import org.hl7.fhir.dstu3.model.Attachment;
import org.hl7.fhir.dstu3.model.Bundle.BundleEntryComponent;
import org.hl7.fhir.dstu3.model.Bundle.BundleType;
import org.hl7.fhir.dstu3.model.Bundle.HTTPVerb;
import org.hl7.fhir.dstu3.model.IdType;
import org.hl7.fhir.dstu3.model.Parameters;
import org.hl7.fhir.dstu3.model.Resource;
import org.hl7.fhir.dstu3.model.StringType;
import org.hl7.fhir.dstu3.model.UriType;
import org.hl7.fhir.instance.model.api.IBase;
import org.hl7.fhir.instance.model.api.IBaseBundle;
import org.hl7.fhir.instance.model.api.IBaseParameters;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.omg.Dynamic.Parameter;
import ca.uhn.fhir.context.BaseRuntimeChildDefinition;
import ca.uhn.fhir.context.BaseRuntimeElementCompositeDefinition;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.FhirVersionEnum;
import ca.uhn.fhir.jpa.term.IHapiTerminologyLoaderSvc;
import ca.uhn.fhir.model.api.IResource;
import ca.uhn.fhir.model.dstu2.resource.Bundle;
import ca.uhn.fhir.model.dstu2.resource.Bundle.Entry;
import ca.uhn.fhir.model.dstu2.resource.Bundle.EntryRequest;
import ca.uhn.fhir.model.dstu2.valueset.HTTPVerbEnum;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.model.valueset.BundleTypeEnum;
import ca.uhn.fhir.parser.DataFormatException;
import ca.uhn.fhir.rest.client.IGenericClient;
import ca.uhn.fhir.rest.client.apache.GZipContentInterceptor;
import ca.uhn.fhir.rest.server.IVersionSpecificBundleFactory;
import ca.uhn.fhir.rest.server.exceptions.BaseServerResponseException;
import ca.uhn.fhir.util.BundleUtil;
import ca.uhn.fhir.util.ResourceReferenceInfo;
import ca.uhn.fhir.validation.FhirValidator;
import ca.uhn.fhir.validation.ValidationResult;
import ca.uhn.fhir.rest.client.interceptor.BearerTokenAuthInterceptor;
public class UploadTerminologyCommand extends BaseCommand {
@ -111,6 +55,10 @@ public class UploadTerminologyCommand extends BaseCommand {
opt.setRequired(false);
options.addOption(opt);
opt = new Option("b", "bearer-token", true, "Bearer token to add to the request");
opt.setRequired(false);
options.addOption(opt);
return options;
}
@ -134,7 +82,10 @@ public class UploadTerminologyCommand extends BaseCommand {
if (datafile == null || datafile.length == 0) {
throw new ParseException("No data file provided");
}
String bearerToken = theCommandLine.getOptionValue("b");
IGenericClient client = super.newClient(ctx, targetServer);
IBaseParameters inputParameters;
if (ctx.getVersion().getVersion() == FhirVersionEnum.DSTU3) {
@ -148,6 +99,10 @@ public class UploadTerminologyCommand extends BaseCommand {
throw new ParseException("This command does not support FHIR version " + ctx.getVersion().getVersion());
}
if (isNotBlank(bearerToken)) {
client.registerInterceptor(new BearerTokenAuthInterceptor(bearerToken));
}
ourLog.info("Beginning upload - This may take a while...");
IBaseParameters response = client
.operation()

View File

@ -65,6 +65,13 @@ public class TermCodeSystemVersion implements Serializable {
@Column(name = "RES_VERSION_ID", nullable = false, updatable = false)
private Long myResourceVersionId;
/**
 * No-argument constructor.
 *
 * <p>NOTE(review): this class carries JPA {@code @Column} mappings, so this
 * constructor is presumably required by the persistence provider for entity
 * instantiation — confirm before removing.
 */
public TermCodeSystemVersion() {
	super();
}
public Collection<TermConcept> getConcepts() {
if (myConcepts == null) {
myConcepts = new ArrayList<TermConcept>();

View File

@ -38,7 +38,7 @@
<appender-ref ref="STDOUT" />
</logger>
<!-- Set to 'trace' to enable SQL Value logging -->
<logger name="org.hibernate.type" additivity="false" level="info">
<logger name="org.hibernate.type" additivity="false" level="trace">
<appender-ref ref="STDOUT" />
</logger>

View File

@ -151,6 +151,11 @@
populated with the actual target resource instance. Thanks to
Neal Acharya for reporting!
</action>
<action type="add">
hapi-fhir-cli upload-terminology command now has an argument
"-b FOO" that lets you add an authorization header in the form
<![CDATA[<code>Authorization: Bearer FOO</code>]]>
</action>
</release>
<release version="1.6" date="2016-07-07">
<action type="fix">

View File

@ -241,6 +241,36 @@ public DaoConfig daoConfig() {
<!--
alter table hfj_res_link ALTER COLUMN "TARGET_RESOURCE_ID" NULL;
--select sp_index_status, count(*) from hfj_resource group by sp_index_status
delete from hfj_history_tag where res_id in (select res_id from hfj_resource where sp_index_status = 2);
delete from hfj_res_tag where res_id in (select res_id from hfj_resource where sp_index_status = 2);
delete from hfj_spidx_coords where res_id in (select res_id from hfj_resource where sp_index_status = 2);
delete from hfj_spidx_number where res_id in (select res_id from hfj_resource where sp_index_status = 2);
delete from hfj_spidx_quantity where res_id in (select res_id from hfj_resource where sp_index_status = 2);
delete from hfj_spidx_string where res_id in (select res_id from hfj_resource where sp_index_status = 2);
delete from hfj_spidx_token where res_id in (select res_id from hfj_resource where sp_index_status = 2);
delete from hfj_spidx_uri where res_id in (select res_id from hfj_resource where sp_index_status = 2);
delete from hfj_search_result where resource_pid in (select res_id from hfj_resource where sp_index_status = 2);
delete from hfj_res_link where src_resource_id in (select res_id from hfj_resource where sp_index_status = 2);
delete from hfj_res_link where target_resource_id in (select res_id from hfj_resource where sp_index_status = 2);
delete from hfj_subscription where res_id in (select res_id from hfj_resource where sp_index_status = 2);
delete from hfj_subscription_flag_res where res_id in (select res_id from hfj_resource where sp_index_status = 2);
delete from trm_concept_pc_link where pid in (select pid from trm_concept where codesystem_pid in (select pid from trm_codesystem_ver where res_id in (select res_id from hfj_resource where sp_index_status = 2)));
delete from trm_concept where codesystem_pid in (select pid from trm_codesystem_ver where res_id in (select res_id from hfj_resource where sp_index_status = 2));
delete from trm_codesystem_ver where res_id in (select res_id from hfj_resource where sp_index_status = 2);
delete from trm_codesystem where res_id in (select res_id from hfj_resource where sp_index_status = 2);
update hfj_resource set forced_id_pid = null where res_id in (select res_id from hfj_resource where sp_index_status = 2);
update hfj_res_ver set forced_id_pid = null where res_id in (select res_id from hfj_resource where sp_index_status = 2);
delete from hfj_forced_id where resource_pid in (select res_id from hfj_resource where sp_index_status = 2);
delete from hfj_resource where res_id in (select res_id from hfj_resource where sp_index_status = 2);
delete from hfj_res_ver where res_id in (select res_id from hfj_resource where sp_index_status = 2);
-->
</body>

View File

@ -97,8 +97,43 @@
[INFO] Total time: 20:45 min
[INFO] Finished at: 2016-02-27T15:05:35+00:00</pre>
</p>
<subsection name="Troubleshooting">
<p>
If the build fails, try the following:
</p>
<ul>
<li>
The first thing to try is always a fresh clean build when things aren't working:<br/>
<pre>mvn clean install</pre>
</li>
<li>
<b>If you are trying to build a submodule</b> (e.g. <code>hapi-fhir-jpaserver-example</code>),
try building the root project first. Especially when building from the Git <code>master</code>,
often there are dependencies that require a fresh complete build (note that this is
not generally an issue when building from a release version)<br/>
<pre><![CDATA[cd [workspace]/hapi-fhir
mvn install]]></pre>
</li>
<li>
<b>If the build fails with memory issues (or mysteriously dies during unit tests)</b>,
your build environment may be running out of memory. By default, the HAPI build executes
unit tests in multiple parallel JVMs in order to save time. This can consume a lot of RAM
and sometimes causes issues. Try executing with the following command to disable
this behaviour:<br/>
<pre>mvn -P ALLMODULES,NOPARALLEL install</pre>
</li>
<li>
If you figure something else out, please <b>let us know</b> so that we can add it
to this list!
</li>
</ul>
</subsection>
</section>
<section name="Importing into Eclipse">
<p>
This section shows how to import HAPI into Eclipse. There is no requirement